dfdf
API/HTTP.py (+14)
@@ -229,6 +229,13 @@ class HTTPClient:
         response.raise_for_status()
         total_bytes = int(response.headers.get("content-length", 0))
         bytes_downloaded = 0
+
+        # Render progress immediately (even if the transfer is very fast)
+        if progress_callback:
+            try:
+                progress_callback(0, total_bytes)
+            except Exception:
+                pass

         with open(path, "wb") as f:
             for chunk in response.iter_bytes(chunk_size):
@@ -237,6 +244,13 @@ class HTTPClient:
                 bytes_downloaded += len(chunk)
                 if progress_callback:
                     progress_callback(bytes_downloaded, total_bytes)

+        # Ensure a final callback is emitted.
+        if progress_callback:
+            try:
+                progress_callback(bytes_downloaded, total_bytes)
+            except Exception:
+                pass
+
         return path
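Taken together, the two hunks above define the callback contract for downloads: progress_callback(bytes_downloaded, total_bytes) fires once with 0 before the first chunk, once per chunk, and once more after the loop, and total_bytes is 0 when the server omits Content-Length. A minimal sketch of a compatible callback follows; the download call at the end is hypothetical, since the enclosing method name is not visible in the diff.

import sys

def print_progress(done: int, total: int) -> None:
    # total is 0 when no Content-Length header was sent.
    if total > 0:
        sys.stderr.write(f"\r{done * 100 // total:3d}% ({done}/{total} bytes)")
    else:
        sys.stderr.write(f"\r{done} bytes")
    sys.stderr.flush()

# client.download(url, path, progress_callback=print_progress)  # hypothetical call site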
@@ -152,55 +152,24 @@ class HydrusNetwork:
         logger.debug(f"{self._log_prefix()} Uploading file {file_path.name} ({file_size} bytes)")

         # Stream upload body with a stderr progress bar (pipeline-safe).
-        try:
-            from models import ProgressBar
-        except Exception:
-            ProgressBar = None  # type: ignore[assignment]
+        from models import ProgressBar

-        bar = ProgressBar() if ProgressBar is not None else None
+        bar = ProgressBar()
         label = f"{self._log_prefix().strip('[]')} upload"
         start_t = time.time()
         last_render_t = [start_t]
         last_log_t = [start_t]
         sent = [0]
         tty = bool(getattr(sys.stderr, "isatty", lambda: False)())

         def _render_progress(final: bool = False) -> None:
             if bar is None:
                 return
             if file_size <= 0:
                 return
-            now = time.time()
-            if not final and (now - float(last_render_t[0])) < 0.25:
-                return
-            last_render_t[0] = now
-            elapsed = max(0.001, now - start_t)
-            speed = float(sent[0]) / elapsed
-            eta_s = (float(file_size) - float(sent[0])) / speed if speed > 0 else 0.0
-            minutes, seconds = divmod(int(max(0.0, eta_s)), 60)
-            hours, minutes = divmod(minutes, 60)
-            eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}"
-            speed_str = bar.format_bytes(speed) + "/s"
-
-            line = bar.format_progress(
-                percent_str=None,
-                downloaded=int(sent[0]),
-                total=int(file_size),
-                speed_str=speed_str,
-                eta_str=eta_str,
-            )
-
-            try:
-                if tty:
-                    sys.stderr.write("\r" + f"[{label}] " + line + " ")
-                    sys.stderr.flush()
-                else:
-                    # Non-interactive: keep it quiet-ish.
-                    if final or (now - float(last_log_t[0])) >= 2.0:
-                        log(f"[{label}] {line}", file=sys.stderr)
-                        last_log_t[0] = now
-            except Exception:
-                pass
+            bar.update(downloaded=int(sent[0]), total=int(file_size), label=str(label), file=sys.stderr)
+            if final:
+                bar.finish()

         def file_gen():
             try:
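The hunk above drops the hand-rolled rendering inside _render_progress in favor of ProgressBar.update() and finish(). The real models.ProgressBar is not shown in this commit; the sketch below only illustrates the interface the new code relies on (update() with downloaded/total/label/file keywords, plus finish()), with the throttling and formatting details being assumptions.

import sys
import time

class ProgressBarSketch:
    """Stand-in for models.ProgressBar; interface inferred from the call sites above."""

    def __init__(self, min_interval: float = 0.25) -> None:
        self._last_render = 0.0
        self._min_interval = min_interval
        self._file = sys.stderr

    def update(self, downloaded: int, total: int, label: str = "", file=sys.stderr) -> None:
        # Throttle rendering so fast transfers do not spam the terminal.
        now = time.time()
        if now - self._last_render < self._min_interval:
            return
        self._last_render = now
        self._file = file
        pct = downloaded * 100 // total if total > 0 else 0
        file.write(f"\r[{label}] {pct:3d}% ({downloaded}/{total} bytes)")
        file.flush()

    def finish(self) -> None:
        # End the in-place progress line so subsequent output starts cleanly.
        self._file.write("\n")
        self._file.flush()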
@@ -214,12 +183,6 @@ class HydrusNetwork:
                 yield chunk
             finally:
                 _render_progress(final=True)
-                if tty:
-                    try:
-                        sys.stderr.write("\n")
-                        sys.stderr.flush()
-                    except Exception:
-                        pass

         response = client.request(
             spec.method,
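These HydrusNetwork hunks keep the streaming-body pattern: a local generator yields file chunks, reports progress as a side effect, and guarantees a final render in its finally block, and client.request() then consumes the generator as the request body. A self-contained sketch of that pattern; the URL, chunk size, and use of httpx are assumptions, since the diff only shows file_gen() being handed to client.request().

from pathlib import Path
import httpx

def upload_with_progress(client: httpx.Client, url: str, file_path: Path, bar) -> httpx.Response:
    file_size = file_path.stat().st_size
    sent = 0

    def file_gen():
        nonlocal sent
        try:
            with open(file_path, "rb") as fh:
                while True:
                    chunk = fh.read(64 * 1024)
                    if not chunk:
                        break
                    sent += len(chunk)
                    bar.update(downloaded=sent, total=file_size, label="upload")
                    yield chunk
        finally:
            # Always close out the progress line, even if the transfer aborts.
            bar.finish()

    # httpx accepts a byte iterator as the request body for chunked uploads.
    return client.request("POST", url, content=file_gen())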
@@ -258,6 +258,7 @@ class API_folder_store:
         cursor.execute("CREATE INDEX IF NOT EXISTS idx_files_path ON files(file_path)")
         cursor.execute("CREATE INDEX IF NOT EXISTS idx_tags_hash ON tags(hash)")
         cursor.execute("CREATE INDEX IF NOT EXISTS idx_tags_tag ON tags(tag)")
+        cursor.execute("CREATE INDEX IF NOT EXISTS idx_metadata_ext ON metadata(ext)")
         cursor.execute("CREATE INDEX IF NOT EXISTS idx_worker_id ON worker(worker_id)")
         cursor.execute("CREATE INDEX IF NOT EXISTS idx_worker_status ON worker(status)")
         cursor.execute("CREATE INDEX IF NOT EXISTS idx_worker_type ON worker(worker_type)")
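The new idx_metadata_ext index is aimed at the ext lookups added in the next hunk. Note that SQLite only uses a plain column index when the predicate references the bare column, so the LOWER(LTRIM(COALESCE(...))) comparison below may still scan unless a matching expression index exists; EXPLAIN QUERY PLAN makes this easy to verify. A quick check against a copy of the database (file name and schema assumed from the diff):

import sqlite3

con = sqlite3.connect("api_folder_store.db")  # hypothetical database path
rows = con.execute(
    """
    EXPLAIN QUERY PLAN
    SELECT DISTINCT f.hash
    FROM files f
    JOIN metadata m ON f.hash = m.hash
    WHERE LOWER(LTRIM(COALESCE(m.ext, ''), '.')) = ?
    """,
    ("jpg",),
).fetchall()
for row in rows:
    print(row)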
@@ -1858,6 +1859,73 @@ class DatabaseAPI:
         )
         return {row[0] for row in cursor.fetchall()}

+    def get_file_hashes_by_ext(self, ext_value: str, limit: Optional[int] = None) -> Set[str]:
+        """Get hashes of files whose metadata ext matches the given extension.
+
+        Matches case-insensitively and ignores any leading '.' in stored ext.
+        Supports glob wildcards '*' and '?' in the query.
+        """
+        ext_clean = str(ext_value or "").strip().lower().lstrip(".")
+        ext_clean = "".join(ch for ch in ext_clean if ch.isalnum())
+        if not ext_clean:
+            return set()
+
+        cursor = self.get_cursor()
+
+        has_glob = ("*" in ext_value) or ("?" in ext_value)
+        if has_glob:
+            pattern = str(ext_value or "").strip().lower().lstrip(".")
+            pattern = pattern.replace("%", "\\%").replace("_", "\\_")
+            pattern = pattern.replace("*", "%").replace("?", "_")
+            cursor.execute(
+                """
+                SELECT DISTINCT f.hash
+                FROM files f
+                JOIN metadata m ON f.hash = m.hash
+                WHERE LOWER(LTRIM(COALESCE(m.ext, ''), '.')) LIKE ? ESCAPE '\\'
+                LIMIT ?
+                """,
+                (pattern, limit or 10000),
+            )
+        else:
+            cursor.execute(
+                """
+                SELECT DISTINCT f.hash
+                FROM files f
+                JOIN metadata m ON f.hash = m.hash
+                WHERE LOWER(LTRIM(COALESCE(m.ext, ''), '.')) = ?
+                LIMIT ?
+                """,
+                (ext_clean, limit or 10000),
+            )
+        return {row[0] for row in cursor.fetchall()}
+
+    def get_files_by_ext(self, ext_value: str, limit: Optional[int] = None) -> List[tuple]:
+        """Get files whose metadata ext matches the given extension.
+
+        Returns (hash, file_path, size, ext) tuples.
+        """
+        ext_clean = str(ext_value or "").strip().lower().lstrip(".")
+        ext_clean = "".join(ch for ch in ext_clean if ch.isalnum())
+        if not ext_clean:
+            return []
+
+        cursor = self.get_cursor()
+        cursor.execute(
+            """
+            SELECT f.hash, f.file_path,
+                   COALESCE((SELECT size FROM metadata WHERE hash = f.hash), 0) as size,
+                   COALESCE((SELECT ext FROM metadata WHERE hash = f.hash), '') as ext
+            FROM files f
+            JOIN metadata m ON f.hash = m.hash
+            WHERE LOWER(LTRIM(COALESCE(m.ext, ''), '.')) = ?
+            ORDER BY f.file_path
+            LIMIT ?
+            """,
+            (ext_clean, limit or 10000),
+        )
+        return cursor.fetchall()
+
     def get_files_with_any_url(self, limit: Optional[int] = None) -> List[tuple]:
         """Get files that have any non-empty URL metadata.
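The glob branch in get_file_hashes_by_ext rewrites shell-style wildcards into LIKE syntax, escaping literal '%' and '_' first so they match themselves under ESCAPE '\'. A standalone sketch of that translation for testing without a database (the helper name is mine, not part of the commit):

def glob_to_like(ext_value: str) -> str:
    pattern = str(ext_value or "").strip().lower().lstrip(".")
    pattern = pattern.replace("%", "\\%").replace("_", "\\_")  # escape LIKE metacharacters
    pattern = pattern.replace("*", "%").replace("?", "_")      # translate glob wildcards
    return pattern

assert glob_to_like(".JP*") == "jp%"
assert glob_to_like("m?v4") == "m_v4"
assert glob_to_like("t*_v?") == "t%\\_v_"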