Add YAPF style + ignore, and format tracked Python files
Store/Folder.py (425 changed lines)
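The commit title says a YAPF style file and an ignore file were added, but neither file appears in the hunks excerpted below. Purely as a hedged sketch (the real knob values are not visible in this diff), a .style.yapf that produces the aggressive comma-splitting seen in the reformatted lines might look like:

    [style]
    based_on_style = pep8
    column_limit = 100
    split_all_comma_separated_values = true

A .yapfignore next to it would list one path glob per line for files YAPF should skip. With both files at the repository root, running yapf --in-place --recursive . applies the style to the tracked Python files, which is what the bulk of this diff shows.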
@@ -36,7 +36,11 @@ class Folder(Store):
# Track which locations have already been migrated to avoid repeated migrations
_migrated_locations = set()
# Cache scan results to avoid repeated full scans across repeated instantiations
_scan_cache: Dict[str, Tuple[bool, str, Dict[str, int]]] = {}
_scan_cache: Dict[str,
Tuple[bool,
str,
Dict[str,
int]]] = {}

def __new__(cls, *args: Any, **kwargs: Any) -> "Folder":
return super().__new__(cls)
@@ -62,7 +66,8 @@ class Folder(Store):
# Scan status (set during init)
self.scan_ok: bool = True
self.scan_detail: str = ""
self.scan_stats: Dict[str, int] = {}
self.scan_stats: Dict[str,
int] = {}

if self._location:
try:
@@ -95,10 +100,16 @@ class Folder(Store):
f" (Total: {total_db})" if total_db else ""
)
else:
detail = "Up to date" + (f" (Total: {total_db})" if total_db else "")
detail = "Up to date" + (
f" (Total: {total_db})" if total_db else ""
)
Folder._scan_cache[location_key] = (True, detail, dict(stats))
except Exception as exc:
Folder._scan_cache[location_key] = (False, f"Scan failed: {exc}", {})
Folder._scan_cache[location_key] = (
False,
f"Scan failed: {exc}",
{}
)

ok, detail, stats = Folder._scan_cache.get(location_key, (True, "", {}))
self.scan_ok = bool(ok)
@@ -154,8 +165,7 @@ class Folder(Store):

# Check if filename is already a hash (without extension)
if len(file_path.stem) == 64 and all(
c in "0123456789abcdef" for c in file_path.stem.lower()
):
c in "0123456789abcdef" for c in file_path.stem.lower()):
continue # Already migrated, will process in second pass

try:
@@ -179,7 +189,8 @@ class Folder(Store):
tags_to_add = list(tags)
# Check if title tag exists
has_title_tag = any(
t.lower().startswith("title:") for t in tags_to_add
t.lower().startswith("title:")
for t in tags_to_add
)
if url:
url_to_add = list(url)
@@ -202,7 +213,8 @@ class Folder(Store):
# Rename file to hash if needed
if hash_path != file_path and not hash_path.exists():
debug(
f"Migrating: {file_path.name} -> {hash_filename}", file=sys.stderr
f"Migrating: {file_path.name} -> {hash_filename}",
file=sys.stderr
)
file_path.rename(hash_path)

@@ -210,7 +222,8 @@ class Folder(Store):
try:
cursor.execute(
"UPDATE files SET file_path = ?, updated_at = CURRENT_TIMESTAMP WHERE hash = ?",
(db._to_db_file_path(hash_path), file_hash),
(db._to_db_file_path(hash_path),
file_hash),
)
except Exception:
pass
@@ -233,7 +246,8 @@ class Folder(Store):
if tags_to_add:
db.save_tags(hash_path, tags_to_add)
debug(
f"Added {len(tags_to_add)} tags to {file_hash}", file=sys.stderr
f"Added {len(tags_to_add)} tags to {file_hash}",
file=sys.stderr
)

# Note: url would need a separate table if you want to store them
@@ -245,7 +259,10 @@ class Folder(Store):
)

except Exception as exc:
debug(f"Failed to migrate file {file_path.name}: {exc}", file=sys.stderr)
debug(
f"Failed to migrate file {file_path.name}: {exc}",
file=sys.stderr
)

# Second pass: ensure all files in database have a title: tag
db.connection.commit()
@@ -267,7 +284,10 @@ class Folder(Store):
# Use the filename as the title
title_tag = f"title:{file_path.name}"
db.save_tags(file_path, [title_tag])
debug(f"Added title tag to {file_path.name}", file=sys.stderr)
debug(
f"Added title tag to {file_path.name}",
file=sys.stderr
)
except Exception as exc:
debug(
f"Failed to add title tag to file {file_path_str}: {exc}",
@@ -279,7 +299,8 @@ class Folder(Store):
# Third pass: discover files on disk that aren't in the database yet
# These are hash-named files that were added after initial indexing
cursor.execute("SELECT LOWER(hash) FROM files")
db_hashes = {row[0] for row in cursor.fetchall()}
db_hashes = {row[0]
for row in cursor.fetchall()}

discovered = 0
for file_path in sorted(location_path.rglob("*")):
@@ -287,8 +308,8 @@ class Folder(Store):
# Check if file name (without extension) is a 64-char hex hash
name_without_ext = file_path.stem
if len(name_without_ext) == 64 and all(
c in "0123456789abcdef" for c in name_without_ext.lower()
):
c in "0123456789abcdef"
for c in name_without_ext.lower()):
file_hash = name_without_ext.lower()

# Skip if already in DB
@@ -354,7 +375,8 @@ class Folder(Store):
# Extract title from tags if not explicitly provided
if not title:
for candidate in tag_list:
if isinstance(candidate, str) and candidate.lower().startswith("title:"):
if isinstance(candidate,
str) and candidate.lower().startswith("title:"):
title = candidate.split(":", 1)[1].strip()
break

@@ -364,7 +386,8 @@ class Folder(Store):

# Ensure title is in tags
title_tag = f"title:{title}"
if not any(str(candidate).lower().startswith("title:") for candidate in tag_list):
if not any(str(candidate).lower().startswith("title:")
for candidate in tag_list):
tag_list = [title_tag] + list(tag_list)

try:
@@ -426,7 +449,11 @@ class Folder(Store):
# After a move, the original path no longer exists; use destination for subsequent ops.
file_path = save_file
except Exception:
_copy_with_progress(file_path, save_file, label=f"folder:{self._name} move")
_copy_with_progress(
file_path,
save_file,
label=f"folder:{self._name} move"
)
try:
file_path.unlink(missing_ok=True) # type: ignore[arg-type]
except Exception:
@@ -438,7 +465,11 @@ class Folder(Store):
debug(f"Local move (copy+delete): {save_file}", file=sys.stderr)
file_path = save_file
else:
_copy_with_progress(file_path, save_file, label=f"folder:{self._name} copy")
_copy_with_progress(
file_path,
save_file,
label=f"folder:{self._name} copy"
)
debug(f"Local copy: {save_file}", file=sys.stderr)

# Best-effort: capture duration for media
@@ -512,11 +543,13 @@ class Folder(Store):
# Drop control / format chars (Cc/Cf) while preserving wildcard tokens when requested.
cleaned_chars: list[str] = []
for ch in s:
if allow_wildcards and ch in {"*", "?"}:
if allow_wildcards and ch in {"*",
"?"}:
cleaned_chars.append(ch)
continue
cat = unicodedata.category(ch)
if cat in {"Cc", "Cf"}:
if cat in {"Cc",
"Cf"}:
continue
cleaned_chars.append(ch)
s = "".join(cleaned_chars)
@@ -545,7 +578,11 @@ class Folder(Store):
sys_ext = _extract_system_filetype_ext(query_lower)
if sys_ext:
ext_filter = sys_ext
query_lower = re.sub(r"\s*\bsystem:filetype\s*(?:=\s*)?[^\s,]+", " ", query_lower)
query_lower = re.sub(
r"\s*\bsystem:filetype\s*(?:=\s*)?[^\s,]+",
" ",
query_lower
)
query_lower = re.sub(r"\s{2,}", " ", query_lower).strip().strip(",")
query = query_lower

@@ -554,7 +591,11 @@ class Folder(Store):
m = re.search(r"\bextension:([^\s,]+)", query_lower)
if m:
ext_filter = _normalize_ext_filter(m.group(1)) or None
query_lower = re.sub(r"\s*\b(?:ext|extension):[^\s,]+", " ", query_lower)
query_lower = re.sub(
r"\s*\b(?:ext|extension):[^\s,]+",
" ",
query_lower
)
query_lower = re.sub(r"\s{2,}", " ", query_lower).strip().strip(",")
query = query_lower
except Exception:
@@ -594,11 +635,19 @@ class Folder(Store):
return results

def _create_entry(
file_path: Path, tags: list[str], size_bytes: int | None, db_hash: Optional[str]
) -> dict[str, Any]:
file_path: Path,
tags: list[str],
size_bytes: int | None,
db_hash: Optional[str]
) -> dict[str,
Any]:
path_str = str(file_path)
# Get title from tags if available, otherwise use hash as fallback
title = next((t.split(":", 1)[1] for t in tags if t.lower().startswith("title:")), None)
title = next(
(t.split(":",
1)[1] for t in tags if t.lower().startswith("title:")),
None
)
if not title:
# Fallback to hash if no title tag exists
hash_value = _resolve_file_hash(db_hash, file_path)
@@ -636,7 +685,10 @@ class Folder(Store):
if ext_filter:
# Fetch a bounded set of hashes to intersect with other filters.
ext_fetch_limit = (limit or 45) * 50
ext_hashes = api.get_file_hashes_by_ext(ext_filter, limit=ext_fetch_limit)
ext_hashes = api.get_file_hashes_by_ext(
ext_filter,
limit=ext_fetch_limit
)

# ext-only search: query is empty (or coerced to match_all above).
if ext_filter and (not query_lower or query_lower == "*"):
@@ -653,7 +705,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -664,7 +721,11 @@ class Folder(Store):
if limit is not None and len(results) >= limit:
return results
backend_label = str(
getattr(self, "_name", "") or getattr(self, "NAME", "") or "folder"
getattr(self,
"_name",
"") or getattr(self,
"NAME",
"") or "folder"
)
debug(f"[folder:{backend_label}] {len(results)} result(s)")
return results
@@ -695,36 +756,49 @@ class Folder(Store):
|
||||
limit=url_fetch_limit
|
||||
)
|
||||
return api.get_file_hashes_by_url_like(
|
||||
_url_like_pattern(pattern), limit=url_fetch_limit
|
||||
_url_like_pattern(pattern),
|
||||
limit=url_fetch_limit
|
||||
)
|
||||
|
||||
if namespace == "system":
|
||||
# Hydrus-compatible query: system:filetype = png
|
||||
m_ft = re.match(r"^filetype\s*(?:=\s*)?(.+)$", pattern)
|
||||
m_ft = re.match(
|
||||
r"^filetype\s*(?:=\s*)?(.+)$",
|
||||
pattern
|
||||
)
|
||||
if m_ft:
|
||||
normalized_ext = _normalize_ext_filter(m_ft.group(1))
|
||||
normalized_ext = _normalize_ext_filter(
|
||||
m_ft.group(1)
|
||||
)
|
||||
if not normalized_ext:
|
||||
return set()
|
||||
return api.get_file_hashes_by_ext(
|
||||
normalized_ext, limit=url_fetch_limit
|
||||
normalized_ext,
|
||||
limit=url_fetch_limit
|
||||
)
|
||||
return set()
|
||||
|
||||
if namespace in {"ext", "extension"}:
|
||||
if namespace in {"ext",
|
||||
"extension"}:
|
||||
normalized_ext = _normalize_ext_filter(pattern)
|
||||
if not normalized_ext:
|
||||
return set()
|
||||
return api.get_file_hashes_by_ext(
|
||||
normalized_ext, limit=url_fetch_limit
|
||||
normalized_ext,
|
||||
limit=url_fetch_limit
|
||||
)
|
||||
|
||||
if namespace == "store":
|
||||
if pattern not in {"local", "file", "filesystem"}:
|
||||
if pattern not in {"local",
|
||||
"file",
|
||||
"filesystem"}:
|
||||
return set()
|
||||
return api.get_all_file_hashes()
|
||||
|
||||
query_pattern = f"{namespace}:%"
|
||||
tag_rows = api.get_file_hashes_by_tag_pattern(query_pattern)
|
||||
tag_rows = api.get_file_hashes_by_tag_pattern(
|
||||
query_pattern
|
||||
)
|
||||
matched: set[str] = set()
|
||||
for file_hash, tag_val in tag_rows:
|
||||
if not tag_val:
|
||||
@@ -733,9 +807,13 @@ class Folder(Store):
|
||||
if not tag_lower.startswith(f"{namespace}:"):
|
||||
continue
|
||||
value = _normalize_namespace_text(
|
||||
tag_lower[len(namespace) + 1 :], allow_wildcards=False
|
||||
tag_lower[len(namespace) + 1:],
|
||||
allow_wildcards=False
|
||||
)
|
||||
pat = _normalize_namespace_text(
|
||||
pattern,
|
||||
allow_wildcards=True
|
||||
)
|
||||
pat = _normalize_namespace_text(pattern, allow_wildcards=True)
|
||||
if fnmatch(value, pat):
|
||||
matched.add(file_hash)
|
||||
return matched
|
||||
@@ -744,24 +822,35 @@ class Folder(Store):
|
||||
like_pattern = f"%{_like_pattern(term)}%"
|
||||
# Unqualified token: match file path, title: tags, and non-namespaced tags.
|
||||
# Do NOT match other namespaces by default (e.g., artist:men at work).
|
||||
hashes = set(api.get_file_hashes_by_path_pattern(like_pattern) or set())
|
||||
hashes = set(
|
||||
api.get_file_hashes_by_path_pattern(like_pattern)
|
||||
or set()
|
||||
)
|
||||
|
||||
try:
|
||||
title_rows = api.get_files_by_namespace_pattern(
|
||||
f"title:{like_pattern}", url_fetch_limit
|
||||
f"title:{like_pattern}",
|
||||
url_fetch_limit
|
||||
)
|
||||
hashes.update(
|
||||
{row[0] for row in (title_rows or []) if row and row[0]}
|
||||
{
|
||||
row[0]
|
||||
for row in (title_rows or []) if row and row[0]
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
simple_rows = api.get_files_by_simple_tag_pattern(
|
||||
like_pattern, url_fetch_limit
|
||||
like_pattern,
|
||||
url_fetch_limit
|
||||
)
|
||||
hashes.update(
|
||||
{row[0] for row in (simple_rows or []) if row and row[0]}
|
||||
{
|
||||
row[0]
|
||||
for row in (simple_rows or []) if row and row[0]
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -773,13 +862,16 @@ class Folder(Store):
|
||||
for token in tokens:
|
||||
hashes = _ids_for_token(token)
|
||||
matching_hashes = (
|
||||
hashes if matching_hashes is None else matching_hashes & hashes
|
||||
hashes if matching_hashes is None else
|
||||
matching_hashes & hashes
|
||||
)
|
||||
if not matching_hashes:
|
||||
return results
|
||||
|
||||
if ext_hashes is not None:
|
||||
matching_hashes = (matching_hashes or set()) & ext_hashes
|
||||
matching_hashes = (
|
||||
matching_hashes or set()
|
||||
) & ext_hashes
|
||||
if not matching_hashes:
|
||||
return results
|
||||
|
||||
@@ -799,7 +891,12 @@ class Folder(Store):
|
||||
except OSError:
|
||||
size_bytes = None
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
@@ -840,7 +937,12 @@ class Folder(Store):
|
||||
except OSError:
|
||||
size_bytes = None
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
@@ -856,7 +958,10 @@ class Folder(Store):
|
||||
if not pattern or pattern == "*":
|
||||
rows = api.get_files_with_any_url(limit)
|
||||
else:
|
||||
rows = api.get_files_by_url_like(_url_like_pattern(pattern), limit)
|
||||
rows = api.get_files_by_url_like(
|
||||
_url_like_pattern(pattern),
|
||||
limit
|
||||
)
|
||||
for file_hash, file_path_str, size_bytes, ext in rows:
|
||||
if not file_path_str:
|
||||
continue
|
||||
@@ -869,7 +974,12 @@ class Folder(Store):
|
||||
except OSError:
|
||||
size_bytes = None
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
results.append(entry)
|
||||
if limit is not None and len(results) >= limit:
|
||||
return results
|
||||
@@ -895,7 +1005,12 @@ class Folder(Store):
|
||||
except OSError:
|
||||
size_bytes = None
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
@@ -907,7 +1022,8 @@ class Folder(Store):
|
||||
return results
|
||||
return results
|
||||
|
||||
if namespace in {"ext", "extension"}:
|
||||
if namespace in {"ext",
|
||||
"extension"}:
|
||||
normalized_ext = _normalize_ext_filter(pattern)
|
||||
if not normalized_ext:
|
||||
return results
|
||||
@@ -924,7 +1040,12 @@ class Folder(Store):
|
||||
except OSError:
|
||||
size_bytes = None
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
@@ -944,15 +1065,22 @@ class Folder(Store):
|
||||
if not file_path_str:
|
||||
continue
|
||||
|
||||
tags = api.get_tags_by_namespace_and_file(file_hash, query_pattern)
|
||||
tags = api.get_tags_by_namespace_and_file(
|
||||
file_hash,
|
||||
query_pattern
|
||||
)
|
||||
|
||||
for tag in tags:
|
||||
tag_lower = tag.lower()
|
||||
if tag_lower.startswith(f"{namespace}:"):
|
||||
value = _normalize_namespace_text(
|
||||
tag_lower[len(namespace) + 1 :], allow_wildcards=False
|
||||
tag_lower[len(namespace) + 1:],
|
||||
allow_wildcards=False
|
||||
)
|
||||
pat = _normalize_namespace_text(
|
||||
pattern,
|
||||
allow_wildcards=True
|
||||
)
|
||||
pat = _normalize_namespace_text(pattern, allow_wildcards=True)
|
||||
if fnmatch(value, pat):
|
||||
if ext_hashes is not None and file_hash not in ext_hashes:
|
||||
break
|
||||
@@ -962,10 +1090,14 @@ class Folder(Store):
|
||||
size_bytes = file_path.stat().st_size
|
||||
all_tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(
|
||||
file_path, all_tags, size_bytes, file_hash
|
||||
file_path,
|
||||
all_tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
db_ext = str(ext
|
||||
or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
entry["ext"] = db_ext
|
||||
except Exception:
|
||||
@@ -985,7 +1117,8 @@ class Folder(Store):
|
||||
# - non-namespaced tag
|
||||
# Other namespaces (artist:, series:, etc.) are excluded unless explicitly queried.
|
||||
terms = [
|
||||
t.strip() for t in query_lower.replace(",", " ").split() if t.strip()
|
||||
t.strip() for t in query_lower.replace(",", " ").split()
|
||||
if t.strip()
|
||||
]
|
||||
if not terms:
|
||||
terms = [query_lower]
|
||||
@@ -1009,20 +1142,28 @@ class Folder(Store):
|
||||
|
||||
try:
|
||||
title_rows = api.get_files_by_namespace_pattern(
|
||||
f"title:{like_pattern}", fetch_limit
|
||||
f"title:{like_pattern}",
|
||||
fetch_limit
|
||||
)
|
||||
term_hashes.update(
|
||||
{row[0] for row in (title_rows or []) if row and row[0]}
|
||||
{
|
||||
row[0]
|
||||
for row in (title_rows or []) if row and row[0]
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
simple_rows = api.get_files_by_simple_tag_pattern(
|
||||
like_pattern, fetch_limit
|
||||
like_pattern,
|
||||
fetch_limit
|
||||
)
|
||||
term_hashes.update(
|
||||
{row[0] for row in (simple_rows or []) if row and row[0]}
|
||||
{
|
||||
row[0]
|
||||
for row in (simple_rows or []) if row and row[0]
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -1031,9 +1172,8 @@ class Folder(Store):
|
||||
term_hashes &= ext_hashes
|
||||
|
||||
matching_hashes = (
|
||||
term_hashes
|
||||
if matching_hashes is None
|
||||
else (matching_hashes & term_hashes)
|
||||
term_hashes if matching_hashes is None else
|
||||
(matching_hashes & term_hashes)
|
||||
)
|
||||
if not matching_hashes:
|
||||
return results
|
||||
@@ -1054,7 +1194,12 @@ class Folder(Store):
|
||||
except OSError:
|
||||
size_bytes = None
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry_obj = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry_obj = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
@@ -1077,7 +1222,12 @@ class Folder(Store):
|
||||
size_bytes = file_path.stat().st_size
|
||||
|
||||
tags = api.get_tags_for_file(file_hash)
|
||||
entry = _create_entry(file_path, tags, size_bytes, file_hash)
|
||||
entry = _create_entry(
|
||||
file_path,
|
||||
tags,
|
||||
size_bytes,
|
||||
file_hash
|
||||
)
|
||||
try:
|
||||
db_ext = str(ext or "").strip().lstrip(".")
|
||||
if db_ext:
|
||||
@@ -1087,7 +1237,11 @@ class Folder(Store):
|
||||
results.append(entry)
|
||||
|
||||
backend_label = str(
|
||||
getattr(self, "_name", "") or getattr(self, "NAME", "") or "folder"
|
||||
getattr(self,
|
||||
"_name",
|
||||
"") or getattr(self,
|
||||
"NAME",
|
||||
"") or "folder"
|
||||
)
|
||||
debug(f"[folder:{backend_label}] {len(results)} result(s)")
|
||||
return results
|
||||
@@ -1101,7 +1255,10 @@ class Folder(Store):
|
||||
log(f"❌ Local search failed: {exc}", file=sys.stderr)
|
||||
raise
|
||||
|
||||
def _resolve_library_root(self, file_path: Path, config: Dict[str, Any]) -> Optional[Path]:
|
||||
def _resolve_library_root(self,
|
||||
file_path: Path,
|
||||
config: Dict[str,
|
||||
Any]) -> Optional[Path]:
|
||||
"""Return the library root containing medios-macina.db.
|
||||
|
||||
Prefer the store's configured location, then config override, then walk parents
|
||||
@@ -1188,7 +1345,8 @@ class Folder(Store):
|
||||
"""
|
||||
SELECT * FROM metadata WHERE hash = ?
|
||||
""",
|
||||
(file_hash_result,),
|
||||
(file_hash_result,
|
||||
),
|
||||
)
|
||||
|
||||
row = cursor.fetchone()
|
||||
@@ -1259,7 +1417,8 @@ class Folder(Store):
|
||||
try:
|
||||
with API_folder_store(Path(self._location)) as db:
|
||||
existing_tags = [
|
||||
t for t in (db.get_tags(hash) or []) if isinstance(t, str) and t.strip()
|
||||
t for t in (db.get_tags(hash) or [])
|
||||
if isinstance(t, str) and t.strip()
|
||||
]
|
||||
|
||||
from metadata import compute_namespaced_tag_overwrite
|
||||
@@ -1273,13 +1432,16 @@ class Folder(Store):
|
||||
# Folder DB tag table is case-sensitive and add_tags_to_hash() is additive.
|
||||
# To enforce lowercase-only tags and namespace overwrites, rewrite the full tag set.
|
||||
cursor = db.connection.cursor()
|
||||
cursor.execute("DELETE FROM tags WHERE hash = ?", (hash,))
|
||||
cursor.execute("DELETE FROM tags WHERE hash = ?",
|
||||
(hash,
|
||||
))
|
||||
for t in merged:
|
||||
t = str(t).strip().lower()
|
||||
if t:
|
||||
cursor.execute(
|
||||
"INSERT OR IGNORE INTO tags (hash, tag) VALUES (?, ?)",
|
||||
(hash, t),
|
||||
(hash,
|
||||
t),
|
||||
)
|
||||
db.connection.commit()
|
||||
try:
|
||||
@@ -1304,8 +1466,7 @@ class Folder(Store):
|
||||
try:
|
||||
with API_folder_store(Path(self._location)) as db:
|
||||
tag_list = [
|
||||
str(t).strip().lower()
|
||||
for t in (tags or [])
|
||||
str(t).strip().lower() for t in (tags or [])
|
||||
if isinstance(t, str) and str(t).strip()
|
||||
]
|
||||
if not tag_list:
|
||||
@@ -1362,7 +1523,12 @@ class Folder(Store):
|
||||
existing_urls.append(u)
|
||||
changed = True
|
||||
if changed:
|
||||
db.update_metadata_by_hash(file_hash, {"url": existing_urls})
|
||||
db.update_metadata_by_hash(
|
||||
file_hash,
|
||||
{
|
||||
"url": existing_urls
|
||||
}
|
||||
)
|
||||
return True
|
||||
except Exception as exc:
|
||||
debug(f"Local DB add_url failed: {exc}")
|
||||
@@ -1388,7 +1554,8 @@ class Folder(Store):
|
||||
except Exception:
|
||||
normalize_urls = None # type: ignore
|
||||
|
||||
merged_by_hash: Dict[str, List[str]] = {}
|
||||
merged_by_hash: Dict[str,
|
||||
List[str]] = {}
|
||||
for file_identifier, url_list in items or []:
|
||||
file_hash = str(file_identifier or "").strip().lower()
|
||||
if not file_hash:
|
||||
@@ -1399,9 +1566,13 @@ class Folder(Store):
|
||||
try:
|
||||
incoming = normalize_urls(url_list)
|
||||
except Exception:
|
||||
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
|
||||
incoming = [
|
||||
str(u).strip() for u in (url_list or []) if str(u).strip()
|
||||
]
|
||||
else:
|
||||
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
|
||||
incoming = [
|
||||
str(u).strip() for u in (url_list or []) if str(u).strip()
|
||||
]
|
||||
|
||||
if not incoming:
|
||||
continue
|
||||
@@ -1427,23 +1598,30 @@ class Folder(Store):
|
||||
for file_hash in merged_by_hash.keys():
|
||||
try:
|
||||
cursor.execute(
|
||||
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)", (file_hash,)
|
||||
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)",
|
||||
(file_hash,
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Load existing urls for all hashes in chunks.
|
||||
existing_urls_by_hash: Dict[str, List[str]] = {h: [] for h in merged_by_hash.keys()}
|
||||
existing_urls_by_hash: Dict[str,
|
||||
List[str]] = {
|
||||
h: []
|
||||
for h in merged_by_hash.keys()
|
||||
}
|
||||
hashes = list(merged_by_hash.keys())
|
||||
chunk_size = 400
|
||||
for i in range(0, len(hashes), chunk_size):
|
||||
chunk = hashes[i : i + chunk_size]
|
||||
chunk = hashes[i:i + chunk_size]
|
||||
if not chunk:
|
||||
continue
|
||||
placeholders = ",".join(["?"] * len(chunk))
|
||||
try:
|
||||
cursor.execute(
|
||||
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})", chunk
|
||||
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})",
|
||||
chunk
|
||||
)
|
||||
rows = cursor.fetchall() or []
|
||||
except Exception:
|
||||
@@ -1469,7 +1647,8 @@ class Folder(Store):
|
||||
else:
|
||||
if isinstance(parsed, list):
|
||||
parsed_urls = [
|
||||
str(u).strip() for u in parsed if str(u).strip()
|
||||
str(u).strip() for u in parsed
|
||||
if str(u).strip()
|
||||
]
|
||||
except Exception:
|
||||
parsed_urls = []
|
||||
@@ -1515,12 +1694,16 @@ class Folder(Store):
|
||||
with API_folder_store(Path(self._location)) as db:
|
||||
meta = db.get_metadata(file_hash) or {}
|
||||
existing_urls = normalize_urls(meta.get("url"))
|
||||
remove_set = {u for u in normalize_urls(url) if u}
|
||||
remove_set = {u
|
||||
for u in normalize_urls(url) if u}
|
||||
if not remove_set:
|
||||
return False
|
||||
new_urls = [u for u in existing_urls if u not in remove_set]
|
||||
if new_urls != existing_urls:
|
||||
db.update_metadata_by_hash(file_hash, {"url": new_urls})
|
||||
db.update_metadata_by_hash(file_hash,
|
||||
{
|
||||
"url": new_urls
|
||||
})
|
||||
return True
|
||||
except Exception as exc:
|
||||
debug(f"Local DB delete_url failed: {exc}")
|
||||
@@ -1529,7 +1712,12 @@ class Folder(Store):
|
||||
debug(f"delete_url failed for local file: {exc}")
|
||||
return False
|
||||
|
||||
def delete_url_bulk(self, items: List[tuple[str, List[str]]], **kwargs: Any) -> bool:
|
||||
def delete_url_bulk(
|
||||
self,
|
||||
items: List[tuple[str,
|
||||
List[str]]],
|
||||
**kwargs: Any
|
||||
) -> bool:
|
||||
"""Delete known urls from many local files in one DB session."""
|
||||
from API.folder import API_folder_store
|
||||
|
||||
@@ -1542,7 +1730,8 @@ class Folder(Store):
|
||||
except Exception:
|
||||
normalize_urls = None # type: ignore
|
||||
|
||||
remove_by_hash: Dict[str, set[str]] = {}
|
||||
remove_by_hash: Dict[str,
|
||||
set[str]] = {}
|
||||
for file_identifier, url_list in items or []:
|
||||
file_hash = str(file_identifier or "").strip().lower()
|
||||
if not file_hash:
|
||||
@@ -1553,11 +1742,16 @@ class Folder(Store):
|
||||
try:
|
||||
incoming = normalize_urls(url_list)
|
||||
except Exception:
|
||||
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
|
||||
incoming = [
|
||||
str(u).strip() for u in (url_list or []) if str(u).strip()
|
||||
]
|
||||
else:
|
||||
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
|
||||
incoming = [
|
||||
str(u).strip() for u in (url_list or []) if str(u).strip()
|
||||
]
|
||||
|
||||
remove = {u for u in incoming if u}
|
||||
remove = {u
|
||||
for u in incoming if u}
|
||||
if not remove:
|
||||
continue
|
||||
remove_by_hash.setdefault(file_hash, set()).update(remove)
|
||||
@@ -1577,23 +1771,30 @@ class Folder(Store):
|
||||
for file_hash in remove_by_hash.keys():
|
||||
try:
|
||||
cursor.execute(
|
||||
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)", (file_hash,)
|
||||
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)",
|
||||
(file_hash,
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Load existing urls for hashes in chunks.
|
||||
existing_urls_by_hash: Dict[str, List[str]] = {h: [] for h in remove_by_hash.keys()}
|
||||
existing_urls_by_hash: Dict[str,
|
||||
List[str]] = {
|
||||
h: []
|
||||
for h in remove_by_hash.keys()
|
||||
}
|
||||
hashes = list(remove_by_hash.keys())
|
||||
chunk_size = 400
|
||||
for i in range(0, len(hashes), chunk_size):
|
||||
chunk = hashes[i : i + chunk_size]
|
||||
chunk = hashes[i:i + chunk_size]
|
||||
if not chunk:
|
||||
continue
|
||||
placeholders = ",".join(["?"] * len(chunk))
|
||||
try:
|
||||
cursor.execute(
|
||||
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})", chunk
|
||||
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})",
|
||||
chunk
|
||||
)
|
||||
rows = cursor.fetchall() or []
|
||||
except Exception:
|
||||
@@ -1619,7 +1820,8 @@ class Folder(Store):
|
||||
else:
|
||||
if isinstance(parsed, list):
|
||||
parsed_urls = [
|
||||
str(u).strip() for u in parsed if str(u).strip()
|
||||
str(u).strip() for u in parsed
|
||||
if str(u).strip()
|
||||
]
|
||||
except Exception:
|
||||
parsed_urls = []
|
||||
@@ -1663,15 +1865,24 @@ class Folder(Store):
|
||||
getter = getattr(db, "get_notes", None)
|
||||
if callable(getter):
|
||||
notes = getter(file_hash)
|
||||
return notes if isinstance(notes, dict) else {}
|
||||
return notes if isinstance(notes,
|
||||
dict) else {}
|
||||
# Fallback: default-only
|
||||
note = db.get_note(file_hash)
|
||||
return {"default": str(note or "")} if note else {}
|
||||
return {
|
||||
"default": str(note or "")
|
||||
} if note else {}
|
||||
except Exception as exc:
|
||||
debug(f"get_note failed for local file: {exc}")
|
||||
return {}
|
||||
|
||||
def set_note(self, file_identifier: str, name: str, text: str, **kwargs: Any) -> bool:
|
||||
def set_note(
|
||||
self,
|
||||
file_identifier: str,
|
||||
name: str,
|
||||
text: str,
|
||||
**kwargs: Any
|
||||
) -> bool:
|
||||
"""Set a named note for a local file by hash."""
|
||||
from API.folder import API_folder_store
|
||||
|
||||
@@ -1683,7 +1894,8 @@ class Folder(Store):
|
||||
return False
|
||||
|
||||
file_path = self.get_file(file_hash, **kwargs)
|
||||
if not file_path or not isinstance(file_path, Path) or not file_path.exists():
|
||||
if not file_path or not isinstance(file_path,
|
||||
Path) or not file_path.exists():
|
||||
return False
|
||||
|
||||
with API_folder_store(Path(self._location)) as db:
|
||||
@@ -1729,11 +1941,13 @@ class Folder(Store):
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Look up file paths for hashes in chunks (to verify existence).
|
||||
wanted_hashes = sorted({h for (h, _n, _t) in normalized})
|
||||
hash_to_path: Dict[str, str] = {}
|
||||
wanted_hashes = sorted({h
|
||||
for (h, _n, _t) in normalized})
|
||||
hash_to_path: Dict[str,
|
||||
str] = {}
|
||||
chunk_size = 400
|
||||
for i in range(0, len(wanted_hashes), chunk_size):
|
||||
chunk = wanted_hashes[i : i + chunk_size]
|
||||
chunk = wanted_hashes[i:i + chunk_size]
|
||||
if not chunk:
|
||||
continue
|
||||
placeholders = ",".join(["?"] * len(chunk))
|
||||
@@ -1809,7 +2023,8 @@ class Folder(Store):
|
||||
deleter2 = getattr(db, "save_note", None)
|
||||
if callable(deleter2):
|
||||
file_path = self.get_file(file_hash, **kwargs)
|
||||
if file_path and isinstance(file_path, Path) and file_path.exists():
|
||||
if file_path and isinstance(file_path,
|
||||
Path) and file_path.exists():
|
||||
deleter2(file_path, "")
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -12,8 +12,10 @@ from SYS.utils_constant import mime_maps
|
||||
|
||||
from Store._base import Store
|
||||
|
||||
|
||||
_HYDRUS_INIT_CHECK_CACHE: dict[tuple[str, str], tuple[bool, Optional[str]]] = {}
|
||||
_HYDRUS_INIT_CHECK_CACHE: dict[tuple[str,
|
||||
str],
|
||||
tuple[bool,
|
||||
Optional[str]]] = {}
|
||||
|
||||
|
||||
class HydrusNetwork(Store):
|
||||
@@ -85,21 +87,29 @@ class HydrusNetwork(Store):
|
||||
if cached is not None:
|
||||
ok, err = cached
|
||||
if not ok:
|
||||
raise RuntimeError(f"Hydrus '{self.NAME}' unavailable: {err or 'Unavailable'}")
|
||||
raise RuntimeError(
|
||||
f"Hydrus '{self.NAME}' unavailable: {err or 'Unavailable'}"
|
||||
)
|
||||
else:
|
||||
api_version_url = f"{self.URL}/api_version"
|
||||
verify_key_url = f"{self.URL}/verify_access_key"
|
||||
try:
|
||||
with httpx.Client(timeout=5.0, verify=False, follow_redirects=True) as client:
|
||||
with httpx.Client(timeout=5.0,
|
||||
verify=False,
|
||||
follow_redirects=True) as client:
|
||||
version_resp = client.get(api_version_url)
|
||||
version_resp.raise_for_status()
|
||||
version_payload = version_resp.json()
|
||||
if not isinstance(version_payload, dict):
|
||||
raise RuntimeError("Hydrus /api_version returned an unexpected response")
|
||||
raise RuntimeError(
|
||||
"Hydrus /api_version returned an unexpected response"
|
||||
)
|
||||
|
||||
verify_resp = client.get(
|
||||
verify_key_url,
|
||||
headers={"Hydrus-Client-API-Access-Key": self.API},
|
||||
headers={
|
||||
"Hydrus-Client-API-Access-Key": self.API
|
||||
},
|
||||
)
|
||||
verify_resp.raise_for_status()
|
||||
verify_payload = verify_resp.json()
|
||||
@@ -115,7 +125,11 @@ class HydrusNetwork(Store):
|
||||
raise RuntimeError(f"Hydrus '{self.NAME}' unavailable: {err}") from exc
|
||||
|
||||
# Create a persistent client for this instance (auth via access key by default).
|
||||
self._client = HydrusClient(url=self.URL, access_key=self.API, instance_name=self.NAME)
|
||||
self._client = HydrusClient(
|
||||
url=self.URL,
|
||||
access_key=self.API,
|
||||
instance_name=self.NAME
|
||||
)
|
||||
|
||||
# Best-effort total count (used for startup diagnostics). Avoid heavy payloads.
|
||||
# Some Hydrus setups appear to return no count via the CBOR client for this endpoint,
|
||||
@@ -149,7 +163,9 @@ class HydrusNetwork(Store):
|
||||
"Hydrus-Client-API-Access-Key": self.API,
|
||||
"Accept": "application/json",
|
||||
}
|
||||
with httpx.Client(timeout=5.0, verify=False, follow_redirects=True) as client:
|
||||
with httpx.Client(timeout=5.0,
|
||||
verify=False,
|
||||
follow_redirects=True) as client:
|
||||
resp = client.get(url, params=params, headers=headers)
|
||||
resp.raise_for_status()
|
||||
payload = resp.json()
|
||||
@@ -165,7 +181,10 @@ class HydrusNetwork(Store):
|
||||
self.total_count = count_val
|
||||
return self.total_count
|
||||
except Exception as exc:
|
||||
debug(f"{self._log_prefix()} total count (json) unavailable: {exc}", file=sys.stderr)
|
||||
debug(
|
||||
f"{self._log_prefix()} total count (json) unavailable: {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
# 2) Fallback to the API client (CBOR).
|
||||
try:
|
||||
@@ -186,7 +205,10 @@ class HydrusNetwork(Store):
|
||||
self.total_count = count_val
|
||||
return self.total_count
|
||||
except Exception as exc:
|
||||
debug(f"{self._log_prefix()} total count (client) unavailable: {exc}", file=sys.stderr)
|
||||
debug(
|
||||
f"{self._log_prefix()} total count (client) unavailable: {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
return self.total_count
|
||||
|
||||
@@ -220,13 +242,13 @@ class HydrusNetwork(Store):
|
||||
# Add title to tags if provided and not already present
|
||||
if title:
|
||||
title_tag = f"title:{title}".strip().lower()
|
||||
if not any(str(candidate).lower().startswith("title:") for candidate in tag_list):
|
||||
if not any(str(candidate).lower().startswith("title:")
|
||||
for candidate in tag_list):
|
||||
tag_list = [title_tag] + list(tag_list)
|
||||
|
||||
# Hydrus is lowercase-only tags; normalize here for consistency.
|
||||
tag_list = [
|
||||
str(t).strip().lower()
|
||||
for t in (tag_list or [])
|
||||
str(t).strip().lower() for t in (tag_list or [])
|
||||
if isinstance(t, str) and str(t).strip()
|
||||
]
|
||||
|
||||
@@ -257,7 +279,8 @@ class HydrusNetwork(Store):
|
||||
# Hydrus returns placeholder rows for unknown hashes.
|
||||
# Only treat as a real duplicate if it has a concrete file_id.
|
||||
for meta in metas:
|
||||
if isinstance(meta, dict) and meta.get("file_id") is not None:
|
||||
if isinstance(meta,
|
||||
dict) and meta.get("file_id") is not None:
|
||||
file_exists = True
|
||||
break
|
||||
if file_exists:
|
||||
@@ -278,7 +301,10 @@ class HydrusNetwork(Store):
|
||||
|
||||
# Upload file if not already present
|
||||
if not file_exists:
|
||||
log(f"{self._log_prefix()} Uploading: {file_path.name}", file=sys.stderr)
|
||||
log(
|
||||
f"{self._log_prefix()} Uploading: {file_path.name}",
|
||||
file=sys.stderr
|
||||
)
|
||||
response = client.add_file(file_path)
|
||||
|
||||
# Extract hash from response
|
||||
@@ -305,16 +331,25 @@ class HydrusNetwork(Store):
|
||||
service_name = "my tags"
|
||||
|
||||
try:
|
||||
debug(f"{self._log_prefix()} Adding {len(tag_list)} tag(s): {tag_list}")
|
||||
debug(
|
||||
f"{self._log_prefix()} Adding {len(tag_list)} tag(s): {tag_list}"
|
||||
)
|
||||
client.add_tag(file_hash, tag_list, service_name)
|
||||
log(f"{self._log_prefix()} Tags added via '{service_name}'", file=sys.stderr)
|
||||
log(
|
||||
f"{self._log_prefix()} Tags added via '{service_name}'",
|
||||
file=sys.stderr
|
||||
)
|
||||
except Exception as exc:
|
||||
log(f"{self._log_prefix()} ⚠️ Failed to add tags: {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"{self._log_prefix()} ⚠️ Failed to add tags: {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
# Associate url if provided (both for new and existing files)
|
||||
if url:
|
||||
log(
|
||||
f"{self._log_prefix()} Associating {len(url)} URL(s) with file", file=sys.stderr
|
||||
f"{self._log_prefix()} Associating {len(url)} URL(s) with file",
|
||||
file=sys.stderr
|
||||
)
|
||||
for url in url:
|
||||
if url:
|
||||
@@ -378,8 +413,11 @@ class HydrusNetwork(Store):
|
||||
return []
|
||||
|
||||
def _iter_url_filtered_metadata(
|
||||
url_value: str | None, want_any: bool, fetch_limit: int
|
||||
) -> list[dict[str, Any]]:
|
||||
url_value: str | None,
|
||||
want_any: bool,
|
||||
fetch_limit: int
|
||||
) -> list[dict[str,
|
||||
Any]]:
|
||||
"""Best-effort URL search by scanning Hydrus metadata with include_file_url=True."""
|
||||
|
||||
# First try a fast system predicate if Hydrus supports it.
|
||||
@@ -393,12 +431,14 @@ class HydrusNetwork(Store):
|
||||
return_file_ids=True,
|
||||
return_file_count=False,
|
||||
)
|
||||
ids = url_search.get("file_ids", []) if isinstance(url_search, dict) else []
|
||||
ids = url_search.get("file_ids",
|
||||
[]) if isinstance(url_search,
|
||||
dict) else []
|
||||
if isinstance(ids, list):
|
||||
candidate_file_ids = [
|
||||
int(x)
|
||||
for x in ids
|
||||
if isinstance(x, (int, float, str)) and str(x).strip().isdigit()
|
||||
int(x) for x in ids
|
||||
if isinstance(x, (int, float,
|
||||
str)) and str(x).strip().isdigit()
|
||||
]
|
||||
except Exception:
|
||||
candidate_file_ids = []
|
||||
@@ -411,9 +451,13 @@ class HydrusNetwork(Store):
|
||||
return_file_ids=True,
|
||||
return_file_count=False,
|
||||
)
|
||||
ids = everything.get("file_ids", []) if isinstance(everything, dict) else []
|
||||
ids = everything.get("file_ids",
|
||||
[]) if isinstance(everything,
|
||||
dict) else []
|
||||
if isinstance(ids, list):
|
||||
candidate_file_ids = [int(x) for x in ids if isinstance(x, (int, float))]
|
||||
candidate_file_ids = [
|
||||
int(x) for x in ids if isinstance(x, (int, float))
|
||||
]
|
||||
|
||||
if not candidate_file_ids:
|
||||
return []
|
||||
@@ -425,7 +469,7 @@ class HydrusNetwork(Store):
|
||||
for start in range(0, len(candidate_file_ids), chunk_size):
|
||||
if len(out) >= fetch_limit:
|
||||
break
|
||||
chunk = candidate_file_ids[start : start + chunk_size]
|
||||
chunk = candidate_file_ids[start:start + chunk_size]
|
||||
try:
|
||||
payload = client.fetch_file_metadata(
|
||||
file_ids=chunk,
|
||||
@@ -438,7 +482,9 @@ class HydrusNetwork(Store):
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
metas = payload.get("metadata", []) if isinstance(payload, dict) else []
|
||||
metas = payload.get("metadata",
|
||||
[]) if isinstance(payload,
|
||||
dict) else []
|
||||
if not isinstance(metas, list):
|
||||
continue
|
||||
|
||||
@@ -480,7 +526,11 @@ class HydrusNetwork(Store):
|
||||
m = re.search(r"\bextension:([^\s,]+)", query_lower)
|
||||
if m:
|
||||
ext_filter = _normalize_ext_filter(m.group(1)) or None
|
||||
query_lower = re.sub(r"\s*\b(?:ext|extension):[^\s,]+", " ", query_lower)
|
||||
query_lower = re.sub(
|
||||
r"\s*\b(?:ext|extension):[^\s,]+",
|
||||
" ",
|
||||
query_lower
|
||||
)
|
||||
query_lower = re.sub(r"\s{2,}", " ", query_lower).strip().strip(",")
|
||||
query = query_lower
|
||||
if ext_filter and not query_lower:
|
||||
@@ -504,20 +554,27 @@ class HydrusNetwork(Store):
|
||||
if namespace == "url":
|
||||
if not pattern or pattern == "*":
|
||||
metadata_list = _iter_url_filtered_metadata(
|
||||
None, want_any=True, fetch_limit=int(limit) if limit else 100
|
||||
None,
|
||||
want_any=True,
|
||||
fetch_limit=int(limit) if limit else 100
|
||||
)
|
||||
else:
|
||||
# Fast-path: exact URL via /add_urls/get_url_files when a full URL is provided.
|
||||
try:
|
||||
if pattern.startswith("http://") or pattern.startswith("https://"):
|
||||
if pattern.startswith("http://") or pattern.startswith(
|
||||
"https://"):
|
||||
from API.HydrusNetwork import HydrusRequestSpec
|
||||
|
||||
spec = HydrusRequestSpec(
|
||||
method="GET",
|
||||
endpoint="/add_urls/get_url_files",
|
||||
query={"url": pattern},
|
||||
query={
|
||||
"url": pattern
|
||||
},
|
||||
)
|
||||
response = client._perform_request(spec) # type: ignore[attr-defined]
|
||||
response = client._perform_request(
|
||||
spec
|
||||
) # type: ignore[attr-defined]
|
||||
hashes: list[str] = []
|
||||
file_ids: list[int] = []
|
||||
if isinstance(response, dict):
|
||||
@@ -526,8 +583,7 @@ class HydrusNetwork(Store):
|
||||
)
|
||||
if isinstance(raw_hashes, list):
|
||||
hashes = [
|
||||
str(h).strip()
|
||||
for h in raw_hashes
|
||||
str(h).strip() for h in raw_hashes
|
||||
if isinstance(h, str) and str(h).strip()
|
||||
]
|
||||
raw_ids = response.get("file_ids")
|
||||
@@ -548,12 +604,14 @@ class HydrusNetwork(Store):
|
||||
include_mime=True,
|
||||
)
|
||||
metas = (
|
||||
payload.get("metadata", [])
|
||||
if isinstance(payload, dict)
|
||||
else []
|
||||
payload.get("metadata",
|
||||
[]) if isinstance(payload,
|
||||
dict) else []
|
||||
)
|
||||
if isinstance(metas, list):
|
||||
metadata_list = [m for m in metas if isinstance(m, dict)]
|
||||
metadata_list = [
|
||||
m for m in metas if isinstance(m, dict)
|
||||
]
|
||||
elif hashes:
|
||||
payload = client.fetch_file_metadata(
|
||||
hashes=hashes,
|
||||
@@ -564,19 +622,23 @@ class HydrusNetwork(Store):
|
||||
include_mime=True,
|
||||
)
|
||||
metas = (
|
||||
payload.get("metadata", [])
|
||||
if isinstance(payload, dict)
|
||||
else []
|
||||
payload.get("metadata",
|
||||
[]) if isinstance(payload,
|
||||
dict) else []
|
||||
)
|
||||
if isinstance(metas, list):
|
||||
metadata_list = [m for m in metas if isinstance(m, dict)]
|
||||
metadata_list = [
|
||||
m for m in metas if isinstance(m, dict)
|
||||
]
|
||||
except Exception:
|
||||
metadata_list = None
|
||||
|
||||
# Fallback: substring scan
|
||||
if metadata_list is None:
|
||||
metadata_list = _iter_url_filtered_metadata(
|
||||
pattern, want_any=False, fetch_limit=int(limit) if limit else 100
|
||||
pattern,
|
||||
want_any=False,
|
||||
fetch_limit=int(limit) if limit else 100
|
||||
)
|
||||
|
||||
# Parse the query into tags
|
||||
@@ -624,7 +686,8 @@ class HydrusNetwork(Store):
|
||||
continue
|
||||
if isinstance(raw_hashes, list):
|
||||
hashes_out = [
|
||||
str(h).strip() for h in raw_hashes if isinstance(h, str) and str(h).strip()
|
||||
str(h).strip() for h in raw_hashes
|
||||
if isinstance(h, str) and str(h).strip()
|
||||
]
|
||||
return ids_out, hashes_out
|
||||
|
||||
@@ -676,7 +739,9 @@ class HydrusNetwork(Store):
|
||||
return []
|
||||
|
||||
search_result = client.search_files(
|
||||
tags=tags, return_hashes=True, return_file_ids=True
|
||||
tags=tags,
|
||||
return_hashes=True,
|
||||
return_file_ids=True
|
||||
)
|
||||
file_ids, hashes = _extract_search_ids(search_result)
|
||||
|
||||
@@ -694,7 +759,7 @@ class HydrusNetwork(Store):
|
||||
for start in range(0, len(file_ids), chunk_size):
|
||||
if len(results) >= limit:
|
||||
break
|
||||
chunk = file_ids[start : start + chunk_size]
|
||||
chunk = file_ids[start:start + chunk_size]
|
||||
try:
|
||||
payload = client.fetch_file_metadata(
|
||||
file_ids=chunk,
|
||||
@@ -706,7 +771,9 @@ class HydrusNetwork(Store):
|
||||
)
|
||||
except Exception:
|
||||
continue
|
||||
metas = payload.get("metadata", []) if isinstance(payload, dict) else []
|
||||
metas = payload.get("metadata",
|
||||
[]) if isinstance(payload,
|
||||
dict) else []
|
||||
if not isinstance(metas, list):
|
||||
continue
|
||||
for meta in metas:
|
||||
@@ -720,7 +787,9 @@ class HydrusNetwork(Store):
|
||||
for category in mime_maps.values():
|
||||
for _ext_key, info in category.items():
|
||||
if mime_type in info.get("mimes", []):
|
||||
ext = str(info.get("ext", "")).strip().lstrip(".")
|
||||
ext = str(info.get("ext",
|
||||
"")
|
||||
).strip().lstrip(".")
|
||||
break
|
||||
if ext:
|
||||
break
|
||||
@@ -731,7 +800,8 @@ class HydrusNetwork(Store):
|
||||
hash_hex = meta.get("hash")
|
||||
size = meta.get("size", 0)
|
||||
|
||||
tags_set = meta.get("tags", {})
|
||||
tags_set = meta.get("tags",
|
||||
{})
|
||||
all_tags: list[str] = []
|
||||
title = f"Hydrus File {file_id}"
|
||||
if isinstance(tags_set, dict):
|
||||
@@ -748,20 +818,24 @@ class HydrusNetwork(Store):
|
||||
if not tag_l:
|
||||
continue
|
||||
all_tags.append(tag_l)
|
||||
if (
|
||||
tag_l.startswith("title:")
|
||||
and title == f"Hydrus File {file_id}"
|
||||
):
|
||||
if (tag_l.startswith("title:") and title
|
||||
== f"Hydrus File {file_id}"):
|
||||
title = tag_l.split(":", 1)[1].strip()
|
||||
|
||||
for _service_name, service_tags in tags_set.items():
|
||||
if not isinstance(service_tags, dict):
|
||||
continue
|
||||
storage_tags = service_tags.get("storage_tags", {})
|
||||
storage_tags = service_tags.get(
|
||||
"storage_tags",
|
||||
{}
|
||||
)
|
||||
if isinstance(storage_tags, dict):
|
||||
for tag_list in storage_tags.values():
|
||||
_collect(tag_list)
|
||||
display_tags = service_tags.get("display_tags", [])
|
||||
display_tags = service_tags.get(
|
||||
"display_tags",
|
||||
[]
|
||||
)
|
||||
_collect(display_tags)
|
||||
|
||||
file_url = f"{self.URL.rstrip('/')}/get_files/file?hash={hash_hex}"
|
||||
@@ -814,7 +888,8 @@ class HydrusNetwork(Store):
|
||||
metadata_list = []
|
||||
|
||||
# If our free-text searches produce nothing (or nothing survived downstream filtering), fallback to scanning.
|
||||
if (not metadata_list) and (query_lower != "*") and (":" not in query_lower):
|
||||
if (not metadata_list) and (query_lower
|
||||
!= "*") and (":" not in query_lower):
|
||||
try:
|
||||
search_result = client.search_files(
|
||||
tags=["system:everything"],
|
||||
@@ -858,7 +933,8 @@ class HydrusNetwork(Store):
|
||||
size = meta.get("size", 0)
|
||||
|
||||
# Get tags for this file and extract title
|
||||
tags_set = meta.get("tags", {})
|
||||
tags_set = meta.get("tags",
|
||||
{})
|
||||
all_tags = []
|
||||
title = f"Hydrus File {file_id}" # Default fallback
|
||||
all_tags_str = "" # For substring matching
|
||||
@@ -880,14 +956,16 @@ class HydrusNetwork(Store):
|
||||
continue
|
||||
all_tags.append(tag_l)
|
||||
all_tags_str += " " + tag_l
|
||||
if tag_l.startswith("title:") and title == f"Hydrus File {file_id}":
|
||||
if tag_l.startswith("title:"
|
||||
) and title == f"Hydrus File {file_id}":
|
||||
title = tag_l.split(":", 1)[1].strip()
|
||||
|
||||
for _service_name, service_tags in tags_set.items():
|
||||
if not isinstance(service_tags, dict):
|
||||
continue
|
||||
|
||||
storage_tags = service_tags.get("storage_tags", {})
|
||||
storage_tags = service_tags.get("storage_tags",
|
||||
{})
|
||||
if isinstance(storage_tags, dict):
|
||||
for tag_list in storage_tags.values():
|
||||
_collect(tag_list)
|
||||
@@ -939,7 +1017,8 @@ class HydrusNetwork(Store):
|
||||
# Free-form search: check if search terms match title or FREEFORM tags.
|
||||
# Do NOT implicitly match other namespace tags (except title:).
|
||||
freeform_tags = [
|
||||
t for t in all_tags if isinstance(t, str) and t and (":" not in t)
|
||||
t for t in all_tags
|
||||
if isinstance(t, str) and t and (":" not in t)
|
||||
]
|
||||
searchable_text = (title + " " + " ".join(freeform_tags)).lower()
|
||||
|
||||
@@ -1019,13 +1098,17 @@ class HydrusNetwork(Store):
|
||||
return False
|
||||
|
||||
file_hash = str(file_identifier or "").strip().lower()
|
||||
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
|
||||
debug(f"{self._log_prefix()} delete_file: invalid file hash '{file_identifier}'")
|
||||
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
|
||||
for ch in file_hash):
|
||||
debug(
|
||||
f"{self._log_prefix()} delete_file: invalid file hash '{file_identifier}'"
|
||||
)
|
||||
return False
|
||||
|
||||
reason = kwargs.get("reason")
|
||||
reason_text = (
|
||||
str(reason).strip() if isinstance(reason, str) and reason.strip() else None
|
||||
str(reason).strip() if isinstance(reason,
|
||||
str) and reason.strip() else None
|
||||
)
|
||||
|
||||
# 1) Delete file
|
||||
@@ -1035,7 +1118,9 @@ class HydrusNetwork(Store):
|
||||
try:
|
||||
client.clear_file_deletion_record([file_hash])
|
||||
except Exception as exc:
|
||||
debug(f"{self._log_prefix()} delete_file: clear_file_deletion_record failed: {exc}")
|
||||
debug(
|
||||
f"{self._log_prefix()} delete_file: clear_file_deletion_record failed: {exc}"
|
||||
)
|
||||
|
||||
return True
|
||||
except Exception as exc:
|
||||
@@ -1078,11 +1163,13 @@ class HydrusNetwork(Store):
|
||||
|
||||
# Extract title from tags
|
||||
title = f"Hydrus_{file_hash[:12]}"
|
||||
tags_payload = meta.get("tags", {})
|
||||
tags_payload = meta.get("tags",
|
||||
{})
|
||||
if isinstance(tags_payload, dict):
|
||||
for service_data in tags_payload.values():
|
||||
if isinstance(service_data, dict):
|
||||
display_tags = service_data.get("display_tags", {})
|
||||
display_tags = service_data.get("display_tags",
|
||||
{})
|
||||
if isinstance(display_tags, dict):
|
||||
current_tags = display_tags.get("0", [])
|
||||
if isinstance(current_tags, list):
|
||||
@@ -1096,7 +1183,8 @@ class HydrusNetwork(Store):
|
||||
# Hydrus may return mime as an int enum, or sometimes a human label.
|
||||
mime_val = meta.get("mime")
|
||||
filetype_human = (
|
||||
meta.get("filetype_human") or meta.get("mime_human") or meta.get("mime_string")
|
||||
meta.get("filetype_human") or meta.get("mime_human")
|
||||
or meta.get("mime_string")
|
||||
)
|
||||
|
||||
# Determine ext: prefer Hydrus metadata ext, then filetype_human (when it looks like an ext),
|
||||
@@ -1170,14 +1258,16 @@ class HydrusNetwork(Store):
|
||||
except Exception:
|
||||
dur_int = None
|
||||
|
||||
raw_urls = meta.get("known_urls") or meta.get("urls") or meta.get("url") or []
|
||||
raw_urls = meta.get("known_urls") or meta.get("urls") or meta.get("url"
|
||||
) or []
|
||||
url_list: list[str] = []
|
||||
if isinstance(raw_urls, str):
|
||||
s = raw_urls.strip()
|
||||
url_list = [s] if s else []
|
||||
elif isinstance(raw_urls, list):
|
||||
url_list = [
|
||||
str(u).strip() for u in raw_urls if isinstance(u, str) and str(u).strip()
|
||||
str(u).strip() for u in raw_urls
|
||||
if isinstance(u, str) and str(u).strip()
|
||||
]
|
||||
|
||||
return {
|
||||
@@ -1212,8 +1302,11 @@ class HydrusNetwork(Store):
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
|
||||
file_hash = str(file_identifier or "").strip().lower()
|
||||
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
|
||||
debug(f"{self._log_prefix()} get_tags: invalid file hash '{file_identifier}'")
|
||||
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
|
||||
for ch in file_hash):
debug(
f"{self._log_prefix()} get_tags: invalid file hash '{file_identifier}'"
)
return [], "unknown"

# Get Hydrus client and service info
@@ -1224,17 +1317,23 @@ class HydrusNetwork(Store):

# Fetch file metadata
payload = client.fetch_file_metadata(
hashes=[file_hash], include_service_keys_to_tags=True, include_file_url=False
hashes=[file_hash],
include_service_keys_to_tags=True,
include_file_url=False
)

items = payload.get("metadata") if isinstance(payload, dict) else None
if not isinstance(items, list) or not items:
debug(f"{self._log_prefix()} get_tags: no metadata for hash {file_hash}")
debug(
f"{self._log_prefix()} get_tags: no metadata for hash {file_hash}"
)
return [], "unknown"

meta = items[0] if isinstance(items[0], dict) else None
if not isinstance(meta, dict) or meta.get("file_id") is None:
debug(f"{self._log_prefix()} get_tags: invalid metadata for hash {file_hash}")
debug(
f"{self._log_prefix()} get_tags: invalid metadata for hash {file_hash}"
)
return [], "unknown"

# Extract tags using service name
@@ -1261,14 +1360,16 @@ class HydrusNetwork(Store):
return False

file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
debug(f"{self._log_prefix()} add_tag: invalid file hash '{file_identifier}'")
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
debug(
f"{self._log_prefix()} add_tag: invalid file hash '{file_identifier}'"
)
return False
service_name = kwargs.get("service_name") or "my tags"

incoming_tags = [
str(t).strip().lower()
for t in (tags or [])
str(t).strip().lower() for t in (tags or [])
if isinstance(t, str) and str(t).strip()
]
if not incoming_tags:
@@ -1316,13 +1417,17 @@ class HydrusNetwork(Store):
return False

file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
debug(f"{self._log_prefix()} delete_tag: invalid file hash '{file_identifier}'")
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
debug(
f"{self._log_prefix()} delete_tag: invalid file hash '{file_identifier}'"
)
return False
service_name = kwargs.get("service_name") or "my tags"
raw_list = list(tags) if isinstance(tags, (list, tuple)) else [str(tags)]
tag_list = [
str(t).strip().lower() for t in raw_list if isinstance(t, str) and str(t).strip()
str(t).strip().lower() for t in raw_list
if isinstance(t, str) and str(t).strip()
]
if not tag_list:
return False
@@ -1338,16 +1443,22 @@ class HydrusNetwork(Store):
client = self._client

file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
return []

payload = client.fetch_file_metadata(hashes=[file_hash], include_file_url=False)
payload = client.fetch_file_metadata(
hashes=[file_hash],
include_file_url=False
)
items = payload.get("metadata") if isinstance(payload, dict) else None
if not isinstance(items, list) or not items:
return []
meta = items[0] if isinstance(items[0], dict) else {}
meta = items[0] if isinstance(items[0],
dict) else {}

raw_urls: Any = meta.get("known_urls") or meta.get("urls") or meta.get("url") or []
raw_urls: Any = meta.get("known_urls"
) or meta.get("urls") or meta.get("url") or []
if isinstance(raw_urls, str):
val = raw_urls.strip()
return [val] if val else []
@@ -1434,7 +1545,8 @@ class HydrusNetwork(Store):
return {}

file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
return {}

payload = client.fetch_file_metadata(hashes=[file_hash], include_notes=True)
@@ -1447,14 +1559,23 @@ class HydrusNetwork(Store):

notes_payload = meta.get("notes")
if isinstance(notes_payload, dict):
return {str(k): str(v or "") for k, v in notes_payload.items() if str(k).strip()}
return {
str(k): str(v or "")
for k, v in notes_payload.items() if str(k).strip()
}

return {}
except Exception as exc:
debug(f"{self._log_prefix()} get_note failed: {exc}")
return {}

def set_note(self, file_identifier: str, name: str, text: str, **kwargs: Any) -> bool:
def set_note(
self,
file_identifier: str,
name: str,
text: str,
**kwargs: Any
) -> bool:
"""Set a named note for a Hydrus file (default note service only)."""
try:
client = self._client
@@ -1463,7 +1584,8 @@ class HydrusNetwork(Store):
return False

file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
return False

note_name = str(name or "").strip()
@@ -1471,7 +1593,10 @@ class HydrusNetwork(Store):
return False
note_text = str(text or "")

client.set_notes(file_hash, {note_name: note_text})
client.set_notes(file_hash,
{
note_name: note_text
})
return True
except Exception as exc:
debug(f"{self._log_prefix()} set_note failed: {exc}")
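As a rough usage sketch of the set_note signature reformatted above (not part of this commit; the backend variable, hash, and note values are invented for illustration), a caller passes a 64-character hex identifier plus a note name and body and gets a boolean back:

# Hypothetical call against an already-constructed HydrusNetwork backend named "store".
file_hash = "a3f1" + "0" * 60          # any 64-char lowercase hex string
ok = store.set_note(file_hash, "comment", "example note text")
# ok is True on success; an invalid hash or a client error yields False.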
@@ -1486,7 +1611,8 @@ class HydrusNetwork(Store):
return False

file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef" for ch in file_hash):
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
return False

note_name = str(name or "").strip()
@@ -1501,7 +1627,10 @@ class HydrusNetwork(Store):

@staticmethod
def _extract_tags_from_hydrus_meta(
meta: Dict[str, Any], service_key: Optional[str], service_name: str
meta: Dict[str,
Any],
service_key: Optional[str],
service_name: str
) -> List[str]:
"""Extract current tags from Hydrus metadata dict.

@@ -1521,7 +1650,10 @@ class HydrusNetwork(Store):
# Prefer display_tags (Hydrus computes siblings/parents)
display = svc_data.get("display_tags")
if isinstance(display, list) and display:
return [str(t) for t in display if isinstance(t, (str, bytes)) and str(t).strip()]
return [
str(t) for t in display
if isinstance(t, (str, bytes)) and str(t).strip()
]

# Fallback to storage_tags status '0' (current)
storage = svc_data.get("storage_tags")
@@ -1529,7 +1661,8 @@ class HydrusNetwork(Store):
current_list = storage.get("0") or storage.get(0)
if isinstance(current_list, list):
return [
str(t) for t in current_list if isinstance(t, (str, bytes)) and str(t).strip()
str(t) for t in current_list
if isinstance(t, (str, bytes)) and str(t).strip()
]

return []
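A minimal standalone sketch of the tag-extraction order visible in the hunks above (prefer a service's display_tags, then fall back to storage_tags status "0" for current tags); this helper is illustrative only, not part of the commit, and the svc_data shape is assumed from the reformatted code:

def _tags_from_service_data(svc_data: dict) -> list:
    # Prefer display_tags, which Hydrus has already resolved through siblings/parents.
    display = svc_data.get("display_tags")
    if isinstance(display, list) and display:
        return [str(t) for t in display if str(t).strip()]
    # Otherwise fall back to storage_tags, where status key "0" holds the current tags.
    storage = svc_data.get("storage_tags")
    if isinstance(storage, dict):
        current = storage.get("0") or storage.get(0)
        if isinstance(current, list):
            return [str(t) for t in current if str(t).strip()]
    return []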
@@ -11,6 +11,7 @@ from typing import Any, Dict, List, Optional, Tuple


class Store(ABC):

@abstractmethod
def add_file(self, file_path: Path, **kwargs: Any) -> str:
raise NotImplementedError
@@ -65,7 +66,12 @@ class Store(ABC):
continue
return changed_any

def delete_url_bulk(self, items: List[Tuple[str, List[str]]], **kwargs: Any) -> bool:
def delete_url_bulk(
self,
items: List[Tuple[str,
List[str]]],
**kwargs: Any
) -> bool:
"""Optional bulk url deletion.

Backends may override this to batch writes (single transaction / request).
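The docstring above marks delete_url_bulk as an override point; the following per-item fallback is an illustrative sketch only (not code from this repository; delete_url is assumed to be the base class's single-file method) showing the kind of loop a backend avoids by batching into a single transaction or request:

def delete_url_bulk_fallback(store, items, **kwargs):
    # Try each (file_identifier, urls) pair individually and report whether anything changed;
    # an overriding backend would instead submit all pairs in one write.
    changed_any = False
    for file_identifier, urls in items:
        try:
            if store.delete_url(file_identifier, urls, **kwargs):
                changed_any = True
        except Exception:
            continue
    return changed_any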
@@ -108,12 +114,23 @@ class Store(ABC):
raise NotImplementedError

@abstractmethod
def set_note(self, file_identifier: str, name: str, text: str, **kwargs: Any) -> bool:
def set_note(
self,
file_identifier: str,
name: str,
text: str,
**kwargs: Any
) -> bool:
"""Add or replace a named note for a file."""
raise NotImplementedError

def selector(
self, selected_items: List[Any], *, ctx: Any, stage_is_last: bool = True, **_kwargs: Any
self,
selected_items: List[Any],
*,
ctx: Any,
stage_is_last: bool = True,
**_kwargs: Any
) -> bool:
"""Optional hook for handling `@N` selection semantics.


@@ -22,13 +22,13 @@ from SYS.logger import debug

from Store._base import Store as BaseStore


_SHA256_HEX_RE = re.compile(r"^[0-9a-fA-F]{64}$")


# Backends that failed to initialize earlier in the current process.
# Keyed by (store_type, instance_key) where instance_key is the name used under config.store.<type>.<instance_key>.
_FAILED_BACKEND_CACHE: Dict[tuple[str, str], str] = {}
_FAILED_BACKEND_CACHE: Dict[tuple[str,
str],
str] = {}
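To make the cache key in the comment above concrete, here is a hypothetical config.store layout (the type keys, instance names, paths, and error text are invented for illustration; only the PATH and NAME key names appear elsewhere in this diff); a backend whose constructor raised earlier in the process is skipped on later loads by looking up (store_type, instance_key):

# Hypothetical config shape implied by config.store.<type>.<instance_key>.
config = {
    "store": {
        "folder": {
            "archive": {"PATH": "~/archive"},   # instance_key "archive"
        },
        "hydrusnetwork": {
            "home": {"NAME": "home"},           # instance_key "home"
        },
    },
}
# If the "archive" folder backend failed to initialize earlier, the registry would
# remember it roughly like this and reuse the error text instead of retrying:
_FAILED_BACKEND_CACHE = {("folder", "archive"): "location does not exist"}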
def _normalize_store_type(value: str) -> str:
@@ -57,10 +57,13 @@ def _discover_store_classes() -> Dict[str, Type[BaseStore]]:
"""
import Store as store_pkg

discovered: Dict[str, Type[BaseStore]] = {}
discovered: Dict[str,
Type[BaseStore]] = {}
for module_info in pkgutil.iter_modules(store_pkg.__path__):
module_name = module_info.name
if module_name in {"__init__", "_base", "registry"}:
if module_name in {"__init__",
"_base",
"registry"}:
continue

module = importlib.import_module(f"Store.{module_name}")
@@ -85,12 +88,15 @@ def _required_keys_for(store_cls: Type[BaseStore]) -> list[str]:
return [str(k) for k in keys]
if isinstance(keys, str):
return [keys]
raise TypeError(f"Unsupported __new__.keys type for {store_cls.__name__}: {type(keys)}")
raise TypeError(
f"Unsupported __new__.keys type for {store_cls.__name__}: {type(keys)}"
)


def _build_kwargs(
store_cls: Type[BaseStore], instance_name: str, instance_config: Any
) -> Dict[str, Any]:
def _build_kwargs(store_cls: Type[BaseStore],
instance_name: str,
instance_config: Any) -> Dict[str,
Any]:
if isinstance(instance_config, dict):
cfg_dict = dict(instance_config)
else:
@@ -99,13 +105,13 @@ def _build_kwargs(
required = _required_keys_for(store_cls)

# If NAME is required but not present, allow the instance key to provide it.
if (
any(_normalize_config_key(k) == "NAME" for k in required)
and _get_case_insensitive(cfg_dict, "NAME") is None
):
if (any(_normalize_config_key(k) == "NAME" for k in required)
and _get_case_insensitive(cfg_dict,
"NAME") is None):
cfg_dict["NAME"] = str(instance_name)

kwargs: Dict[str, Any] = {}
kwargs: Dict[str,
Any] = {}
missing: list[str] = []
for key in required:
value = _get_case_insensitive(cfg_dict, key)
@@ -115,23 +121,36 @@ def _build_kwargs(
kwargs[str(key)] = value

if missing:
raise ValueError(f"Missing required keys for {store_cls.__name__}: {', '.join(missing)}")
raise ValueError(
f"Missing required keys for {store_cls.__name__}: {', '.join(missing)}"
)

return kwargs
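As a worked example of the NAME fallback handled in the _build_kwargs hunks above (a sketch only; the required-key list and config values are assumptions, not taken from this diff): when a store class requires NAME but the instance config omits it, the instance key itself supplies the name.

# Hypothetical input/output for _build_kwargs under the rule shown above.
# Suppose the store class declares __new__.keys == ["NAME", "PATH"] and the config
# provides only a path for the instance key "downloads":
instance_name = "downloads"
instance_config = {"path": "~/Downloads"}    # keys are matched case-insensitively
# _build_kwargs(store_cls, instance_name, instance_config) would then return:
expected = {"NAME": "downloads", "PATH": "~/Downloads"}
# (~ expansion happens later, in the PATH/LOCATION normalization shown further down.)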
class Store:

def __init__(
self, config: Optional[Dict[str, Any]] = None, suppress_debug: bool = False
self,
config: Optional[Dict[str,
Any]] = None,
suppress_debug: bool = False
) -> None:
self._config = config or {}
self._suppress_debug = suppress_debug
self._backends: Dict[str, BaseStore] = {}
self._backend_errors: Dict[str, str] = {}
self._backends: Dict[str,
BaseStore] = {}
self._backend_errors: Dict[str,
str] = {}
self._load_backends()

def _maybe_register_temp_alias(
self, store_type: str, backend_name: str, kwargs: Dict[str, Any], backend: BaseStore
self,
store_type: str,
backend_name: str,
kwargs: Dict[str,
Any],
backend: BaseStore
) -> None:
"""If a folder backend points at config['temp'], also expose it as the 'temp' backend.

@@ -189,16 +208,24 @@ class Store:
if cached_error:
self._backend_errors[str(instance_name)] = str(cached_error)
if isinstance(instance_config, dict):
override_name = _get_case_insensitive(dict(instance_config), "NAME")
override_name = _get_case_insensitive(
dict(instance_config),
"NAME"
)
if override_name:
self._backend_errors[str(override_name)] = str(cached_error)
continue
try:
kwargs = _build_kwargs(store_cls, str(instance_name), instance_config)
kwargs = _build_kwargs(
store_cls,
str(instance_name),
instance_config
)

# Convenience normalization for filesystem-like paths.
for key in list(kwargs.keys()):
if _normalize_config_key(key) in {"PATH", "LOCATION"}:
if _normalize_config_key(key) in {"PATH",
"LOCATION"}:
kwargs[key] = str(Path(str(kwargs[key])).expanduser())

backend = store_cls(**kwargs)
@@ -207,7 +234,12 @@ class Store:
self._backends[backend_name] = backend

# If this is the configured temp directory, also alias it as 'temp'.
self._maybe_register_temp_alias(store_type, backend_name, kwargs, backend)
self._maybe_register_temp_alias(
store_type,
backend_name,
kwargs,
backend
)
except Exception as exc:
err_text = str(exc)
self._backend_errors[str(instance_name)] = err_text
@@ -233,7 +265,8 @@ class Store:
return 2
return 1

chosen: Dict[int, str] = {}
chosen: Dict[int,
str] = {}
for name, backend in self._backends.items():
if type(backend).search is BaseStore.search:
continue