Add YAPF style + ignore, and format tracked Python files
Store/Folder.py (425 changed lines)
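The commit title says a YAPF style file and an ignore file were added, but neither file's contents appear in this diff (only Store/Folder.py is shown). As a rough, hypothetical sketch only, a configuration that would produce the one-argument-per-line splitting visible in the hunks below could look like the following; the file names follow standard YAPF conventions, but the option values and the ignore pattern are assumptions, not taken from the repository:

# .style.yapf (hypothetical values)
[style]
based_on_style = pep8
column_limit = 100
split_all_comma_separated_values = true

# .yapfignore (hypothetical pattern)
.venv/

"Format tracked Python files" then presumably amounts to running YAPF over the git-tracked sources, for example:

git ls-files '*.py' | xargs yapf --in-place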
@@ -36,7 +36,11 @@ class Folder(Store):
# Track which locations have already been migrated to avoid repeated migrations
_migrated_locations = set()
# Cache scan results to avoid repeated full scans across repeated instantiations
_scan_cache: Dict[str, Tuple[bool, str, Dict[str, int]]] = {}
_scan_cache: Dict[str,
Tuple[bool,
str,
Dict[str,
int]]] = {}

def __new__(cls, *args: Any, **kwargs: Any) -> "Folder":
return super().__new__(cls)
@@ -62,7 +66,8 @@ class Folder(Store):
# Scan status (set during init)
self.scan_ok: bool = True
self.scan_detail: str = ""
self.scan_stats: Dict[str, int] = {}
self.scan_stats: Dict[str,
int] = {}

if self._location:
try:
@@ -95,10 +100,16 @@ class Folder(Store):
f" (Total: {total_db})" if total_db else ""
)
else:
detail = "Up to date" + (f" (Total: {total_db})" if total_db else "")
detail = "Up to date" + (
f" (Total: {total_db})" if total_db else ""
)
Folder._scan_cache[location_key] = (True, detail, dict(stats))
except Exception as exc:
Folder._scan_cache[location_key] = (False, f"Scan failed: {exc}", {})
Folder._scan_cache[location_key] = (
False,
f"Scan failed: {exc}",
{}
)

ok, detail, stats = Folder._scan_cache.get(location_key, (True, "", {}))
self.scan_ok = bool(ok)
@@ -154,8 +165,7 @@ class Folder(Store):

# Check if filename is already a hash (without extension)
if len(file_path.stem) == 64 and all(
c in "0123456789abcdef" for c in file_path.stem.lower()
):
c in "0123456789abcdef" for c in file_path.stem.lower()):
continue # Already migrated, will process in second pass

try:
@@ -179,7 +189,8 @@ class Folder(Store):
tags_to_add = list(tags)
# Check if title tag exists
has_title_tag = any(
t.lower().startswith("title:") for t in tags_to_add
t.lower().startswith("title:")
for t in tags_to_add
)
if url:
url_to_add = list(url)
@@ -202,7 +213,8 @@ class Folder(Store):
# Rename file to hash if needed
if hash_path != file_path and not hash_path.exists():
debug(
f"Migrating: {file_path.name} -> {hash_filename}", file=sys.stderr
f"Migrating: {file_path.name} -> {hash_filename}",
file=sys.stderr
)
file_path.rename(hash_path)

@@ -210,7 +222,8 @@ class Folder(Store):
try:
cursor.execute(
"UPDATE files SET file_path = ?, updated_at = CURRENT_TIMESTAMP WHERE hash = ?",
(db._to_db_file_path(hash_path), file_hash),
(db._to_db_file_path(hash_path),
file_hash),
)
except Exception:
pass
@@ -233,7 +246,8 @@ class Folder(Store):
if tags_to_add:
db.save_tags(hash_path, tags_to_add)
debug(
f"Added {len(tags_to_add)} tags to {file_hash}", file=sys.stderr
f"Added {len(tags_to_add)} tags to {file_hash}",
file=sys.stderr
)

# Note: url would need a separate table if you want to store them
@@ -245,7 +259,10 @@ class Folder(Store):
)

except Exception as exc:
debug(f"Failed to migrate file {file_path.name}: {exc}", file=sys.stderr)
debug(
f"Failed to migrate file {file_path.name}: {exc}",
file=sys.stderr
)

# Second pass: ensure all files in database have a title: tag
db.connection.commit()
@@ -267,7 +284,10 @@ class Folder(Store):
# Use the filename as the title
title_tag = f"title:{file_path.name}"
db.save_tags(file_path, [title_tag])
debug(f"Added title tag to {file_path.name}", file=sys.stderr)
debug(
f"Added title tag to {file_path.name}",
file=sys.stderr
)
except Exception as exc:
debug(
f"Failed to add title tag to file {file_path_str}: {exc}",
@@ -279,7 +299,8 @@ class Folder(Store):
# Third pass: discover files on disk that aren't in the database yet
# These are hash-named files that were added after initial indexing
cursor.execute("SELECT LOWER(hash) FROM files")
db_hashes = {row[0] for row in cursor.fetchall()}
db_hashes = {row[0]
for row in cursor.fetchall()}

discovered = 0
for file_path in sorted(location_path.rglob("*")):
@@ -287,8 +308,8 @@ class Folder(Store):
# Check if file name (without extension) is a 64-char hex hash
name_without_ext = file_path.stem
if len(name_without_ext) == 64 and all(
c in "0123456789abcdef" for c in name_without_ext.lower()
):
c in "0123456789abcdef"
for c in name_without_ext.lower()):
file_hash = name_without_ext.lower()

# Skip if already in DB
@@ -354,7 +375,8 @@ class Folder(Store):
# Extract title from tags if not explicitly provided
if not title:
for candidate in tag_list:
if isinstance(candidate, str) and candidate.lower().startswith("title:"):
if isinstance(candidate,
str) and candidate.lower().startswith("title:"):
title = candidate.split(":", 1)[1].strip()
break

@@ -364,7 +386,8 @@ class Folder(Store):

# Ensure title is in tags
title_tag = f"title:{title}"
if not any(str(candidate).lower().startswith("title:") for candidate in tag_list):
if not any(str(candidate).lower().startswith("title:")
for candidate in tag_list):
tag_list = [title_tag] + list(tag_list)

try:
@@ -426,7 +449,11 @@ class Folder(Store):
# After a move, the original path no longer exists; use destination for subsequent ops.
file_path = save_file
except Exception:
_copy_with_progress(file_path, save_file, label=f"folder:{self._name} move")
_copy_with_progress(
file_path,
save_file,
label=f"folder:{self._name} move"
)
try:
file_path.unlink(missing_ok=True) # type: ignore[arg-type]
except Exception:
@@ -438,7 +465,11 @@ class Folder(Store):
debug(f"Local move (copy+delete): {save_file}", file=sys.stderr)
file_path = save_file
else:
_copy_with_progress(file_path, save_file, label=f"folder:{self._name} copy")
_copy_with_progress(
file_path,
save_file,
label=f"folder:{self._name} copy"
)
debug(f"Local copy: {save_file}", file=sys.stderr)

# Best-effort: capture duration for media
@@ -512,11 +543,13 @@ class Folder(Store):
# Drop control / format chars (Cc/Cf) while preserving wildcard tokens when requested.
cleaned_chars: list[str] = []
for ch in s:
if allow_wildcards and ch in {"*", "?"}:
if allow_wildcards and ch in {"*",
"?"}:
cleaned_chars.append(ch)
continue
cat = unicodedata.category(ch)
if cat in {"Cc", "Cf"}:
if cat in {"Cc",
"Cf"}:
continue
cleaned_chars.append(ch)
s = "".join(cleaned_chars)
@@ -545,7 +578,11 @@ class Folder(Store):
sys_ext = _extract_system_filetype_ext(query_lower)
if sys_ext:
ext_filter = sys_ext
query_lower = re.sub(r"\s*\bsystem:filetype\s*(?:=\s*)?[^\s,]+", " ", query_lower)
query_lower = re.sub(
r"\s*\bsystem:filetype\s*(?:=\s*)?[^\s,]+",
" ",
query_lower
)
query_lower = re.sub(r"\s{2,}", " ", query_lower).strip().strip(",")
query = query_lower

@@ -554,7 +591,11 @@ class Folder(Store):
m = re.search(r"\bextension:([^\s,]+)", query_lower)
if m:
ext_filter = _normalize_ext_filter(m.group(1)) or None
query_lower = re.sub(r"\s*\b(?:ext|extension):[^\s,]+", " ", query_lower)
query_lower = re.sub(
r"\s*\b(?:ext|extension):[^\s,]+",
" ",
query_lower
)
query_lower = re.sub(r"\s{2,}", " ", query_lower).strip().strip(",")
query = query_lower
except Exception:
@@ -594,11 +635,19 @@ class Folder(Store):
return results

def _create_entry(
file_path: Path, tags: list[str], size_bytes: int | None, db_hash: Optional[str]
) -> dict[str, Any]:
file_path: Path,
tags: list[str],
size_bytes: int | None,
db_hash: Optional[str]
) -> dict[str,
Any]:
path_str = str(file_path)
# Get title from tags if available, otherwise use hash as fallback
title = next((t.split(":", 1)[1] for t in tags if t.lower().startswith("title:")), None)
title = next(
(t.split(":",
1)[1] for t in tags if t.lower().startswith("title:")),
None
)
if not title:
# Fallback to hash if no title tag exists
hash_value = _resolve_file_hash(db_hash, file_path)
@@ -636,7 +685,10 @@ class Folder(Store):
if ext_filter:
# Fetch a bounded set of hashes to intersect with other filters.
ext_fetch_limit = (limit or 45) * 50
ext_hashes = api.get_file_hashes_by_ext(ext_filter, limit=ext_fetch_limit)
ext_hashes = api.get_file_hashes_by_ext(
ext_filter,
limit=ext_fetch_limit
)

# ext-only search: query is empty (or coerced to match_all above).
if ext_filter and (not query_lower or query_lower == "*"):
@@ -653,7 +705,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -664,7 +721,11 @@ class Folder(Store):
if limit is not None and len(results) >= limit:
return results
backend_label = str(
getattr(self, "_name", "") or getattr(self, "NAME", "") or "folder"
getattr(self,
"_name",
"") or getattr(self,
"NAME",
"") or "folder"
)
debug(f"[folder:{backend_label}] {len(results)} result(s)")
return results
@@ -695,36 +756,49 @@ class Folder(Store):
limit=url_fetch_limit
)
return api.get_file_hashes_by_url_like(
_url_like_pattern(pattern), limit=url_fetch_limit
_url_like_pattern(pattern),
limit=url_fetch_limit
)

if namespace == "system":
# Hydrus-compatible query: system:filetype = png
m_ft = re.match(r"^filetype\s*(?:=\s*)?(.+)$", pattern)
m_ft = re.match(
r"^filetype\s*(?:=\s*)?(.+)$",
pattern
)
if m_ft:
normalized_ext = _normalize_ext_filter(m_ft.group(1))
normalized_ext = _normalize_ext_filter(
m_ft.group(1)
)
if not normalized_ext:
return set()
return api.get_file_hashes_by_ext(
normalized_ext, limit=url_fetch_limit
normalized_ext,
limit=url_fetch_limit
)
return set()

if namespace in {"ext", "extension"}:
if namespace in {"ext",
"extension"}:
normalized_ext = _normalize_ext_filter(pattern)
if not normalized_ext:
return set()
return api.get_file_hashes_by_ext(
normalized_ext, limit=url_fetch_limit
normalized_ext,
limit=url_fetch_limit
)

if namespace == "store":
if pattern not in {"local", "file", "filesystem"}:
if pattern not in {"local",
"file",
"filesystem"}:
return set()
return api.get_all_file_hashes()

query_pattern = f"{namespace}:%"
tag_rows = api.get_file_hashes_by_tag_pattern(query_pattern)
tag_rows = api.get_file_hashes_by_tag_pattern(
query_pattern
)
matched: set[str] = set()
for file_hash, tag_val in tag_rows:
if not tag_val:
@@ -733,9 +807,13 @@ class Folder(Store):
if not tag_lower.startswith(f"{namespace}:"):
continue
value = _normalize_namespace_text(
tag_lower[len(namespace) + 1 :], allow_wildcards=False
tag_lower[len(namespace) + 1:],
allow_wildcards=False
)
pat = _normalize_namespace_text(
pattern,
allow_wildcards=True
)
pat = _normalize_namespace_text(pattern, allow_wildcards=True)
if fnmatch(value, pat):
matched.add(file_hash)
return matched
@@ -744,24 +822,35 @@ class Folder(Store):
like_pattern = f"%{_like_pattern(term)}%"
# Unqualified token: match file path, title: tags, and non-namespaced tags.
# Do NOT match other namespaces by default (e.g., artist:men at work).
hashes = set(api.get_file_hashes_by_path_pattern(like_pattern) or set())
hashes = set(
api.get_file_hashes_by_path_pattern(like_pattern)
or set()
)

try:
title_rows = api.get_files_by_namespace_pattern(
f"title:{like_pattern}", url_fetch_limit
f"title:{like_pattern}",
url_fetch_limit
)
hashes.update(
{row[0] for row in (title_rows or []) if row and row[0]}
{
row[0]
for row in (title_rows or []) if row and row[0]
}
)
except Exception:
pass

try:
simple_rows = api.get_files_by_simple_tag_pattern(
like_pattern, url_fetch_limit
like_pattern,
url_fetch_limit
)
hashes.update(
{row[0] for row in (simple_rows or []) if row and row[0]}
{
row[0]
for row in (simple_rows or []) if row and row[0]
}
)
except Exception:
pass
@@ -773,13 +862,16 @@ class Folder(Store):
for token in tokens:
hashes = _ids_for_token(token)
matching_hashes = (
hashes if matching_hashes is None else matching_hashes & hashes
hashes if matching_hashes is None else
matching_hashes & hashes
)
if not matching_hashes:
return results

if ext_hashes is not None:
matching_hashes = (matching_hashes or set()) & ext_hashes
matching_hashes = (
matching_hashes or set()
) & ext_hashes
if not matching_hashes:
return results

@@ -799,7 +891,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -840,7 +937,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -856,7 +958,10 @@ class Folder(Store):
if not pattern or pattern == "*":
rows = api.get_files_with_any_url(limit)
else:
rows = api.get_files_by_url_like(_url_like_pattern(pattern), limit)
rows = api.get_files_by_url_like(
_url_like_pattern(pattern),
limit
)
for file_hash, file_path_str, size_bytes, ext in rows:
if not file_path_str:
continue
@@ -869,7 +974,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
results.append(entry)
if limit is not None and len(results) >= limit:
return results
@@ -895,7 +1005,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -907,7 +1022,8 @@ class Folder(Store):
return results
return results

if namespace in {"ext", "extension"}:
if namespace in {"ext",
"extension"}:
normalized_ext = _normalize_ext_filter(pattern)
if not normalized_ext:
return results
@@ -924,7 +1040,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -944,15 +1065,22 @@ class Folder(Store):
if not file_path_str:
continue

tags = api.get_tags_by_namespace_and_file(file_hash, query_pattern)
tags = api.get_tags_by_namespace_and_file(
file_hash,
query_pattern
)

for tag in tags:
tag_lower = tag.lower()
if tag_lower.startswith(f"{namespace}:"):
value = _normalize_namespace_text(
tag_lower[len(namespace) + 1 :], allow_wildcards=False
tag_lower[len(namespace) + 1:],
allow_wildcards=False
)
pat = _normalize_namespace_text(
pattern,
allow_wildcards=True
)
pat = _normalize_namespace_text(pattern, allow_wildcards=True)
if fnmatch(value, pat):
if ext_hashes is not None and file_hash not in ext_hashes:
break
@@ -962,10 +1090,14 @@ class Folder(Store):
size_bytes = file_path.stat().st_size
all_tags = api.get_tags_for_file(file_hash)
entry = _create_entry(
file_path, all_tags, size_bytes, file_hash
file_path,
all_tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
db_ext = str(ext
or "").strip().lstrip(".")
if db_ext:
entry["ext"] = db_ext
except Exception:
@@ -985,7 +1117,8 @@ class Folder(Store):
# - non-namespaced tag
# Other namespaces (artist:, series:, etc.) are excluded unless explicitly queried.
terms = [
t.strip() for t in query_lower.replace(",", " ").split() if t.strip()
t.strip() for t in query_lower.replace(",", " ").split()
if t.strip()
]
if not terms:
terms = [query_lower]
@@ -1009,20 +1142,28 @@ class Folder(Store):

try:
title_rows = api.get_files_by_namespace_pattern(
f"title:{like_pattern}", fetch_limit
f"title:{like_pattern}",
fetch_limit
)
term_hashes.update(
{row[0] for row in (title_rows or []) if row and row[0]}
{
row[0]
for row in (title_rows or []) if row and row[0]
}
)
except Exception:
pass

try:
simple_rows = api.get_files_by_simple_tag_pattern(
like_pattern, fetch_limit
like_pattern,
fetch_limit
)
term_hashes.update(
{row[0] for row in (simple_rows or []) if row and row[0]}
{
row[0]
for row in (simple_rows or []) if row and row[0]
}
)
except Exception:
pass
@@ -1031,9 +1172,8 @@ class Folder(Store):
term_hashes &= ext_hashes

matching_hashes = (
term_hashes
if matching_hashes is None
else (matching_hashes & term_hashes)
term_hashes if matching_hashes is None else
(matching_hashes & term_hashes)
)
if not matching_hashes:
return results
@@ -1054,7 +1194,12 @@ class Folder(Store):
except OSError:
size_bytes = None
tags = api.get_tags_for_file(file_hash)
entry_obj = _create_entry(file_path, tags, size_bytes, file_hash)
entry_obj = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -1077,7 +1222,12 @@ class Folder(Store):
size_bytes = file_path.stat().st_size

tags = api.get_tags_for_file(file_hash)
entry = _create_entry(file_path, tags, size_bytes, file_hash)
entry = _create_entry(
file_path,
tags,
size_bytes,
file_hash
)
try:
db_ext = str(ext or "").strip().lstrip(".")
if db_ext:
@@ -1087,7 +1237,11 @@ class Folder(Store):
results.append(entry)

backend_label = str(
getattr(self, "_name", "") or getattr(self, "NAME", "") or "folder"
getattr(self,
"_name",
"") or getattr(self,
"NAME",
"") or "folder"
)
debug(f"[folder:{backend_label}] {len(results)} result(s)")
return results
@@ -1101,7 +1255,10 @@ class Folder(Store):
log(f"❌ Local search failed: {exc}", file=sys.stderr)
raise

def _resolve_library_root(self, file_path: Path, config: Dict[str, Any]) -> Optional[Path]:
def _resolve_library_root(self,
file_path: Path,
config: Dict[str,
Any]) -> Optional[Path]:
"""Return the library root containing medios-macina.db.

Prefer the store's configured location, then config override, then walk parents
@@ -1188,7 +1345,8 @@ class Folder(Store):
"""
SELECT * FROM metadata WHERE hash = ?
""",
(file_hash_result,),
(file_hash_result,
),
)

row = cursor.fetchone()
@@ -1259,7 +1417,8 @@ class Folder(Store):
try:
with API_folder_store(Path(self._location)) as db:
existing_tags = [
t for t in (db.get_tags(hash) or []) if isinstance(t, str) and t.strip()
t for t in (db.get_tags(hash) or [])
if isinstance(t, str) and t.strip()
]

from metadata import compute_namespaced_tag_overwrite
@@ -1273,13 +1432,16 @@ class Folder(Store):
# Folder DB tag table is case-sensitive and add_tags_to_hash() is additive.
# To enforce lowercase-only tags and namespace overwrites, rewrite the full tag set.
cursor = db.connection.cursor()
cursor.execute("DELETE FROM tags WHERE hash = ?", (hash,))
cursor.execute("DELETE FROM tags WHERE hash = ?",
(hash,
))
for t in merged:
t = str(t).strip().lower()
if t:
cursor.execute(
"INSERT OR IGNORE INTO tags (hash, tag) VALUES (?, ?)",
(hash, t),
(hash,
t),
)
db.connection.commit()
try:
@@ -1304,8 +1466,7 @@ class Folder(Store):
try:
with API_folder_store(Path(self._location)) as db:
tag_list = [
str(t).strip().lower()
for t in (tags or [])
str(t).strip().lower() for t in (tags or [])
if isinstance(t, str) and str(t).strip()
]
if not tag_list:
@@ -1362,7 +1523,12 @@ class Folder(Store):
existing_urls.append(u)
changed = True
if changed:
db.update_metadata_by_hash(file_hash, {"url": existing_urls})
db.update_metadata_by_hash(
file_hash,
{
"url": existing_urls
}
)
return True
except Exception as exc:
debug(f"Local DB add_url failed: {exc}")
@@ -1388,7 +1554,8 @@ class Folder(Store):
except Exception:
normalize_urls = None # type: ignore

merged_by_hash: Dict[str, List[str]] = {}
merged_by_hash: Dict[str,
List[str]] = {}
for file_identifier, url_list in items or []:
file_hash = str(file_identifier or "").strip().lower()
if not file_hash:
@@ -1399,9 +1566,13 @@ class Folder(Store):
try:
incoming = normalize_urls(url_list)
except Exception:
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
incoming = [
str(u).strip() for u in (url_list or []) if str(u).strip()
]
else:
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
incoming = [
str(u).strip() for u in (url_list or []) if str(u).strip()
]

if not incoming:
continue
@@ -1427,23 +1598,30 @@ class Folder(Store):
for file_hash in merged_by_hash.keys():
try:
cursor.execute(
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)", (file_hash,)
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)",
(file_hash,
)
)
except Exception:
continue

# Load existing urls for all hashes in chunks.
existing_urls_by_hash: Dict[str, List[str]] = {h: [] for h in merged_by_hash.keys()}
existing_urls_by_hash: Dict[str,
List[str]] = {
h: []
for h in merged_by_hash.keys()
}
hashes = list(merged_by_hash.keys())
chunk_size = 400
for i in range(0, len(hashes), chunk_size):
chunk = hashes[i : i + chunk_size]
chunk = hashes[i:i + chunk_size]
if not chunk:
continue
placeholders = ",".join(["?"] * len(chunk))
try:
cursor.execute(
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})", chunk
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})",
chunk
)
rows = cursor.fetchall() or []
except Exception:
@@ -1469,7 +1647,8 @@ class Folder(Store):
else:
if isinstance(parsed, list):
parsed_urls = [
str(u).strip() for u in parsed if str(u).strip()
str(u).strip() for u in parsed
if str(u).strip()
]
except Exception:
parsed_urls = []
@@ -1515,12 +1694,16 @@ class Folder(Store):
with API_folder_store(Path(self._location)) as db:
meta = db.get_metadata(file_hash) or {}
existing_urls = normalize_urls(meta.get("url"))
remove_set = {u for u in normalize_urls(url) if u}
remove_set = {u
for u in normalize_urls(url) if u}
if not remove_set:
return False
new_urls = [u for u in existing_urls if u not in remove_set]
if new_urls != existing_urls:
db.update_metadata_by_hash(file_hash, {"url": new_urls})
db.update_metadata_by_hash(file_hash,
{
"url": new_urls
})
return True
except Exception as exc:
debug(f"Local DB delete_url failed: {exc}")
@@ -1529,7 +1712,8 @@ class Folder(Store):
debug(f"delete_url failed for local file: {exc}")
return False

def delete_url_bulk(self, items: List[tuple[str, List[str]]], **kwargs: Any) -> bool:
def delete_url_bulk(
self,
items: List[tuple[str,
List[str]]],
**kwargs: Any
) -> bool:
"""Delete known urls from many local files in one DB session."""
from API.folder import API_folder_store

@@ -1542,7 +1730,8 @@ class Folder(Store):
except Exception:
normalize_urls = None # type: ignore

remove_by_hash: Dict[str, set[str]] = {}
remove_by_hash: Dict[str,
set[str]] = {}
for file_identifier, url_list in items or []:
file_hash = str(file_identifier or "").strip().lower()
if not file_hash:
@@ -1553,11 +1742,16 @@ class Folder(Store):
try:
incoming = normalize_urls(url_list)
except Exception:
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
incoming = [
str(u).strip() for u in (url_list or []) if str(u).strip()
]
else:
incoming = [str(u).strip() for u in (url_list or []) if str(u).strip()]
incoming = [
str(u).strip() for u in (url_list or []) if str(u).strip()
]

remove = {u for u in incoming if u}
remove = {u
for u in incoming if u}
if not remove:
continue
remove_by_hash.setdefault(file_hash, set()).update(remove)
@@ -1577,23 +1771,30 @@ class Folder(Store):
for file_hash in remove_by_hash.keys():
try:
cursor.execute(
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)", (file_hash,)
"INSERT OR IGNORE INTO metadata (hash) VALUES (?)",
(file_hash,
)
)
except Exception:
continue

# Load existing urls for hashes in chunks.
existing_urls_by_hash: Dict[str, List[str]] = {h: [] for h in remove_by_hash.keys()}
existing_urls_by_hash: Dict[str,
List[str]] = {
h: []
for h in remove_by_hash.keys()
}
hashes = list(remove_by_hash.keys())
chunk_size = 400
for i in range(0, len(hashes), chunk_size):
chunk = hashes[i : i + chunk_size]
chunk = hashes[i:i + chunk_size]
if not chunk:
continue
placeholders = ",".join(["?"] * len(chunk))
try:
cursor.execute(
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})", chunk
f"SELECT hash, url FROM metadata WHERE hash IN ({placeholders})",
chunk
)
rows = cursor.fetchall() or []
except Exception:
@@ -1619,7 +1820,8 @@ class Folder(Store):
else:
if isinstance(parsed, list):
parsed_urls = [
str(u).strip() for u in parsed if str(u).strip()
str(u).strip() for u in parsed
if str(u).strip()
]
except Exception:
parsed_urls = []
@@ -1663,15 +1865,24 @@ class Folder(Store):
getter = getattr(db, "get_notes", None)
if callable(getter):
notes = getter(file_hash)
return notes if isinstance(notes, dict) else {}
return notes if isinstance(notes,
dict) else {}
# Fallback: default-only
note = db.get_note(file_hash)
return {"default": str(note or "")} if note else {}
return {
"default": str(note or "")
} if note else {}
except Exception as exc:
debug(f"get_note failed for local file: {exc}")
return {}

def set_note(self, file_identifier: str, name: str, text: str, **kwargs: Any) -> bool:
def set_note(
self,
file_identifier: str,
name: str,
text: str,
**kwargs: Any
) -> bool:
"""Set a named note for a local file by hash."""
from API.folder import API_folder_store

@@ -1683,7 +1894,8 @@ class Folder(Store):
return False

file_path = self.get_file(file_hash, **kwargs)
if not file_path or not isinstance(file_path, Path) or not file_path.exists():
if not file_path or not isinstance(file_path,
Path) or not file_path.exists():
return False

with API_folder_store(Path(self._location)) as db:
@@ -1729,11 +1941,13 @@ class Folder(Store):
cursor = conn.cursor()

# Look up file paths for hashes in chunks (to verify existence).
wanted_hashes = sorted({h for (h, _n, _t) in normalized})
hash_to_path: Dict[str, str] = {}
wanted_hashes = sorted({h
for (h, _n, _t) in normalized})
hash_to_path: Dict[str,
str] = {}
chunk_size = 400
for i in range(0, len(wanted_hashes), chunk_size):
chunk = wanted_hashes[i : i + chunk_size]
chunk = wanted_hashes[i:i + chunk_size]
if not chunk:
continue
placeholders = ",".join(["?"] * len(chunk))
@@ -1809,7 +2023,8 @@ class Folder(Store):
deleter2 = getattr(db, "save_note", None)
if callable(deleter2):
file_path = self.get_file(file_hash, **kwargs)
if file_path and isinstance(file_path, Path) and file_path.exists():
if file_path and isinstance(file_path,
Path) and file_path.exists():
deleter2(file_path, "")
return True
return False