This commit is contained in:
2026-01-02 02:28:59 -08:00
parent deb05c0d44
commit 6e9a0c28ff
13 changed files with 1402 additions and 2334 deletions

View File

@@ -60,7 +60,7 @@ class Folder(Store):
if location is None and PATH is not None:
location = str(PATH)
self._location = location
self._location = str(location) if location is not None else ""
self._name = name
# Scan status (set during init)
@@ -221,7 +221,7 @@ class Folder(Store):
# Ensure DB points to the renamed path (update by hash).
try:
cursor.execute(
"UPDATE files SET file_path = ?, updated_at = CURRENT_TIMESTAMP WHERE hash = ?",
"UPDATE file SET file_path = ?, updated_at = CURRENT_TIMESTAMP WHERE hash = ?",
(db._to_db_file_path(hash_path),
file_hash),
)
@@ -269,9 +269,9 @@ class Folder(Store):
cursor.execute(
"""
SELECT f.hash, f.file_path
FROM files f
FROM file f
WHERE NOT EXISTS (
SELECT 1 FROM tags t WHERE t.hash = f.hash AND LOWER(t.tag) LIKE 'title:%'
SELECT 1 FROM tag t WHERE t.hash = f.hash AND LOWER(t.tag) LIKE 'title:%'
)
"""
)
@@ -298,7 +298,7 @@ class Folder(Store):
# Third pass: discover files on disk that aren't in the database yet
# These are hash-named files that were added after initial indexing
cursor.execute("SELECT LOWER(hash) FROM files")
cursor.execute("SELECT LOWER(hash) FROM file")
db_hashes = {row[0]
for row in cursor.fetchall()}
@@ -484,10 +484,17 @@ class Folder(Store):
except Exception:
duration_value = None
# Save to database
# Save to database (metadata + tag/url updates share one connection)
with API_folder_store(Path(self._location)) as db:
db.get_or_create_file_entry(save_file)
# Save metadata including extension
conn = getattr(db, "connection", None)
if conn is None:
raise RuntimeError("Folder store DB connection unavailable")
cursor = conn.cursor()
debug(
f"[Folder.add_file] saving metadata for hash {file_hash}",
file=sys.stderr,
)
ext_clean = file_ext.lstrip(".") if file_ext else ""
db.save_metadata(
save_file,
@@ -498,14 +505,77 @@ class Folder(Store):
"duration": duration_value,
},
)
debug(
f"[Folder.add_file] metadata stored for hash {file_hash}",
file=sys.stderr,
)
# Add tags if provided
if tag_list:
self.add_tag(file_hash, tag_list)
if tag_list:
try:
debug(
f"[Folder.add_file] merging {len(tag_list)} tags for {file_hash}",
file=sys.stderr,
)
from SYS.metadata import compute_namespaced_tag_overwrite
# Add url if provided
if url:
self.add_url(file_hash, url)
existing_tags = [
t for t in (db.get_tags(file_hash) or [])
if isinstance(t, str) and t.strip()
]
_to_remove, _to_add, merged = compute_namespaced_tag_overwrite(
existing_tags, tag_list or []
)
if _to_remove or _to_add:
cursor.execute("DELETE FROM tag WHERE hash = ?",
(file_hash,))
for t in merged:
tag_val = str(t).strip().lower()
if tag_val:
cursor.execute(
"INSERT OR IGNORE INTO tag (hash, tag) VALUES (?, ?)",
(file_hash, tag_val),
)
conn.commit()
debug(
f"[Folder.add_file] tags rewritten for {file_hash}",
file=sys.stderr,
)
try:
db._update_metadata_modified_time(file_hash)
except Exception:
pass
except Exception as exc:
debug(f"Local DB tag merge failed: {exc}", file=sys.stderr)
if url:
try:
debug(
f"[Folder.add_file] merging {len(url)} URLs for {file_hash}",
file=sys.stderr,
)
from SYS.metadata import normalize_urls
existing_meta = db.get_metadata(file_hash) or {}
existing_urls = normalize_urls(existing_meta.get("url"))
incoming_urls = normalize_urls(url)
changed = False
for entry in list(incoming_urls or []):
if not entry:
continue
if entry not in existing_urls:
existing_urls.append(entry)
changed = True
if changed:
db.update_metadata_by_hash(
file_hash,
{"url": existing_urls},
)
debug(
f"[Folder.add_file] URLs merged for {file_hash}",
file=sys.stderr,
)
except Exception as exc:
debug(f"Local DB URL merge failed: {exc}", file=sys.stderr)
##log(f"✓ Added to local storage: {save_file.name}", file=sys.stderr)
return file_hash
@@ -1373,6 +1443,34 @@ class Folder(Store):
debug(f"Failed to get metadata for hash {file_hash}: {exc}")
return None
def set_relationship(self, alt_hash: str, king_hash: str, kind: str = "alt") -> bool:
    """Persist a relationship in the folder store DB.

    Thin wrapper over the folder DB API so cmdlets need no
    backend-specific branching.

    Returns True when the relationship was written; False on any
    failure (no store location configured, empty/identical hashes
    after normalization, or any DB/API error).
    """
    try:
        if not self._location:
            return False
        # Normalize both sides; reject empty or self-referential pairs.
        child = _normalize_hash(alt_hash)
        parent = _normalize_hash(king_hash)
        if not child or not parent:
            return False
        if child == parent:
            return False
        from API.folder import API_folder_store

        store_root = Path(self._location).expanduser()
        with API_folder_store(store_root) as db:
            db.set_relationship_by_hash(
                child,
                parent,
                str(kind or "alt"),
                bidirectional=False,
            )
        return True
    except Exception:
        # Best-effort contract: every failure mode reports False.
        return False
def get_tag(self, file_identifier: str, **kwargs: Any) -> Tuple[List[str], str]:
"""Get tags for a local file by hash.
@@ -1432,14 +1530,14 @@ class Folder(Store):
# Folder DB tag table is case-sensitive and add_tags_to_hash() is additive.
# To enforce lowercase-only tags and namespace overwrites, rewrite the full tag set.
cursor = db.connection.cursor()
cursor.execute("DELETE FROM tags WHERE hash = ?",
cursor.execute("DELETE FROM tag WHERE hash = ?",
(hash,
))
for t in merged:
t = str(t).strip().lower()
if t:
cursor.execute(
"INSERT OR IGNORE INTO tags (hash, tag) VALUES (?, ?)",
"INSERT OR IGNORE INTO tag (hash, tag) VALUES (?, ?)",
(hash,
t),
)
@@ -1953,7 +2051,7 @@ class Folder(Store):
placeholders = ",".join(["?"] * len(chunk))
try:
cursor.execute(
f"SELECT hash, file_path FROM files WHERE hash IN ({placeholders})",
f"SELECT hash, file_path FROM file WHERE hash IN ({placeholders})",
chunk,
)
rows = cursor.fetchall() or []
@@ -1987,13 +2085,13 @@ class Folder(Store):
# Prefer upsert when supported, else fall back to INSERT OR REPLACE.
try:
cursor.executemany(
"INSERT INTO notes (hash, name, note) VALUES (?, ?, ?) "
"INSERT INTO note (hash, name, note) VALUES (?, ?, ?) "
"ON CONFLICT(hash, name) DO UPDATE SET note = excluded.note, updated_at = CURRENT_TIMESTAMP",
inserts,
)
except Exception:
cursor.executemany(
"INSERT OR REPLACE INTO notes (hash, name, note) VALUES (?, ?, ?)",
"INSERT OR REPLACE INTO note (hash, name, note) VALUES (?, ?, ?)",
inserts,
)