This commit is contained in:
2026-01-15 03:20:52 -08:00
parent 3a02a52863
commit dabc8f9d51
3 changed files with 313 additions and 148 deletions

View File

@@ -16,6 +16,7 @@ import logging
import subprocess import subprocess
import shutil import shutil
import time import time
from contextlib import contextmanager
from datetime import datetime from datetime import datetime
from pathlib import Path, PurePosixPath from pathlib import Path, PurePosixPath
from threading import RLock from threading import RLock
@@ -218,6 +219,26 @@ class API_folder_store:
self._db_lock = self._shared_db_lock self._db_lock = self._shared_db_lock
self._init_db() self._init_db()
@contextmanager
def _with_db_lock(self, *, timeout: float = 8.0):
    """Acquire the shared DB lock with a bounded wait to avoid indefinite stalls.

    Yields with the lock held when acquisition succeeds within *timeout*
    seconds; otherwise yields WITHOUT the lock so callers are never blocked
    forever on a wedged writer.

    Args:
        timeout: Maximum seconds to wait on ``self._db_lock.acquire``.
    """
    locked = False
    try:
        # RLock.acquire(timeout=...) returns False on timeout rather than raising.
        locked = self._db_lock.acquire(timeout=timeout)
        if not locked:
            # NOTE(review): proceeding unlocked trades mutual exclusion for
            # liveness — concurrent writers may interleave; appears deliberate.
            mm_debug(f"[folder-db] lock acquisition timed out after {timeout:.1f}s; proceeding unlocked")
    except Exception as exc:
        # Defensive: a stock RLock should not raise here, but a swapped-in
        # lock implementation might — degrade to unlocked rather than crash.
        locked = False
        mm_debug(f"[folder-db] lock acquisition failed ({exc}); proceeding unlocked")
    try:
        yield
    finally:
        # Only release what was actually acquired; releasing an unheld lock
        # raises RuntimeError, swallowed here as a last-resort guard.
        if locked:
            try:
                self._db_lock.release()
            except RuntimeError:
                pass
def _normalize_input_path(self, file_path: Path) -> Path: def _normalize_input_path(self, file_path: Path) -> Path:
p = expand_path(file_path).resolve() p = expand_path(file_path).resolve()
# If the path is relative to the current working directory, we check if it's meant to be in the library_root. # If the path is relative to the current working directory, we check if it's meant to be in the library_root.
@@ -261,7 +282,7 @@ class API_folder_store:
def _init_db(self) -> None: def _init_db(self) -> None:
"""Initialize database connection and create tables if needed.""" """Initialize database connection and create tables if needed."""
with self._db_lock: with self._with_db_lock():
try: try:
# Ensure the library root exists; sqlite cannot create parent dirs. # Ensure the library root exists; sqlite cannot create parent dirs.
try: try:
@@ -723,7 +744,7 @@ class API_folder_store:
@_db_retry() @_db_retry()
def _update_metadata_modified_time(self, file_hash: str) -> None: def _update_metadata_modified_time(self, file_hash: str) -> None:
"""Update the time_modified timestamp for a file's metadata.""" """Update the time_modified timestamp for a file's metadata."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
@@ -770,7 +791,7 @@ class API_folder_store:
attempt = 0 attempt = 0
while True: while True:
try: try:
with self._db_lock: with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
mm_debug("[folder-db] SELECT files by file_path") mm_debug("[folder-db] SELECT files by file_path")
@@ -877,8 +898,11 @@ class API_folder_store:
def get_metadata(self, file_hash: str) -> Optional[Dict[str, Any]]: def get_metadata(self, file_hash: str) -> Optional[Dict[str, Any]]:
"""Get metadata for a file by hash.""" """Get metadata for a file by hash."""
max_attempts = 5
attempt = 0
while True:
try: try:
with self._db_lock: with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
@@ -911,6 +935,18 @@ class API_folder_store:
metadata["relationships"] = {} metadata["relationships"] = {}
return metadata return metadata
except sqlite3.OperationalError as e:
msg = str(e or "").lower()
if "database is locked" in msg and attempt < max_attempts:
attempt += 1
sleep_time = min(0.1 * (2 ** (attempt - 1)), 1.0)
time.sleep(sleep_time)
continue
logger.error(
f"Error getting metadata for hash {file_hash}: {e}",
exc_info=True
)
return None
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error getting metadata for hash {file_hash}: {e}", f"Error getting metadata for hash {file_hash}: {e}",
@@ -1113,7 +1149,7 @@ class API_folder_store:
file_type = get_type_from_ext(str(ext)) file_type = get_type_from_ext(str(ext))
with self._db_lock: with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
""" """
@@ -1163,7 +1199,7 @@ class API_folder_store:
tags: List[str] tags: List[str]
) -> None: ) -> None:
"""Save metadata and tags for a file in a single transaction.""" """Save metadata and tags for a file in a single transaction."""
with self._db_lock: with self._with_db_lock():
try: try:
abs_path = self._normalize_input_path(file_path) abs_path = self._normalize_input_path(file_path)
db_path = self._to_db_file_path(abs_path) db_path = self._to_db_file_path(abs_path)
@@ -1247,8 +1283,11 @@ class API_folder_store:
def get_tags(self, file_hash: str) -> List[str]: def get_tags(self, file_hash: str) -> List[str]:
"""Get all tags for a file by hash.""" """Get all tags for a file by hash."""
max_attempts = 5
attempt = 0
while True:
try: try:
with self._db_lock: with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
@@ -1262,6 +1301,15 @@ class API_folder_store:
) )
return [row[0] for row in cursor.fetchall()] return [row[0] for row in cursor.fetchall()]
except sqlite3.OperationalError as e:
msg = str(e or "").lower()
if "database is locked" in msg and attempt < max_attempts:
attempt += 1
sleep_time = min(0.1 * (2 ** (attempt - 1)), 1.0)
time.sleep(sleep_time)
continue
logger.error(f"Error getting tags for hash {file_hash}: {e}", exc_info=True)
return []
except Exception as e: except Exception as e:
logger.error(f"Error getting tags for hash {file_hash}: {e}", exc_info=True) logger.error(f"Error getting tags for hash {file_hash}: {e}", exc_info=True)
return [] return []
@@ -1357,7 +1405,7 @@ class API_folder_store:
@_db_retry() @_db_retry()
def add_tags(self, file_path: Path, tags: List[str]) -> None: def add_tags(self, file_path: Path, tags: List[str]) -> None:
"""Add tags to a file.""" """Add tags to a file."""
with self._db_lock: with self._with_db_lock():
try: try:
file_hash = self.get_or_create_file_entry(file_path) file_hash = self.get_or_create_file_entry(file_path)
cursor = self.connection.cursor() cursor = self.connection.cursor()
@@ -1425,7 +1473,7 @@ class API_folder_store:
@_db_retry() @_db_retry()
def remove_tags(self, file_path: Path, tags: List[str]) -> None: def remove_tags(self, file_path: Path, tags: List[str]) -> None:
"""Remove specific tags from a file.""" """Remove specific tags from a file."""
with self._db_lock: with self._with_db_lock():
try: try:
file_hash = self.get_or_create_file_entry(file_path) file_hash = self.get_or_create_file_entry(file_path)
cursor = self.connection.cursor() cursor = self.connection.cursor()
@@ -1452,7 +1500,7 @@ class API_folder_store:
@_db_retry() @_db_retry()
def add_tags_to_hash(self, file_hash: str, tags: List[str]) -> None: def add_tags_to_hash(self, file_hash: str, tags: List[str]) -> None:
"""Add tags to a file by hash.""" """Add tags to a file by hash."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
@@ -1495,7 +1543,7 @@ class API_folder_store:
@_db_retry() @_db_retry()
def remove_tags_from_hash(self, file_hash: str, tags: List[str]) -> None: def remove_tags_from_hash(self, file_hash: str, tags: List[str]) -> None:
"""Remove specific tags from a file by hash.""" """Remove specific tags from a file by hash."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
@@ -1529,7 +1577,7 @@ class API_folder_store:
Any] Any]
) -> None: ) -> None:
"""Update metadata for a file by hash.""" """Update metadata for a file by hash."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
@@ -1582,7 +1630,7 @@ class API_folder_store:
related_file_path: Path to the related file related_file_path: Path to the related file
rel_type: Type of relationship ('king', 'alt', 'related') rel_type: Type of relationship ('king', 'alt', 'related')
""" """
with self._db_lock: with self._with_db_lock():
try: try:
str_path = str(file_path.resolve()) str_path = str(file_path.resolve())
str_related_path = str(related_file_path.resolve()) str_related_path = str(related_file_path.resolve())
@@ -1734,54 +1782,86 @@ class API_folder_store:
) )
return [] return []
def get_note(self, file_hash: str) -> Optional[str]: def get_note(self, file_hash: str, name: str = "default") -> Optional[str]:
"""Get the default note for a file by hash.""" """Get a named note (default note by default) for a file hash."""
try: normalized_hash = str(file_hash or "").strip().lower()
notes = self.get_notes(file_hash) if len(normalized_hash) != 64:
if not notes:
return None
return notes.get("default")
except Exception as e:
logger.error(f"Error getting note for hash {file_hash}: {e}", exc_info=True)
return None return None
def get_notes(self, file_hash: str) -> Dict[str, str]: note_name = str(name or "default").strip() or "default"
"""Get all notes for a file by hash.""" max_attempts = 5
import time
for attempt in range(max_attempts):
try: try:
with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
"SELECT name, note FROM note WHERE hash = ? ORDER BY name ASC", "SELECT note FROM note WHERE hash = ? AND name = ?",
(file_hash, (normalized_hash,
note_name),
)
row = cursor.fetchone()
if row:
return row[0]
if note_name != "default":
return None
cursor.execute(
"SELECT note FROM note WHERE hash = ? ORDER BY updated_at DESC LIMIT 1",
(normalized_hash,
), ),
) )
out: Dict[str, row = cursor.fetchone()
str] = {} return row[0] if row else None
for name, note in cursor.fetchall() or []: except sqlite3.OperationalError as e:
if not name: msg = str(e or "").lower()
if "database is locked" in msg and attempt < (max_attempts - 1):
sleep_time = min(0.1 * (2 ** attempt), 1.0)
time.sleep(sleep_time)
continue continue
out[str(name)] = str(note or "")
return out
except Exception as e:
logger.error( logger.error(
f"Error getting notes for hash {file_hash}: {e}", f"Error getting note for hash {file_hash}: {e}",
exc_info=True exc_info=True
) )
return {} return None
except Exception as e:
logger.error(
f"Error getting note for hash {file_hash}: {e}",
exc_info=True
)
return None
return None
def save_note(self, file_path: Path, note: str) -> None: def set_note_by_hash(self, file_hash: str, name: str, note: str) -> None:
"""Save the default note for a file.""" """Set a named note using a known file hash (no re-hash)."""
self.set_note(file_path, "default", note)
def set_note(self, file_path: Path, name: str, note: str) -> None:
"""Set a named note for a file."""
with self._db_lock:
try:
note_name = str(name or "").strip() note_name = str(name or "").strip()
normalized_hash = str(file_hash or "").strip().lower()
if not note_name: if not note_name:
raise ValueError("Note name is required") raise ValueError("Note name is required")
if len(normalized_hash) != 64:
raise ValueError("File hash must be a 64-character hex string")
file_hash = self.get_or_create_file_entry(file_path) max_attempts = 5
import time
for attempt in range(max_attempts):
try:
with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(
"SELECT 1 FROM file WHERE hash = ?",
(normalized_hash,
),
)
exists = cursor.fetchone() is not None
if not exists:
raise ValueError(
f"Hash {normalized_hash} not found in file table"
)
cursor.execute( cursor.execute(
""" """
INSERT INTO note (hash, name, note) INSERT INTO note (hash, name, note)
@@ -1790,19 +1870,53 @@ class API_folder_store:
note = excluded.note, note = excluded.note,
updated_at = CURRENT_TIMESTAMP updated_at = CURRENT_TIMESTAMP
""", """,
(file_hash, (normalized_hash,
note_name, note_name,
note), note),
) )
self.connection.commit() self.connection.commit()
logger.debug(f"Saved note '{note_name}' for {file_path}") logger.debug(
f"Saved note '{note_name}' for hash {normalized_hash}"
)
return
except sqlite3.OperationalError as e:
msg = str(e or "").lower()
if "database is locked" in msg and attempt < (max_attempts - 1):
sleep_time = min(0.1 * (2 ** attempt), 1.0)
time.sleep(sleep_time)
continue
logger.error(
f"Error saving note for hash {normalized_hash}: {e}",
exc_info=True
)
raise
except Exception as e:
logger.error(
f"Error saving note for hash {normalized_hash}: {e}",
exc_info=True
)
raise
def set_note(self, file_path: Path, name: str, note: str) -> None:
    """Store a named note for *file_path*, computing the file hash if needed.

    Args:
        file_path: Path of the file the note attaches to.
        name: Note name; must be non-empty after stripping.
        note: Note text to persist.

    Raises:
        ValueError: If *name* is empty or whitespace-only.
        Exception: Re-raised (after logging) from the underlying hash lookup
            or note write.
    """
    cleaned_name = str(name or "").strip()
    if not cleaned_name:
        raise ValueError("Note name is required")
    try:
        entry_hash = self.get_or_create_file_entry(file_path)
        self.set_note_by_hash(entry_hash, cleaned_name, note)
    except Exception as e:
        logger.error(f"Error saving note for {file_path}: {e}", exc_info=True)
        raise
def save_note(self, file_path: Path, note: str, name: str = "default") -> None:
    """Backward-compatible wrapper: persist *note* under *name* for *file_path*.

    Delegates directly to :meth:`set_note`; kept so older callers that pass
    only (path, note) keep writing the "default" note.
    """
    self.set_note(file_path, name, note)
def delete_note(self, file_hash: str, name: str) -> None: def delete_note(self, file_hash: str, name: str) -> None:
"""Delete a named note for a file by hash.""" """Delete a named note for a file by hash."""
with self._db_lock: with self._with_db_lock():
try: try:
note_name = str(name or "").strip() note_name = str(name or "").strip()
if not note_name: if not note_name:
@@ -1854,7 +1968,7 @@ class API_folder_store:
def search_hash(self, file_hash: str) -> Optional[Path]: def search_hash(self, file_hash: str) -> Optional[Path]:
"""Search for a file by hash.""" """Search for a file by hash."""
try: try:
with self._db_lock: with self._with_db_lock():
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
@@ -1939,7 +2053,7 @@ class API_folder_store:
backlinks in other files so no file retains dangling references to the backlinks in other files so no file retains dangling references to the
deleted hash. deleted hash.
""" """
with self._db_lock: with self._with_db_lock():
try: try:
abs_path = self._normalize_input_path(file_path) abs_path = self._normalize_input_path(file_path)
str_path = self._to_db_file_path(abs_path) str_path = self._to_db_file_path(abs_path)
@@ -2048,7 +2162,7 @@ class API_folder_store:
pipe: Optional[str] = None, pipe: Optional[str] = None,
) -> int: ) -> int:
"""Insert a new worker entry into the database.""" """Insert a new worker entry into the database."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute( cursor.execute(
@@ -2085,7 +2199,7 @@ class API_folder_store:
def update_worker(self, worker_id: str, **kwargs) -> bool: def update_worker(self, worker_id: str, **kwargs) -> bool:
"""Update worker entry with given fields.""" """Update worker entry with given fields."""
with self._db_lock: with self._with_db_lock():
try: try:
allowed_fields = { allowed_fields = {
"status", "status",
@@ -2129,7 +2243,7 @@ class API_folder_store:
def update_worker_status(self, worker_id: str, status: str) -> int: def update_worker_status(self, worker_id: str, status: str) -> int:
"""Update worker status and return its database ID.""" """Update worker status and return its database ID."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
@@ -2208,7 +2322,7 @@ class API_folder_store:
def delete_worker(self, worker_id: str) -> bool: def delete_worker(self, worker_id: str) -> bool:
"""Delete a worker entry.""" """Delete a worker entry."""
with self._db_lock: with self._with_db_lock():
try: try:
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute("DELETE FROM worker WHERE worker_id = ?", cursor.execute("DELETE FROM worker WHERE worker_id = ?",
@@ -2316,7 +2430,7 @@ class API_folder_store:
"""Append text to a worker's stdout log and timeline.""" """Append text to a worker's stdout log and timeline."""
if not text: if not text:
return True return True
with self._db_lock: with self._with_db_lock():
try: try:
# Check if connection is valid # Check if connection is valid
if not self.connection: if not self.connection:

View File

@@ -2002,18 +2002,26 @@ class Folder(Store):
if not self._location: if not self._location:
return False return False
file_hash = str(file_identifier or "").strip().lower() file_hash = str(file_identifier or "").strip().lower()
note_name = str(name or "").strip()
if not _normalize_hash(file_hash): if not _normalize_hash(file_hash):
return False return False
if not note_name:
return False
with API_folder_store(Path(self._location)) as db:
setter_hash = getattr(db, "set_note_by_hash", None)
if callable(setter_hash):
setter_hash(file_hash, note_name, str(text))
return True
file_path = self.get_file(file_hash, **kwargs) file_path = self.get_file(file_hash, **kwargs)
if not file_path or not isinstance(file_path, if not file_path or not isinstance(file_path,
Path) or not file_path.exists(): Path) or not file_path.exists():
return False return False
with API_folder_store(Path(self._location)) as db:
setter = getattr(db, "set_note", None) setter = getattr(db, "set_note", None)
if callable(setter): if callable(setter):
setter(file_path, str(name), str(text)) setter(file_path, note_name, str(text))
return True return True
db.save_note(file_path, str(text)) db.save_note(file_path, str(text))
return True return True

View File

@@ -691,6 +691,27 @@ class Add_File(Cmdlet):
# Fallback: at least show the add-file payloads as a display overlay # Fallback: at least show the add-file payloads as a display overlay
from SYS.result_table import ResultTable from SYS.result_table import ResultTable
# If this was a single-item ingest, render the detailed item display
# directly from the payload to avoid DB refresh contention.
detail_rendered = False
if len(collected_payloads) == 1:
try:
from SYS.rich_display import render_item_details_panel
render_item_details_panel(collected_payloads[0])
table = ResultTable("Result")
table.add_result(collected_payloads[0])
setattr(table, "_rendered_by_cmdlet", True)
ctx.set_last_result_table_overlay(
table,
collected_payloads,
subject=collected_payloads[0]
)
detail_rendered = True
except Exception:
detail_rendered = False
if not detail_rendered:
table = ResultTable("Result") table = ResultTable("Result")
for payload in collected_payloads: for payload in collected_payloads:
table.add_result(payload) table.add_result(payload)
@@ -1875,6 +1896,12 @@ class Add_File(Cmdlet):
except Exception: except Exception:
hydrus_like_backend = False hydrus_like_backend = False
is_folder_backend = False
try:
is_folder_backend = type(backend).__name__ == "Folder"
except Exception:
is_folder_backend = False
# Prepare metadata from pipe_obj and sidecars # Prepare metadata from pipe_obj and sidecars
tags, url, title, f_hash = Add_File._prepare_metadata( tags, url, title, f_hash = Add_File._prepare_metadata(
result, media_path, pipe_obj, config result, media_path, pipe_obj, config
@@ -1996,7 +2023,20 @@ class Add_File(Cmdlet):
# For Hydrus, get_file() returns a browser URL (often with an access key) and should # For Hydrus, get_file() returns a browser URL (often with an access key) and should
# only be invoked by explicit user commands (e.g. get-file). # only be invoked by explicit user commands (e.g. get-file).
try: try:
if type(backend).__name__ == "Folder": if is_folder_backend:
# Avoid extra DB round-trips for Folder; we can derive the stored path.
hash_for_path: Optional[str] = None
if isinstance(file_identifier, str) and len(file_identifier) == 64:
hash_for_path = file_identifier
elif f_hash and isinstance(f_hash, str) and len(f_hash) == 64:
hash_for_path = f_hash
if hash_for_path:
suffix = media_path.suffix if media_path else ""
filename = f"{hash_for_path}{suffix}" if suffix else hash_for_path
location_path = getattr(backend, "_location", None)
if location_path:
stored_path = str(Path(location_path) / filename)
else:
maybe_path = backend.get_file(file_identifier) maybe_path = backend.get_file(file_identifier)
if isinstance(maybe_path, Path): if isinstance(maybe_path, Path):
stored_path = str(maybe_path) stored_path = str(maybe_path)
@@ -2050,6 +2090,8 @@ class Add_File(Cmdlet):
pass pass
else: else:
try: try:
# Folder.add_file already persists URLs, avoid extra DB traffic here.
if not is_folder_backend:
backend.add_url(resolved_hash, list(url)) backend.add_url(resolved_hash, list(url))
except Exception: except Exception:
pass pass
@@ -2107,6 +2149,7 @@ class Add_File(Cmdlet):
meta: Dict[str, meta: Dict[str,
Any] = {} Any] = {}
try: try:
if not is_folder_backend:
meta = backend.get_metadata(resolved_hash) or {} meta = backend.get_metadata(resolved_hash) or {}
except Exception: except Exception:
meta = {} meta = {}