nose
2025-12-01 01:10:16 -08:00
parent 2b93edac10
commit 6b9ed7d4ab
17 changed files with 1644 additions and 470 deletions


@@ -11,11 +11,50 @@ from pathlib import Path
import models
import pipeline as ctx
from helper import hydrus as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, normalize_hash
from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash
from config import get_local_storage_path
from helper.local_library import LocalLibraryDB
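looks_like_hash and normalize_hash come from cmdlets/_shared, which is not part of this diff. A minimal sketch of what they are assumed to do, treating a 64-character hex string as a SHA-256 digest (the form Hydrus uses); the _sketch suffix marks these as illustrations, not the real helpers:

import re

_HASH_RE = re.compile(r"[0-9a-fA-F]{64}")

def looks_like_hash_sketch(value: str) -> bool:
    """Return True when value resembles a SHA-256 hex digest."""
    return bool(_HASH_RE.fullmatch(value.strip()))

def normalize_hash_sketch(value: str) -> str | None:
    """Lower-case a hash-like string; return None when it is not one."""
    candidate = value.strip().lower()
    return candidate if _HASH_RE.fullmatch(candidate) else None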
def _cleanup_relationships(db_path: Path, file_hash: str) -> int:
"""Remove references to file_hash from other files' relationships."""
try:
conn = sqlite3.connect(db_path)
cursor = conn.cursor()
# Find all metadata entries that contain this hash in relationships
cursor.execute("SELECT file_id, relationships FROM metadata WHERE relationships LIKE ?", (f'%{file_hash}%',))
rows = cursor.fetchall()
rel_update_count = 0
for row_fid, rel_json in rows:
try:
rels = json.loads(rel_json)
changed = False
if isinstance(rels, dict):
for r_type, hashes in rels.items():
if isinstance(hashes, list) and file_hash in hashes:
hashes.remove(file_hash)
changed = True
if changed:
cursor.execute("UPDATE metadata SET relationships = ? WHERE file_id = ?", (json.dumps(rels), row_fid))
rel_update_count += 1
except Exception:
pass
conn.commit()
conn.close()
if rel_update_count > 0:
debug(f"Removed relationship references from {rel_update_count} other files", file=sys.stderr)
return rel_update_count
except Exception as e:
debug(f"Error cleaning up relationships: {e}", file=sys.stderr)
return 0
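A self-contained illustration of the data shape _cleanup_relationships assumes: each metadata row stores relationships as a JSON object mapping a relationship type to a list of hashes, and the LIKE pre-filter narrows the candidate rows before the JSON is pruned. The schema here is made up for the demo; the real one lives in helper/local_library.py.

import json
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE metadata (file_id INTEGER, relationships TEXT)")
conn.execute(
    "INSERT INTO metadata VALUES (1, ?)",
    (json.dumps({"duplicates": ["aaa111", "bbb222"], "alternates": []}),),
)
conn.commit()

# Same pattern as above: pre-filter with LIKE, then prune the JSON lists.
rows = conn.execute(
    "SELECT file_id, relationships FROM metadata WHERE relationships LIKE ?",
    ("%aaa111%",),
).fetchall()
for file_id, rel_json in rows:
    rels = json.loads(rel_json)
    for hashes in rels.values():
        if isinstance(hashes, list) and "aaa111" in hashes:
            hashes.remove("aaa111")
    conn.execute(
        "UPDATE metadata SET relationships = ? WHERE file_id = ?",
        (json.dumps(rels), file_id),
    )
conn.commit()
print(conn.execute("SELECT relationships FROM metadata").fetchone()[0])
# {"duplicates": ["bbb222"], "alternates": []}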
def _delete_database_entry(db_path: Path, file_path: str) -> bool:
"""Delete file and related entries from local library database.
@@ -28,35 +67,31 @@ def _delete_database_entry(db_path: Path, file_path: str) -> bool:
"""
try:
if not db_path.exists():
log(f"Database not found at {db_path}", file=sys.stderr)
debug(f"Database not found at {db_path}", file=sys.stderr)
return False
conn = sqlite3.connect(db_path)
cursor = conn.cursor()
log(f"Searching database for file_path: {file_path}", file=sys.stderr)
debug(f"Searching database for file_path: {file_path}", file=sys.stderr)
# Find the file_id using the exact file_path
cursor.execute('SELECT id FROM files WHERE file_path = ?', (file_path,))
result = cursor.fetchone()
if not result:
log(f"ERROR: File path not found in database", file=sys.stderr)
log(f"Expected: {file_path}", file=sys.stderr)
# Debug: show sample entries
cursor.execute('SELECT id, file_path FROM files LIMIT 3')
samples = cursor.fetchall()
if samples:
log(f"Sample DB entries:", file=sys.stderr)
for fid, fpath in samples:
log(f"{fid}: {fpath}", file=sys.stderr)
debug(f"File path not found in database: {file_path}", file=sys.stderr)
conn.close()
return False
file_id = result[0]
log(f"Found file_id={file_id}, deleting all related records", file=sys.stderr)
# Get file hash before deletion to clean up relationships
cursor.execute('SELECT file_hash FROM files WHERE id = ?', (file_id,))
hash_result = cursor.fetchone()
file_hash = hash_result[0] if hash_result else None
debug(f"Found file_id={file_id}, deleting all related records", file=sys.stderr)
# Delete related records
cursor.execute('DELETE FROM metadata WHERE file_id = ?', (file_id,))
@@ -74,7 +109,11 @@ def _delete_database_entry(db_path: Path, file_path: str) -> bool:
conn.commit()
conn.close()
log(f"Deleted: metadata={meta_count}, tags={tags_count}, notes={notes_count}, files={files_count}", file=sys.stderr)
# Clean up relationships in other files
if file_hash:
_cleanup_relationships(db_path, file_hash)
debug(f"Deleted: metadata={meta_count}, tags={tags_count}, notes={notes_count}, files={files_count}", file=sys.stderr)
return True
except Exception as exc:
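Condensed sketch of the deletion order used by _delete_database_entry: capture file_hash first (it is needed for the relationship cleanup), then delete child rows before the parent files row inside one transaction. The tags and notes table names are inferred from the counters logged above and may differ from the real schema.

import sqlite3
from pathlib import Path

def delete_entry_sketch(db_path: Path, file_path: str) -> str | None:
    # Returns the file hash so the caller can prune relationship references.
    with sqlite3.connect(db_path) as conn:
        row = conn.execute(
            "SELECT id, file_hash FROM files WHERE file_path = ?", (file_path,)
        ).fetchone()
        if row is None:
            return None
        file_id, file_hash = row
        for table in ("metadata", "tags", "notes"):
            conn.execute(f"DELETE FROM {table} WHERE file_id = ?", (file_id,))
        conn.execute("DELETE FROM files WHERE id = ?", (file_id,))
        return file_hash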
@@ -106,6 +145,24 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
local_deleted = False
local_target = isinstance(target, str) and target.strip() and not str(target).lower().startswith(("http://", "https://"))
# Try to resolve local path if target looks like a hash and we have a library root
if local_target and looks_like_hash(str(target)) and lib_root:
try:
db_path = Path(lib_root) / ".downlow_library.db"
if db_path.exists():
# LocalLibraryDB is imported at module level, so use it to resolve the hash to a path.
with LocalLibraryDB(Path(lib_root)) as db:
resolved = db.search_by_hash(str(target))
if resolved:
# Capture the hash for a possible Hydrus delete before target is replaced with the resolved path
if not hash_hex:
hash_hex = normalize_hash(str(target))
target = str(resolved)
except Exception as e:
debug(f"Failed to resolve hash to local path: {e}", file=sys.stderr)
if conserve != "local" and local_target:
path = Path(str(target))
file_path_str = str(target) # Keep the original string for DB matching
@@ -132,18 +189,59 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
if lib_root:
lib_root_path = Path(lib_root)
db_path = lib_root_path / ".downlow_library.db"
if _delete_database_entry(db_path, file_path_str):
# If file_path_str is actually a hash (the target was a hash, or the file is already gone),
# resolve it to a real path via the DB before attempting the delete
if looks_like_hash(file_path_str):
try:
with LocalLibraryDB(lib_root_path) as db:
resolved = db.search_by_hash(file_path_str)
if resolved:
file_path_str = str(resolved)
except Exception:
pass
db_success = _delete_database_entry(db_path, file_path_str)
if not db_success:
# If deletion failed (e.g. not found), but we have a hash, try to clean up relationships anyway
effective_hash = None
if looks_like_hash(file_path_str):
effective_hash = file_path_str
elif hash_hex:
effective_hash = hash_hex
if effective_hash:
debug(f"Entry not found, but attempting to clean up relationships for hash: {effective_hash}", file=sys.stderr)
if _cleanup_relationships(db_path, effective_hash) > 0:
db_success = True
if db_success:
if ctx._PIPE_ACTIVE:
ctx.emit(f"Removed database entry: {path.name}")
log(f"Database entry cleaned up", file=sys.stderr)
local_deleted = True # Mark as deleted if DB cleanup succeeded
debug(f"Database entry cleaned up", file=sys.stderr)
local_deleted = True
else:
log(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
debug(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
else:
debug(f"No lib_root provided, skipping database cleanup", file=sys.stderr)
hydrus_deleted = False
if conserve != "hydrus" and hash_hex:
# Only attempt Hydrus deletion when the item did not originate locally;
# reported issue: deleting a local-origin file was still triggering a Hydrus delete.
should_try_hydrus = True
if origin and origin.lower() == "local":
should_try_hydrus = False
# If conserve is set to hydrus, definitely don't delete
if conserve == "hydrus":
should_try_hydrus = False
if should_try_hydrus and hash_hex:
try:
client = hydrus_wrapper.get_client(config)
except Exception as exc:
@@ -153,6 +251,9 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
else:
if client is None:
if local_deleted:
# If we deleted locally, we don't care if Hydrus is unavailable
pass
else:
log("Hydrus client unavailable", file=sys.stderr)
return False
else:
@@ -165,7 +266,8 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
preview = hash_hex[:12] + ('...' if len(hash_hex) > 12 else '')
debug(f"Deleted from Hydrus: {preview}", file=sys.stderr)
except Exception as exc:
log(f"Hydrus delete failed: {exc}", file=sys.stderr)
# If it's not in Hydrus (e.g. 404 or similar), that's fine
# log(f"Hydrus delete failed: {exc}", file=sys.stderr)
if not local_deleted:
return False
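The Hydrus gating above, expressed as a small predicate for readability; origin is whatever origin tag the pipeline attached to the item (e.g. "local" or "hydrus"), and the function is illustrative rather than part of the commit:

def should_try_hydrus_sketch(origin: str | None, conserve: str | None, hash_hex: str | None) -> bool:
    if not hash_hex:
        return False  # nothing to delete without a hash
    if conserve == "hydrus":
        return False  # caller explicitly wants the Hydrus copy kept
    if origin and origin.lower() == "local":
        return False  # local-origin items never touch Hydrus
    return True

assert should_try_hydrus_sketch("local", None, "ab" * 32) is False
assert should_try_hydrus_sketch("hydrus", None, "ab" * 32) is True
assert should_try_hydrus_sketch(None, "hydrus", "ab" * 32) is False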
@@ -218,6 +320,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
reason_tokens.append(token)
i += 1
if not lib_root:
# Try to get from config
p = get_local_storage_path(config)
if p:
lib_root = str(p)
reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()
items = []
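Sketch of the lib_root fallback introduced above: when the command line did not supply a library root, fall back to the configured local storage path (get_local_storage_path is assumed to return a Path or None):

def resolve_lib_root_sketch(cli_value: str | None, config: dict) -> str | None:
    if cli_value:
        return cli_value
    p = get_local_storage_path(config)
    return str(p) if p else None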