# Medios-Macina/cmdlets/delete_file.py
from __future__ import annotations

import json
import sqlite3
import sys
from pathlib import Path
from typing import Any, Dict, Sequence

import models
import pipeline as ctx
from config import get_local_storage_path
from helper import hydrus as hydrus_wrapper
from helper.local_library import LocalLibraryDB
from helper.logger import debug, log

from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash
def _refresh_last_search(config: Dict[str, Any]) -> None:
    """Re-run the last search-file to refresh the table after deletes."""
    try:
        # Only refresh when the previous result table came from a search command.
        get_source = getattr(ctx, "get_last_result_table_source_command", None)
        source_cmd = get_source() if get_source else None
        if source_cmd not in {"search-file", "search_file", "search"}:
            return
        get_args = getattr(ctx, "get_last_result_table_source_args", None)
        prior_args = get_args() if get_args else []
        try:
            from cmdlets import search_file as search_file_cmd  # type: ignore
        except Exception:
            return
        # Re-run the prior search to refresh items/table without disturbing history.
        search_file_cmd._run(None, prior_args, config)
        # Overlay the refreshed table so the action command's pipeline output shows it.
        try:
            refreshed_table = ctx.get_last_result_table()
            refreshed_items = ctx.get_last_result_items()
            get_subject = getattr(ctx, "get_last_result_subject", None)
            subject = get_subject() if get_subject else None
            set_overlay = getattr(ctx, "set_last_result_table_overlay", None)
            if set_overlay and refreshed_table and refreshed_items is not None:
                set_overlay(refreshed_table, refreshed_items, subject)
        except Exception:
            pass
    except Exception as exc:
        debug(f"[delete_file] search refresh failed: {exc}", file=sys.stderr)
def _cleanup_relationships(db_path: Path, file_hash: str) -> int:
"""Remove references to file_hash from other files' relationships."""
try:
conn = sqlite3.connect(db_path)
cursor = conn.cursor()
# Find all metadata entries that contain this hash in relationships
cursor.execute("SELECT file_id, relationships FROM metadata WHERE relationships LIKE ?", (f'%{file_hash}%',))
rows = cursor.fetchall()
rel_update_count = 0
for row_fid, rel_json in rows:
try:
rels = json.loads(rel_json)
changed = False
if isinstance(rels, dict):
for r_type, hashes in rels.items():
if isinstance(hashes, list) and file_hash in hashes:
hashes.remove(file_hash)
changed = True
if changed:
cursor.execute("UPDATE metadata SET relationships = ? WHERE file_id = ?", (json.dumps(rels), row_fid))
rel_update_count += 1
except Exception:
pass
conn.commit()
conn.close()
if rel_update_count > 0:
debug(f"Removed relationship references from {rel_update_count} other files", file=sys.stderr)
return rel_update_count
except Exception as e:
debug(f"Error cleaning up relationships: {e}", file=sys.stderr)
return 0
def _delete_database_entry(db_path: Path, file_path: str) -> bool:
    """Delete file and related entries from local library database.

    Removes the ``files`` row matching *file_path* plus its ``metadata``,
    ``tags`` and ``notes`` rows, then scrubs the file's hash from other
    files' relationships.

    Args:
        db_path: Path to the library.db file.
        file_path: Exact file path string as stored in database.

    Returns:
        True if successful, False otherwise.
    """
    try:
        if not db_path.exists():
            debug(f"Database not found at {db_path}", file=sys.stderr)
            return False
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            debug(f"Searching database for file_path: {file_path}", file=sys.stderr)
            # Find the file_id using the exact file_path.
            cursor.execute('SELECT id FROM files WHERE file_path = ?', (file_path,))
            result = cursor.fetchone()
            if not result:
                debug(f"File path not found in database: {file_path}", file=sys.stderr)
                return False
            file_id = result[0]
            # Get file hash before deletion to clean up relationships afterwards.
            cursor.execute('SELECT file_hash FROM files WHERE id = ?', (file_id,))
            hash_result = cursor.fetchone()
            file_hash = hash_result[0] if hash_result else None
            debug(f"Found file_id={file_id}, deleting all related records", file=sys.stderr)
            # Delete related records, then the files row itself.
            cursor.execute('DELETE FROM metadata WHERE file_id = ?', (file_id,))
            meta_count = cursor.rowcount
            cursor.execute('DELETE FROM tags WHERE file_id = ?', (file_id,))
            tags_count = cursor.rowcount
            cursor.execute('DELETE FROM notes WHERE file_id = ?', (file_id,))
            notes_count = cursor.rowcount
            cursor.execute('DELETE FROM files WHERE id = ?', (file_id,))
            files_count = cursor.rowcount
            conn.commit()
        finally:
            # BUGFIX: the connection used to leak on the not-found return and
            # on any exception raised between connect() and close().
            conn.close()
        # Clean up relationships in other files (opens its own connection,
        # so it must run after ours is closed/committed).
        if file_hash:
            _cleanup_relationships(db_path, file_hash)
        debug(f"Deleted: metadata={meta_count}, tags={tags_count}, notes={notes_count}, files={files_count}", file=sys.stderr)
        return True
    except Exception as exc:
        log(f"Database cleanup failed: {exc}", file=sys.stderr)
        import traceback
        traceback.print_exc(file=sys.stderr)
        return False
def _process_single_item(item: Any, override_hash: str | None, conserve: str | None,
                         lib_root: str | None, reason: str, config: Dict[str, Any]) -> bool:
    """Process deletion for a single item.

    Deletes the local copy (file, sidecars, database rows) unless
    ``conserve == "local"``, and the Hydrus copy unless
    ``conserve == "hydrus"`` or the item's origin is explicitly local.

    Args:
        item: Result row (dict or object) carrying hash/target/origin fields.
        override_hash: Explicit SHA256 to target instead of the item's hash.
        conserve: Which copy to keep: 'local', 'hydrus', or None for neither.
        lib_root: Local library root used for database cleanup.
        reason: Free-text deletion reason forwarded to Hydrus.
        config: Application config (used to obtain the Hydrus client).

    Returns:
        True if at least one copy (local or Hydrus) was removed.
    """
    # Handle item as either dict or object.
    if isinstance(item, dict):
        hash_hex_raw = item.get("hash_hex") or item.get("hash")
        target = item.get("target")
        origin = item.get("origin")
    else:
        hash_hex_raw = getattr(item, "hash_hex", None) or getattr(item, "hash", None)
        target = getattr(item, "target", None)
        origin = getattr(item, "origin", None)

    # For Hydrus files, the target IS the hash.
    if origin and origin.lower() == "hydrus" and not hash_hex_raw:
        hash_hex_raw = target

    hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_hex_raw)
    local_deleted = False
    local_target = bool(
        isinstance(target, str)
        and target.strip()
        and not str(target).lower().startswith(("http://", "https://"))
    )

    # Try to resolve a bare-hash target to a real local path via the library DB.
    if local_target and looks_like_hash(str(target)) and lib_root:
        # BUGFIX: capture the hash for Hydrus deletion BEFORE target may be
        # overwritten with the resolved filesystem path — normalizing a path
        # string as a hash would be meaningless.
        if not hash_hex:
            hash_hex = normalize_hash(str(target))
        try:
            db_path = Path(lib_root) / ".downlow_library.db"
            if db_path.exists():
                with LocalLibraryDB(Path(lib_root)) as db:
                    resolved = db.search_by_hash(str(target))
                    if resolved:
                        target = str(resolved)
        except Exception as e:
            debug(f"Failed to resolve hash to local path: {e}", file=sys.stderr)

    if conserve != "local" and local_target:
        path = Path(str(target))
        file_path_str = str(target)  # Keep the original string for DB matching.
        try:
            if path.exists() and path.is_file():
                path.unlink()
                local_deleted = True
                if ctx._PIPE_ACTIVE:
                    ctx.emit(f"Removed local file: {path}")
                log(f"Deleted: {path.name}", file=sys.stderr)
        except Exception as exc:
            log(f"Local delete failed: {exc}", file=sys.stderr)
        # Remove common sidecars regardless of file removal success.
        for sidecar in (path.with_suffix(".tags"), path.with_suffix(".tags.txt"),
                        path.with_suffix(".metadata"), path.with_suffix(".notes")):
            try:
                if sidecar.exists() and sidecar.is_file():
                    sidecar.unlink()
            except Exception:
                pass
        # Clean up database entry if library root provided - do this regardless
        # of file deletion success (the file may already be gone).
        if lib_root:
            lib_root_path = Path(lib_root)
            db_path = lib_root_path / ".downlow_library.db"
            # If file_path_str is a hash (because the file was already deleted
            # or the target was a hash), find the stored path by hash first.
            if looks_like_hash(file_path_str):
                try:
                    with LocalLibraryDB(lib_root_path) as db:
                        resolved = db.search_by_hash(file_path_str)
                        if resolved:
                            file_path_str = str(resolved)
                except Exception:
                    pass
            db_success = _delete_database_entry(db_path, file_path_str)
            if not db_success:
                # Entry not found — still try to purge dangling relationship
                # references if we have any usable hash.
                effective_hash = None
                if looks_like_hash(file_path_str):
                    effective_hash = file_path_str
                elif hash_hex:
                    effective_hash = hash_hex
                if effective_hash:
                    debug(f"Entry not found, but attempting to clean up relationships for hash: {effective_hash}", file=sys.stderr)
                    if _cleanup_relationships(db_path, effective_hash) > 0:
                        db_success = True
            if db_success:
                if ctx._PIPE_ACTIVE:
                    ctx.emit(f"Removed database entry: {path.name}")
                debug(f"Database entry cleaned up", file=sys.stderr)
                local_deleted = True
            else:
                debug(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
        else:
            debug(f"No lib_root provided, skipping database cleanup", file=sys.stderr)

    hydrus_deleted = False
    # Only attempt Hydrus deletion when the item is not explicitly local and
    # the caller did not ask to conserve the Hydrus copy.
    should_try_hydrus = True
    if origin and origin.lower() == "local":
        should_try_hydrus = False
    if conserve == "hydrus":
        should_try_hydrus = False

    if should_try_hydrus and hash_hex:
        try:
            client = hydrus_wrapper.get_client(config)
        except Exception as exc:
            if not local_deleted:
                log(f"Hydrus client unavailable: {exc}", file=sys.stderr)
                return False
        else:
            if client is None:
                # BUGFIX: these branches were inverted — the failure was
                # reported (and False returned) precisely when the local copy
                # HAD been deleted. An unavailable Hydrus only matters when
                # nothing was deleted locally.
                if not local_deleted:
                    log("Hydrus client unavailable", file=sys.stderr)
                    return False
            else:
                payload: Dict[str, Any] = {"hashes": [hash_hex]}
                if reason:
                    payload["reason"] = reason
                try:
                    client._post("/add_files/delete_files", data=payload)  # type: ignore[attr-defined]
                    hydrus_deleted = True
                    # BUGFIX: the truncation marker was garbled to '' in both
                    # branches; restore the ellipsis for long hashes.
                    preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
                    debug(f"Deleted from Hydrus: {preview}", file=sys.stderr)
                except Exception:
                    # If it's not in Hydrus (e.g. 404 or similar), that's fine
                    # as long as we deleted the local copy.
                    if not local_deleted:
                        return False

    if hydrus_deleted and hash_hex:
        preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
        if ctx._PIPE_ACTIVE:
            if reason:
                ctx.emit(f"Deleted {preview} (reason: {reason}).")
            else:
                ctx.emit(f"Deleted {preview}.")
    if hydrus_deleted or local_deleted:
        return True

    log("Selected result has neither Hydrus hash nor local file target")
    return False
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Entry point for the delete-file cmdlet.

    Parses flags (-hash, -conserve, -lib-root), treats any remaining tokens
    as the free-text Hydrus deletion reason, and deletes every piped item.

    Returns:
        0 if at least one item was deleted, 1 otherwise.
    """
    # Help
    try:
        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
            return 0
    except Exception:
        pass

    override_hash: str | None = None
    conserve: str | None = None
    lib_root: str | None = None
    reason_tokens: list[str] = []
    i = 0
    while i < len(args):
        token = args[i]
        low = str(token).lower()
        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args):
            override_hash = str(args[i + 1]).strip()
            i += 2
            continue
        if low in {"-conserve", "--conserve"} and i + 1 < len(args):
            value = str(args[i + 1]).strip().lower()
            if value in {"local", "hydrus"}:
                conserve = value
            else:
                # BUGFIX: an invalid value used to leave the flag and its
                # argument to fall through into the reason text; consume and
                # ignore the pair instead.
                debug(f"Ignoring invalid -conserve value: {value}", file=sys.stderr)
            i += 2
            continue
        if low in {"-lib-root", "--lib-root", "lib-root"} and i + 1 < len(args):
            lib_root = str(args[i + 1]).strip()
            i += 2
            continue
        reason_tokens.append(token)
        i += 1

    if not lib_root:
        # Fall back to the configured local storage path.
        p = get_local_storage_path(config)
        if p:
            lib_root = str(p)

    reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()

    items = []
    if isinstance(result, list):
        items = result
    elif result:
        items = [result]
    if not items:
        log("No items to delete", file=sys.stderr)
        return 1

    success_count = 0
    for item in items:
        if _process_single_item(item, override_hash, conserve, lib_root, reason, config):
            success_count += 1

    # Refresh the previous search table so deleted rows disappear from view.
    if success_count > 0:
        _refresh_last_search(config)
    return 0 if success_count > 0 else 1
# Cmdlet registration metadata consumed by the cmdlets framework (see
# ._shared.Cmdlet). Keep `usage` and `args` in sync with the flag parsing
# performed in _run().
CMDLET = Cmdlet(
    name="delete-file",
    summary="Delete a file locally and/or from Hydrus, including database entries.",
    usage="delete-file [-hash <sha256>] [-conserve <local|hydrus>] [-lib-root <path>] [reason]",
    aliases=["del-file"],
    args=[
        CmdletArg("hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
        CmdletArg("conserve", description="Choose which copy to keep: 'local' or 'hydrus'."),
        CmdletArg("lib-root", description="Path to local library root for database cleanup."),
        CmdletArg("reason", description="Optional reason for deletion (free text)."),
    ],
    details=[
        "Default removes both the local file and Hydrus file.",
        "Use -conserve local to keep the local file, or -conserve hydrus to keep it in Hydrus.",
        "Database entries are automatically cleaned up for local files.",
        "Any remaining arguments are treated as the Hydrus reason text.",
    ],
)