# Medios-Macina/cmdlets/delete_url.py
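"""Remove a URL association from a file (Hydrus or Local).

Registered as ``delete-url`` (aliases: ``del-url``, ``delete_url``).
"""
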
from __future__ import annotations
from typing import Any, Dict, Sequence
import json
import sys
from pathlib import Path
from . import register
from helper import hydrus as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, normalize_hash
from helper.logger import debug, log
from config import get_local_storage_path
from helper.local_library import LocalLibraryDB
import pipeline as ctx

CMDLET = Cmdlet(
    name="delete-url",
    summary="Remove a URL association from a file (Hydrus or Local).",
    usage="delete-url [-hash <sha256>] <url>",
    args=[
        CmdletArg("-hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
        CmdletArg("url", required=True, description="The URL to remove from the file."),
    ],
    details=[
        "- Removes the URL from the file's known URL list.",
    ],
)
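
# Illustrative invocations (assumed pipeline CLI; the exact shell syntax is not
# confirmed by this file -- only the usage string above and the comma-splitting
# in the code below are documented behaviour):
#   delete-url "https://example.com/post/123"
#   delete-url -hash <sha256> "https://example.com/a,https://example.com/b"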


def _parse_hash_and_rest(args: Sequence[str]) -> tuple[str | None, list[str]]:
    override_hash: str | None = None
    rest: list[str] = []
    i = 0
    while i < len(args):
        a = args[i]
        low = str(a).lower()
        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args):
            override_hash = str(args[i + 1]).strip()
            i += 2
            continue
        rest.append(a)
        i += 1
    return override_hash, rest
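
# For example (illustrative values):
#   _parse_hash_and_rest(["-hash", "abc123", "https://example.com/x"])
#   -> ("abc123", ["https://example.com/x"])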


@register(["del-url", "delete-url", "delete_url"])  # aliases
def delete(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # Help
    try:
        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
            return 0
    except Exception:
        pass

    override_hash, rest = _parse_hash_and_rest(args)
    url_arg = None
    if rest:
        url_arg = str(rest[0] or '').strip()

    # Normalize result to a list
    items = result if isinstance(result, list) else [result]
    if not items:
        log("No input provided.")
        return 1

    success_count = 0

    for item in items:
        target_url = url_arg
        target_file = item

        # Check for rich URL object from get-url (a dict with "url" and "source_file"):
        # use its source file as the target, and its URL unless one was given explicitly.
        if isinstance(item, dict) and "url" in item and "source_file" in item:
            if not target_url:
                target_url = item["url"]
            target_file = item["source_file"]

        if not target_url:
            continue

        if _delete_single(target_file, target_url, override_hash, config):
            success_count += 1

    if success_count == 0:
        if not url_arg:
            log("Requires a URL argument or valid selection.")
        else:
            log("Failed to delete URL(s).")
        return 1

    return 0


def _delete_single(result: Any, url: str, override_hash: str | None, config: Dict[str, Any]) -> bool:
    # Helper to get field from both dict and object
    def get_field(obj: Any, field: str, default: Any = None) -> Any:
        if isinstance(obj, dict):
            return obj.get(field, default)
        else:
            return getattr(obj, field, default)

    success = False

    # 1. Try Local Library
    file_path = get_field(result, "file_path") or get_field(result, "path")
    if file_path and not override_hash:
        try:
            path_obj = Path(file_path)
            if path_obj.exists():
                storage_path = get_local_storage_path(config)
                if storage_path:
                    with LocalLibraryDB(storage_path) as db:
                        metadata = db.get_metadata(path_obj) or {}
                        known_urls = metadata.get("known_urls") or []

                        # Handle comma-separated URLs if passed as arg,
                        # but first check if the exact url string exists (e.g. if it contains commas itself).
                        urls_to_process = []
                        if url in known_urls:
                            urls_to_process = [url]
                        else:
                            urls_to_process = [u.strip() for u in url.split(',') if u.strip()]

                        local_changed = False
                        for u in urls_to_process:
                            if u in known_urls:
                                known_urls.remove(u)
                                local_changed = True
                                ctx.emit(f"Deleted URL from local file {path_obj.name}: {u}")

                        if local_changed:
                            metadata["known_urls"] = known_urls
                            db.save_metadata(path_obj, metadata)
                            success = True
        except Exception as e:
            log(f"Error updating local library: {e}", file=sys.stderr)

    # 2. Try Hydrus
    hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(result, "hash_hex", None))
    if hash_hex:
        try:
            client = hydrus_wrapper.get_client(config)
            if client:
                urls_to_delete = [u.strip() for u in url.split(',') if u.strip()]
                for u in urls_to_delete:
                    client.delete_url(hash_hex, u)
                    preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
                    ctx.emit(f"Deleted URL from Hydrus file {preview}: {u}")
                    success = True
        except Exception as exc:
            log(f"Hydrus del-url failed: {exc}", file=sys.stderr)

    # After a successful delete, re-run get-url against the pipeline's current
    # subject when it refers to the same file (matched by hash or path), so any
    # displayed URL list stays in sync.
    if success:
        try:
            from cmdlets import get_url as get_url_cmd  # type: ignore
        except Exception:
            get_url_cmd = None

        if get_url_cmd:
            try:
                subject = ctx.get_last_result_subject()
                if subject is not None:
                    def norm(val: Any) -> str:
                        return str(val).lower()

                    target_hash = norm(hash_hex) if hash_hex else None
                    target_path = norm(file_path) if file_path else None

                    # Collect the subject's hash/path candidates from either a dict or an object.
                    subj_hashes = []
                    subj_paths = []
                    if isinstance(subject, dict):
                        subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
                        subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
                    else:
                        subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
                        subj_paths = [norm(getattr(subject, f, None)) for f in ("file_path", "path", "target") if getattr(subject, f, None)]

                    is_match = False
                    if target_hash and target_hash in subj_hashes:
                        is_match = True
                    if target_path and target_path in subj_paths:
                        is_match = True

                    if is_match:
                        refresh_args: list[str] = []
                        if hash_hex:
                            refresh_args.extend(["-hash", hash_hex])
                        get_url_cmd._run(subject, refresh_args, config)
            except Exception:
                debug("URL refresh skipped (error)")

    return success