delete-tag: route tag deletion through the configured Store backend
@@ -8,12 +8,12 @@ import sys
 from . import register
 import models
 import pipeline as ctx
-from helper import hydrus as hydrus_wrapper
-from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, fetch_hydrus_metadata, should_show_help, get_field
-from helper.logger import debug, log
+from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, should_show_help, get_field
+from SYS.logger import debug, log
+from Store import Store


-def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, config: Dict[str, Any]) -> None:
+def _refresh_tag_view_if_current(file_hash: str | None, store_name: str | None, path: str | None, config: Dict[str, Any]) -> None:
     """If the current subject matches the target, refresh tags via get-tag."""
     try:
         from cmdlets import get_tag as get_tag_cmd  # type: ignore
@@ -28,17 +28,17 @@ def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, co
         def norm(val: Any) -> str:
             return str(val).lower()

-        target_hash = norm(hash_hex) if hash_hex else None
-        target_path = norm(file_path) if file_path else None
+        target_hash = norm(file_hash) if file_hash else None
+        target_path = norm(path) if path else None

         subj_hashes: list[str] = []
         subj_paths: list[str] = []
         if isinstance(subject, dict):
-            subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
-            subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
+            subj_hashes = [norm(v) for v in [subject.get("hash")] if v]
+            subj_paths = [norm(v) for v in [subject.get("path"), subject.get("target")] if v]
         else:
-            subj_hashes = [norm(get_field(subject, f)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if get_field(subject, f)]
-            subj_paths = [norm(get_field(subject, f)) for f in ("file_path", "path", "target") if get_field(subject, f)]
+            subj_hashes = [norm(get_field(subject, f)) for f in ("hash",) if get_field(subject, f)]
+            subj_paths = [norm(get_field(subject, f)) for f in ("path", "target") if get_field(subject, f)]

         is_match = False
         if target_hash and target_hash in subj_hashes:
@@ -49,20 +49,20 @@ def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, co
             return

         refresh_args: list[str] = []
-        if hash_hex:
-            refresh_args.extend(["-hash", hash_hex])
+        if file_hash:
+            refresh_args.extend(["-hash", file_hash])
         get_tag_cmd._run(subject, refresh_args, config)
     except Exception:
         pass


 CMDLET = Cmdlet(
-    name="delete-tags",
-    summary="Remove tags from a Hydrus file.",
-    usage="del-tags [-hash <sha256>] <tag>[,<tag>...]",
-    alias=["del-tag", "del-tags", "delete-tag"],
+    name="delete-tag",
+    summary="Remove tags from a file in a store.",
+    usage="delete-tag -store <store> [-hash <sha256>] <tag>[,<tag>...]",
     arg=[
         SharedArgs.HASH,
+        SharedArgs.STORE,
         CmdletArg("<tag>[,<tag>...]", required=True, description="One or more tags to remove. Comma- or space-separated."),
     ],
     detail=[
@@ -71,7 +71,7 @@ CMDLET = Cmdlet(
     ],
 )

-@register(["del-tag", "del-tags", "delete-tag", "delete-tags"]) # Still needed for backward compatibility
+@register(["delete-tag"])
 def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     # Help
     if should_show_help(args):
@@ -94,6 +94,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

     # Parse -hash override and collect tags from remaining args
     override_hash: str | None = None
+    override_store: str | None = None
     rest: list[str] = []
     i = 0
     while i < len(args):
@@ -103,6 +104,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             override_hash = str(args[i + 1]).strip()
             i += 2
             continue
+        if low in {"-store", "--store", "store"} and i + 1 < len(args):
+            override_store = str(args[i + 1]).strip()
+            i += 2
+            continue
         rest.append(a)
         i += 1

@@ -110,7 +115,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     # @5 or @{2,5,8} to delete tags from ResultTable by index
     tags_from_at_syntax = []
     hash_from_at_syntax = None
-    file_path_from_at_syntax = None
+    path_from_at_syntax = None
+    store_from_at_syntax = None

     if rest and str(rest[0]).startswith("@"):
         selector_arg = str(rest[0])
@@ -142,9 +148,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                     tags_from_at_syntax.append(tag_name)
                     # Also get hash from first item for consistency
                     if not hash_from_at_syntax:
-                        hash_from_at_syntax = get_field(item, 'hash_hex')
-                    if not file_path_from_at_syntax:
-                        file_path_from_at_syntax = get_field(item, 'file_path')
+                        hash_from_at_syntax = get_field(item, 'hash')
+                    if not path_from_at_syntax:
+                        path_from_at_syntax = get_field(item, 'path')
+                    if not store_from_at_syntax:
+                        store_from_at_syntax = get_field(item, 'store')

         if not tags_from_at_syntax:
             log(f"No tags found at indices: {indices}")
@@ -201,10 +209,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         # This preserves the existing logic for @ selection.

         tags = tags_from_at_syntax
-        hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_from_at_syntax)
-        file_path = file_path_from_at_syntax
+        file_hash = normalize_hash(override_hash) if override_hash else normalize_hash(hash_from_at_syntax)
+        path = path_from_at_syntax
+        store_name = override_store or store_from_at_syntax

-        if _process_deletion(tags, hash_hex, file_path, config):
+        if _process_deletion(tags, file_hash, path, store_name, config):
             success_count += 1

     else:
@@ -216,13 +225,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

         for item in items_to_process:
             tags_to_delete = []
-            item_hash = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(item, "hash_hex"))
+            item_hash = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(item, "hash"))
             item_path = (
                 get_field(item, "path")
-                or get_field(item, "file_path")
                 or get_field(item, "target")
             )
-            item_source = get_field(item, "source")
+            item_store = override_store or get_field(item, "store")

             if hasattr(item, '__class__') and item.__class__.__name__ == 'TagItem':
                 # It's a TagItem
@@ -248,66 +256,43 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                 # but inside the loop we might have mixed items? Unlikely.
                 continue

-            if tags_to_delete and (item_hash or item_path):
-                if _process_deletion(tags_to_delete, item_hash, item_path, config, source=item_source):
+            if tags_to_delete:
+                if _process_deletion(tags_to_delete, item_hash, item_path, item_store, config):
                     success_count += 1

     if success_count > 0:
         return 0
     return 1


-def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | None, config: Dict[str, Any], source: str | None = None) -> bool:
+def _process_deletion(tags: list[str], file_hash: str | None, path: str | None, store_name: str | None, config: Dict[str, Any]) -> bool:
     """Helper to execute the deletion logic for a single target."""

     if not tags:
         return False

+    if not store_name:
+        log("Store is required (use -store or pipe a result with store)", file=sys.stderr)
+        return False
+
+    resolved_hash = normalize_hash(file_hash) if file_hash else None
+    if not resolved_hash and path:
+        try:
+            from SYS.utils import sha256_file
+            resolved_hash = sha256_file(Path(path))
+        except Exception:
+            resolved_hash = None
+
+    if not resolved_hash:
+        log("Item does not include a usable hash (and hash could not be derived from path)", file=sys.stderr)
+        return False
+
     def _fetch_existing_tags() -> list[str]:
-        existing: list[str] = []
-        # Prefer local DB when we have a path and not explicitly hydrus
-        if file_path and (source == "local" or (source != "hydrus" and not hash_hex)):
-            try:
-                from helper.folder_store import FolderDB
-                from config import get_local_storage_path
-                path_obj = Path(file_path)
-                local_root = get_local_storage_path(config) or path_obj.parent
-                with FolderDB(local_root) as db:
-                    file_hash = db.get_file_hash(path_obj)
-                    existing = db.get_tags(file_hash) if file_hash else []
-            except Exception:
-                existing = []
-        elif hash_hex:
-            meta, _ = fetch_hydrus_metadata(
-                config, hash_hex,
-                include_service_keys_to_tags=True,
-                include_file_url=False,
-            )
-            if isinstance(meta, dict):
-                tags_payload = meta.get("tags")
-                if isinstance(tags_payload, dict):
-                    seen: set[str] = set()
-                    for svc_data in tags_payload.values():
-                        if not isinstance(svc_data, dict):
-                            continue
-                        display = svc_data.get("display_tags")
-                        if isinstance(display, list):
-                            for t in display:
-                                if isinstance(t, (str, bytes)):
-                                    val = str(t).strip()
-                                    if val and val not in seen:
-                                        seen.add(val)
-                                        existing.append(val)
-                        storage = svc_data.get("storage_tags")
-                        if isinstance(storage, dict):
-                            current_list = storage.get("0") or storage.get(0)
-                            if isinstance(current_list, list):
-                                for t in current_list:
-                                    if isinstance(t, (str, bytes)):
-                                        val = str(t).strip()
-                                        if val and val not in seen:
-                                            seen.add(val)
-                                            existing.append(val)
-        return existing
+        try:
+            backend = Store(config)[store_name]
+            existing, _src = backend.get_tag(resolved_hash, config=config)
+            return list(existing or [])
+        except Exception:
+            return []

     # Safety: only block if this deletion would remove the final title tag
     title_tags = [t for t in tags if isinstance(t, str) and t.lower().startswith("title:")]
@@ -320,61 +305,17 @@ def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | No
         log("Cannot delete the last title: tag. Add a replacement title first (add-tag \"title:new title\").", file=sys.stderr)
         return False

-    if not hash_hex and not file_path:
-        log("Item does not include a hash or file path")
-        return False
-
-    # Handle local file tag deletion
-    if file_path and (source == "local" or (not hash_hex and source != "hydrus")):
-        try:
-            from helper.folder_store import FolderDB
-            from pathlib import Path
-
-            path_obj = Path(file_path)
-            if not path_obj.exists():
-                log(f"File not found: {file_path}")
-                return False
-
-            # Try to get local storage path from config
-            from config import get_local_storage_path
-            local_root = get_local_storage_path(config)
-
-            if not local_root:
-                # Fallback: assume file is in a library root or use its parent
-                local_root = path_obj.parent
-
-            with FolderDB(local_root) as db:
-                db.remove_tags(path_obj, tags)
-            debug(f"Removed {len(tags)} tag(s) from {path_obj.name} (local)")
-            _refresh_tag_view_if_current(hash_hex, file_path, config)
-            return True
-
-        except Exception as exc:
-            log(f"Failed to remove local tags: {exc}")
-            return False
-
-    # Hydrus deletion logic
-    if not hash_hex:
-        return False
-
     try:
-        service_name = hydrus_wrapper.get_tag_service_name(config)
-        client = hydrus_wrapper.get_client(config)
-
-        if client is None:
-            log("Hydrus client unavailable")
-            return False
-
-        debug(f"Sending deletion request: hash={hash_hex}, tags={tags}, service={service_name}")
-        client.delete_tags(hash_hex, tags, service_name)
-
-        preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
-        debug(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
-        _refresh_tag_view_if_current(hash_hex, None, config)
-        return True
-
+        backend = Store(config)[store_name]
+        ok = backend.delete_tag(resolved_hash, list(tags), config=config)
+        if ok:
+            preview = resolved_hash[:12] + ('…' if len(resolved_hash) > 12 else '')
+            debug(f"Removed {len(tags)} tag(s) from {preview} via store '{store_name}'.")
+            _refresh_tag_view_if_current(resolved_hash, store_name, path, config)
+            return True
+        return False
     except Exception as exc:
-        log(f"Hydrus del-tag failed: {exc}")
+        log(f"del-tag failed: {exc}")
         return False
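
Usage sketch for the renamed cmdlet, following the new usage string and the @-selector comment in _run; the store name, hash, and tags below are placeholders rather than values taken from this commit:

    # Remove two tags from one file in a named store
    delete-tag -store mystore -hash <sha256> "title:old title","creator:unknown"

    # Remove the tags listed at result-table indices 2 and 5; the store is read
    # from the selected rows when present, or can be forced with -store
    delete-tag @{2,5}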
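
The deletion path now goes through a Store registry whose backends expose get_tag and delete_tag. Below is a minimal sketch of that assumed interface, with its shape inferred only from the calls made in this diff (Store(config)[store_name], backend.get_tag(hash, config=config), backend.delete_tag(hash, tags, config=config)); the actual Store module may define it differently:

    from typing import Any, Dict, Iterable, Tuple

    class TagBackend:
        """Hypothetical backend protocol inferred from the calls above."""

        def get_tag(self, file_hash: str, config: Dict[str, Any]) -> Tuple[Iterable[str], str]:
            # Return (existing tags, source label) for the given file hash.
            raise NotImplementedError

        def delete_tag(self, file_hash: str, tags: list[str], config: Dict[str, Any]) -> bool:
            # Remove the given tags from the file; return True on success.
            raise NotImplementedError

    # _process_deletion then resolves a backend by name and calls it, roughly:
    #     backend = Store(config)[store_name]
    #     existing, _src = backend.get_tag(resolved_hash, config=config)
    #     backend.delete_tag(resolved_hash, list(tags), config=config)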