dfd
@@ -177,7 +177,7 @@ class SharedArgs:
    LIBRARY = CmdletArg(
        "library",
        type="string",
        choices=["hydrus", "local", "soulseek", "libgen", "debrid", "ftp"],
        choices=["hydrus", "local", "soulseek", "libgen", "ftp"],
        description="Search library or source location."
    )

@@ -209,7 +209,7 @@ class SharedArgs:
    STORAGE = CmdletArg(
        "storage",
        type="enum",
        choices=["hydrus", "local", "debrid", "ftp", "matrix"],
        choices=["hydrus", "local", "ftp", "matrix"],
        required=False,
        description="Storage location or destination for saving/uploading files.",
        alias="s",
@@ -240,12 +240,12 @@ class SharedArgs:
    def resolve_storage(storage_value: Optional[str], default: Optional[Path] = None) -> Path:
        """Resolve a storage location name to a filesystem Path.

        Maps storage identifiers (hydrus, local, debrid, ftp) to their actual
        Maps storage identifiers (hydrus, local, ftp) to their actual
        filesystem paths. This is the single source of truth for storage location resolution.
        Note: 0x0.st is now accessed via file providers (-provider 0x0), not storage.

        Args:
            storage_value: One of 'hydrus', 'local', 'debrid', 'ftp', or None
            storage_value: One of 'hydrus', 'local', 'ftp', or None
            default: Path to return if storage_value is None (defaults to Videos)

        Returns:
@@ -266,7 +266,6 @@ class SharedArgs:
        storage_map = {
            'local': Path.home() / "Videos",
            'hydrus': Path.home() / ".hydrus" / "client_files",
            'debrid': Path.home() / "Debrid",
            'ftp': Path.home() / "FTP",
            'matrix': Path.home() / "Matrix",  # Placeholder, not used for upload path
        }

@@ -185,7 +185,13 @@ def _persist_local_metadata(
        log(traceback.format_exc(), file=sys.stderr)


def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any, config: Optional[Dict[str, Any]] = None) -> Tuple[int, Optional[Path]]:
def _handle_local_transfer(
    media_path: Path,
    destination_root: Path,
    result: Any,
    config: Optional[Dict[str, Any]] = None,
    export_mode: bool = False,
) -> Tuple[int, Optional[Path]]:
    """Transfer a file to local storage and return (exit_code, destination_path).

    Args:
@@ -246,34 +252,60 @@ def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any
    relationships = extract_relationships(result)
    duration = extract_duration(result)

    # Rename source file if title tag is present (to ensure destination has correct name)
    title_tag = next((t for t in merged_tags if str(t).strip().lower().startswith("title:")), None)
    if title_tag:
        try:
            from helper.utils import unique_path
            title_val = title_tag.split(":", 1)[1].strip()
            # Sanitize filename (keep spaces, but remove illegal chars)
            safe_title = "".join(c for c in title_val if c.isalnum() or c in " ._-()[]").strip()
            if safe_title:
                new_name = safe_title + media_path.suffix
                new_path = media_path.parent / new_name
                if new_path != media_path:
                    # Ensure we don't overwrite existing files
                    new_path = unique_path(new_path)
                    media_path.rename(new_path)
                    media_path = new_path
                    debug(f"Renamed source file to match title: {media_path.name}")
        except Exception as e:
            log(f"Warning: Failed to rename file to match title: {e}", file=sys.stderr)
    # Skip title-based renaming for library mode (hash-based) but allow for export mode below

    try:
        # Ensure filename is the hash when adding to local storage
        resolved_hash = _resolve_file_hash(result, sidecar_hash, media_path)
        if resolved_hash:
            hashed_name = resolved_hash + media_path.suffix
            target_path = destination_root / hashed_name
            media_path = media_path.rename(target_path) if media_path != target_path else media_path
        dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
        if export_mode:
            title_tag = next((t for t in merged_tags if str(t).strip().lower().startswith("title:")), None)
            title_value = ""
            if title_tag:
                title_value = title_tag.split(":", 1)[1].strip()
            if not title_value:
                title_value = media_path.stem.replace("_", " ").strip()
            # Sanitize filename
            safe_title = "".join(c for c in title_value if c.isalnum() or c in " ._-()[]{}'`").strip()
            base_name = safe_title or media_path.stem
            new_name = base_name + media_path.suffix
            target_path = destination_root / new_name
            destination_root.mkdir(parents=True, exist_ok=True)
            if target_path.exists():
                from helper.utils import unique_path
                target_path = unique_path(target_path)
            shutil.move(str(media_path), target_path)

            # Move/copy sidecar files alongside
            possible_sidecars = [
                media_path.with_suffix(media_path.suffix + ".json"),
                media_path.with_name(media_path.name + ".tags"),
                media_path.with_name(media_path.name + ".tags.txt"),
                media_path.with_name(media_path.name + ".metadata"),
                media_path.with_name(media_path.name + ".notes"),
            ]
            for sc in possible_sidecars:
                try:
                    if sc.exists():
                        suffix_part = sc.name.replace(media_path.name, "", 1)
                        dest_sidecar = target_path.parent / f"{target_path.name}{suffix_part}"
                        dest_sidecar.parent.mkdir(parents=True, exist_ok=True)
                        shutil.move(str(sc), dest_sidecar)
                except Exception:
                    pass
            media_path = target_path
            dest_file = str(target_path)
        else:
            # Ensure filename is the hash when adding to local storage
            resolved_hash = _resolve_file_hash(result, sidecar_hash, media_path)
            if resolved_hash:
                hashed_name = resolved_hash + media_path.suffix
                target_path = destination_root / hashed_name
                try:
                    if target_path.exists():
                        target_path.unlink()
                except Exception:
                    pass
                if media_path != target_path:
                    media_path = media_path.rename(target_path)
            dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
    except Exception as exc:
        log(f"❌ Failed to move file into {destination_root}: {exc}", file=sys.stderr)
        return 1, None
@@ -291,9 +323,12 @@ def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any
    if filename_title:
        final_tags.insert(0, f"title:{filename_title}")

    _persist_local_metadata(destination_root, dest_path, final_tags, merged_urls, file_hash, relationships, duration, media_kind)
    _cleanup_sidecar_files(media_path, sidecar_path)
    debug(f"✅ Moved to local library: {dest_path}")
    if not export_mode:
        _persist_local_metadata(destination_root, dest_path, final_tags, merged_urls, file_hash, relationships, duration, media_kind)
        _cleanup_sidecar_files(media_path, sidecar_path)
        debug(f"✅ Moved to local library: {dest_path}")
    else:
        debug(f"✅ Exported to destination: {dest_path}")
    return 0, dest_path

@@ -333,17 +368,26 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
    provider_name: Optional[str] = None
    delete_after_upload = False

    # Check if -path argument was provided to use direct file path instead of piped result
    # Check if -path argument was provided
    path_arg = parsed.get("path")
    if path_arg:
        # Create a pseudo-result object from the file path
        media_path = Path(str(path_arg).strip())
        if not media_path.exists():
            log(f"❌ File not found: {media_path}")
            return 1
        # Create result dict with the file path and origin 'wild' for direct path inputs
        result = {"target": str(media_path), "origin": "wild"}
        log(f"Using direct file path: {media_path}")
        path_value = Path(str(path_arg).strip())
        # If there is no piped result, treat -path as the source file (existing behavior)
        if result is None:
            if not path_value.exists():
                log(f"❌ File not found: {path_value}")
                return 1
            result = {"target": str(path_value), "origin": "wild"}
            log(f"Using direct file path: {path_value}")
        else:
            # Piped result present: treat -path as destination (export)
            if not path_value.exists():
                try:
                    path_value.mkdir(parents=True, exist_ok=True)
                except Exception as exc:
                    log(f"❌ Cannot create destination directory {path_value}: {exc}", file=sys.stderr)
                    return 1
            location = str(path_value)

    # Get location from parsed args - now uses SharedArgs.STORAGE so key is "storage"
    location = parsed.get("storage")
@@ -714,7 +758,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
        return 1

        log(f"Moving to local path: {destination_root}", file=sys.stderr)
        exit_code, dest_path = _handle_local_transfer(media_path, destination_root, result, config)
        exit_code, dest_path = _handle_local_transfer(media_path, destination_root, result, config, export_mode=True)

        # After successful local transfer, emit result for pipeline continuation
        if exit_code == 0 and dest_path:

@@ -79,6 +79,31 @@ def add(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    except Exception as exc:
        log(f"Hydrus add-note failed: {exc}")
        return 1

    # Refresh notes view if we're operating on the currently selected subject
    try:
        from cmdlets import get_note as get_note_cmd  # type: ignore
    except Exception:
        get_note_cmd = None
    if get_note_cmd:
        try:
            subject = ctx.get_last_result_subject()
            if subject is not None:
                def norm(val: Any) -> str:
                    return str(val).lower()
                target_hash = norm(hash_hex) if hash_hex else None
                subj_hashes = []
                if isinstance(subject, dict):
                    subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
                else:
                    subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
                if target_hash and target_hash in subj_hashes:
                    get_note_cmd.get_notes(subject, ["-hash", hash_hex], config)
                    return 0
        except Exception:
            pass

    ctx.emit(f"Added note '{name}' ({len(text)} chars)")

    return 0

@@ -145,6 +145,49 @@ def _resolve_king_reference(king_arg: str) -> Optional[str]:
    return None


def _refresh_relationship_view_if_current(target_hash: Optional[str], target_path: Optional[str], other: Optional[str], config: Dict[str, Any]) -> None:
    """If the current subject matches the target, refresh relationships via get-relationship."""
    try:
        from cmdlets import get_relationship as get_rel_cmd  # type: ignore
    except Exception:
        return

    try:
        subject = ctx.get_last_result_subject()
        if subject is None:
            return

        def norm(val: Any) -> str:
            return str(val).lower()

        target_hashes = [norm(v) for v in [target_hash, other] if v]
        target_paths = [norm(v) for v in [target_path, other] if v]

        subj_hashes: list[str] = []
        subj_paths: list[str] = []
        if isinstance(subject, dict):
            subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
            subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
        else:
            subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
            subj_paths = [norm(getattr(subject, f, None)) for f in ("file_path", "path", "target") if getattr(subject, f, None)]

        is_match = False
        if target_hashes and any(h in subj_hashes for h in target_hashes):
            is_match = True
        if target_paths and any(p in subj_paths for p in target_paths):
            is_match = True
        if not is_match:
            return

        refresh_args: list[str] = []
        if target_hash:
            refresh_args.extend(["-hash", target_hash])
        get_rel_cmd._run(subject, refresh_args, config)
    except Exception:
        pass


@register(["add-relationship", "add-rel"])  # primary name and alias
def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
    """Associate file relationships in Hydrus.
@@ -253,6 +296,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
                    f"[add-relationship] Set {rel_type} relationship: {file_hash} <-> {king_hash}",
                    file=sys.stderr
                )
                _refresh_relationship_view_if_current(file_hash, file_path_from_result, king_hash, config)
            except Exception as exc:
                log(f"Failed to set relationship: {exc}", file=sys.stderr)
                return 1
@@ -280,6 +324,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
                    f"[add-relationship] Set {rel_type} relationship: {file_hash} <-> {existing_king}",
                    file=sys.stderr
                )
                _refresh_relationship_view_if_current(file_hash, file_path_from_result, existing_king, config)
            except Exception as exc:
                log(f"Failed to set relationship: {exc}", file=sys.stderr)
                return 1
@@ -300,6 +345,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
                with LocalLibrarySearchOptimizer(local_storage_path) as db:
                    db.set_relationship(file_path_obj, king_file_path, rel_type)
                    log(f"Set {rel_type} relationship: {file_path_obj.name} -> {king_file_path.name}", file=sys.stderr)
                    _refresh_relationship_view_if_current(None, str(file_path_obj), str(king_file_path), config)
            else:
                log(f"King file not found or invalid: {king_hash}", file=sys.stderr)
                return 1
@@ -323,6 +369,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
            with LocalLibrarySearchOptimizer(local_storage_path) as db:
                db.set_relationship(file_path_obj, Path(king_path), rel_type)
                log(f"Set {rel_type} relationship: {file_path_obj.name} -> {Path(king_path).name}", file=sys.stderr)
                _refresh_relationship_view_if_current(None, str(file_path_obj), str(king_path), config)
        except Exception as exc:
            log(f"Failed to set relationship: {exc}", file=sys.stderr)
            return 1

@@ -28,6 +28,171 @@ def _extract_title_tag(tags: List[str]) -> Optional[str]:
    return None


def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
    """Update result object/dict title fields and columns in-place."""
    if not title_value:
        return
    if isinstance(res, models.PipeObject):
        res.title = title_value
        # Update columns if present (Title column assumed index 0)
        if hasattr(res, "columns") and isinstance(res.columns, list) and res.columns:
            label, *_ = res.columns[0]
            if str(label).lower() == "title":
                res.columns[0] = (res.columns[0][0], title_value)
    elif isinstance(res, dict):
        res["title"] = title_value
        cols = res.get("columns")
        if isinstance(cols, list):
            updated = []
            changed = False
            for col in cols:
                if isinstance(col, tuple) and len(col) == 2:
                    label, val = col
                    if str(label).lower() == "title":
                        updated.append((label, title_value))
                        changed = True
                    else:
                        updated.append(col)
                else:
                    updated.append(col)
            if changed:
                res["columns"] = updated


def _matches_target(item: Any, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str]) -> bool:
    """Determine whether a result item refers to the given hash/path target."""
    hydrus_hash_l = hydrus_hash.lower() if hydrus_hash else None
    file_hash_l = file_hash.lower() if file_hash else None
    file_path_l = file_path.lower() if file_path else None

    def norm(val: Any) -> Optional[str]:
        return str(val).lower() if val is not None else None

    if isinstance(item, dict):
        hashes = [
            norm(item.get("hydrus_hash")),
            norm(item.get("hash")),
            norm(item.get("hash_hex")),
            norm(item.get("file_hash")),
        ]
        paths = [
            norm(item.get("path")),
            norm(item.get("file_path")),
            norm(item.get("target")),
        ]
    else:
        hashes = [
            norm(getattr(item, "hydrus_hash", None)),
            norm(getattr(item, "hash_hex", None)),
            norm(getattr(item, "file_hash", None)),
        ]
        paths = [
            norm(getattr(item, "path", None)),
            norm(getattr(item, "file_path", None)),
            norm(getattr(item, "target", None)),
        ]

    if hydrus_hash_l and hydrus_hash_l in hashes:
        return True
    if file_hash_l and file_hash_l in hashes:
        return True
    if file_path_l and file_path_l in paths:
        return True
    return False


def _update_item_title_fields(item: Any, new_title: str) -> None:
    """Mutate an item to reflect a new title in plain fields and columns."""
    if isinstance(item, models.PipeObject):
        item.title = new_title
        if hasattr(item, "columns") and isinstance(item.columns, list) and item.columns:
            label, *_ = item.columns[0]
            if str(label).lower() == "title":
                item.columns[0] = (label, new_title)
    elif isinstance(item, dict):
        item["title"] = new_title
        cols = item.get("columns")
        if isinstance(cols, list):
            updated_cols = []
            changed = False
            for col in cols:
                if isinstance(col, tuple) and len(col) == 2:
                    label, val = col
                    if str(label).lower() == "title":
                        updated_cols.append((label, new_title))
                        changed = True
                    else:
                        updated_cols.append(col)
                else:
                    updated_cols.append(col)
            if changed:
                item["columns"] = updated_cols


def _refresh_result_table_title(new_title: str, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str]) -> None:
    """Refresh the cached result table with an updated title and redisplay it."""
    try:
        last_table = ctx.get_last_result_table()
        items = ctx.get_last_result_items()
        if not last_table or not items:
            return

        updated_items = []
        match_found = False
        for item in items:
            try:
                if _matches_target(item, hydrus_hash, file_hash, file_path):
                    _update_item_title_fields(item, new_title)
                    match_found = True
            except Exception:
                pass
            updated_items.append(item)

        if not match_found:
            return

        from result_table import ResultTable  # Local import to avoid circular dependency

        new_table = ResultTable(getattr(last_table, "title", ""), title_width=getattr(last_table, "title_width", 80), max_columns=getattr(last_table, "max_columns", None))
        if getattr(last_table, "source_command", None):
            new_table.set_source_command(last_table.source_command, getattr(last_table, "source_args", []))

        for item in updated_items:
            new_table.add_result(item)

        ctx.set_last_result_table_preserve_history(new_table, updated_items)
        ctx.set_last_result_table_overlay(new_table, updated_items)
    except Exception:
        pass


def _refresh_tags_view(res: Any, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str], config: Dict[str, Any]) -> None:
    """Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
    try:
        from cmdlets import get_tag as get_tag_cmd  # type: ignore
    except Exception:
        return

    target_hash = hydrus_hash or file_hash
    refresh_args: List[str] = []
    if target_hash:
        refresh_args = ["-hash", target_hash]

    try:
        subject = ctx.get_last_result_subject()
        if subject and _matches_target(subject, hydrus_hash, file_hash, file_path):
            get_tag_cmd._run(subject, refresh_args, config)
            return
    except Exception:
        pass

    if target_hash:
        try:
            get_tag_cmd._run(res, refresh_args, config)
        except Exception:
            pass


@register(["add-tag", "add-tags"])
@@ -148,7 +313,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

    # Tags ARE provided - append them to each result and write sidecar files or add to Hydrus
    sidecar_count = 0
    removed_tags: List[str] = []
    total_new_tags = 0
    total_modified = 0
    for res in results:
        # Handle both dict and PipeObject formats
        file_path = None
@@ -180,9 +346,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            hydrus_hash = file_hash
        if not storage_source and hydrus_hash and not file_path:
            storage_source = 'hydrus'
        # If we have a file path but no storage source, assume local to avoid sidecar spam
        if not storage_source and file_path:
            storage_source = 'local'
        else:
            ctx.emit(res)
            continue

        original_tags_lower = {str(t).lower() for t in existing_tags if isinstance(t, str)}
        original_tags_snapshot = list(existing_tags)
        original_title = _extract_title_tag(original_tags_snapshot)
        removed_tags: List[str] = []

        # Apply hash override if provided
        if hash_override:
@@ -239,35 +413,47 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                if new_tag not in existing_tags:
                    existing_tags.append(new_tag)

        # Compute new tags relative to original
        new_tags_added = [t for t in existing_tags if isinstance(t, str) and t.lower() not in original_tags_lower]
        total_new_tags += len(new_tags_added)

        # Update the result's tags
        if isinstance(res, models.PipeObject):
            res.extra['tags'] = existing_tags
        elif isinstance(res, dict):
            res['tags'] = existing_tags

        # If a title: tag was added, update the in-memory title so downstream display reflects it immediately
        # If a title: tag was added, update the in-memory title and columns so downstream display reflects it immediately
        title_value = _extract_title_tag(existing_tags)
        if title_value:
            if isinstance(res, models.PipeObject):
                res.title = title_value
            elif isinstance(res, dict):
                res['title'] = title_value
            _apply_title_to_result(res, title_value)

        final_tags = existing_tags

        # Determine where to add tags: Hydrus, local DB, or sidecar
        if storage_source and storage_source.lower() == 'hydrus':
            # Add tags to Hydrus using the API
            target_hash = hydrus_hash or file_hash
            if target_hash:
                try:
                    log(f"[add_tags] Adding {len(existing_tags)} tag(s) to Hydrus file: {target_hash}", file=sys.stderr)
                    tags_to_send = [t for t in existing_tags if isinstance(t, str) and t.lower() not in original_tags_lower]
                    hydrus_client = hydrus_wrapper.get_client(config)
                    hydrus_client.add_tags(target_hash, existing_tags, "my tags")
                    service_name = hydrus_wrapper.get_tag_service_name(config)
                    if tags_to_send:
                        log(f"[add_tags] Adding {len(tags_to_send)} new tag(s) to Hydrus file: {target_hash}", file=sys.stderr)
                        hydrus_client.add_tags(target_hash, tags_to_send, service_name)
                    else:
                        log(f"[add_tags] No new tags to add for Hydrus file: {target_hash}", file=sys.stderr)
                    # Delete old namespace tags we replaced (e.g., previous title:)
                    if removed_tags:
                        unique_removed = sorted(set(removed_tags))
                        hydrus_client.delete_tags(target_hash, unique_removed, "my tags")
                    log(f"[add_tags] ✓ Tags added to Hydrus", file=sys.stderr)
                        hydrus_client.delete_tags(target_hash, unique_removed, service_name)
                    if tags_to_send:
                        log(f"[add_tags] ✓ Tags added to Hydrus", file=sys.stderr)
                    elif removed_tags:
                        log(f"[add_tags] ✓ Removed {len(unique_removed)} tag(s) from Hydrus", file=sys.stderr)
                    sidecar_count += 1
                    if tags_to_send or removed_tags:
                        total_modified += 1
                except Exception as e:
                    log(f"[add_tags] Warning: Failed to add tags to Hydrus: {e}", file=sys.stderr)
            else:
@@ -278,10 +464,25 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            library_root = get_local_storage_path(config)
            if library_root:
                try:
                    path_obj = Path(file_path)
                    with LocalLibraryDB(library_root) as db:
                        db.save_tags(Path(file_path), existing_tags)
                        log(f"[add_tags] Saved {len(existing_tags)} tag(s) to local DB", file=sys.stderr)
                        sidecar_count += 1
                        db.save_tags(path_obj, existing_tags)
                        # Reload tags to reflect DB state (preserves auto-title logic)
                        refreshed_tags = db.get_tags(path_obj) or existing_tags
                        # Recompute title from refreshed tags for accurate display
                        refreshed_title = _extract_title_tag(refreshed_tags)
                        if refreshed_title:
                            _apply_title_to_result(res, refreshed_title)
                        res_tags = refreshed_tags or existing_tags
                        if isinstance(res, models.PipeObject):
                            res.extra['tags'] = res_tags
                        elif isinstance(res, dict):
                            res['tags'] = res_tags
                        log(f"[add_tags] Added {len(new_tags_added)} new tag(s); {len(res_tags)} total tag(s) stored locally", file=sys.stderr)
                        sidecar_count += 1
                        if new_tags_added or removed_tags:
                            total_modified += 1
                        final_tags = res_tags
                except Exception as e:
                    log(f"[add_tags] Warning: Failed to save tags to local DB: {e}", file=sys.stderr)
            else:
@@ -289,19 +490,24 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            else:
                log(f"[add_tags] Warning: No file path for local storage, skipping", file=sys.stderr)
        else:
            # For other storage types or unknown sources, write sidecar file if we have a file path
            if file_path:
                try:
                    sidecar_path = write_sidecar(Path(file_path), existing_tags, [], file_hash)
                    log(f"[add_tags] Wrote {len(existing_tags)} tag(s) to sidecar: {sidecar_path}", file=sys.stderr)
                    sidecar_count += 1
                except Exception as e:
                    log(f"[add_tags] Warning: Failed to write sidecar for {file_path}: {e}", file=sys.stderr)
            # For other storage types or unknown sources, avoid writing sidecars to reduce clutter
            # (local/hydrus are handled above).
            ctx.emit(res)
            continue

        # If title changed, refresh the cached result table so the display reflects the new name
        final_title = _extract_title_tag(final_tags)
        if final_title and (not original_title or final_title.lower() != original_title.lower()):
            _refresh_result_table_title(final_title, hydrus_hash or file_hash, file_hash, file_path)

        # If tags changed, refresh tag view via get-tag (prefer current subject; fall back to hash refresh)
        if new_tags_added or removed_tags:
            _refresh_tags_view(res, hydrus_hash, file_hash, file_path, config)

        # Emit the modified result
        ctx.emit(res)

    log(f"[add_tags] Processed {len(results)} result(s)", file=sys.stderr)
    log(f"[add_tags] Added {total_new_tags} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)", file=sys.stderr)
    return 0

CMDLET = Cmdlet(

@@ -13,6 +13,7 @@ from ._shared import Cmdlet, CmdletArg, normalize_hash
from helper.logger import log
from config import get_local_storage_path
from helper.local_library import LocalLibraryDB
from helper.logger import debug

CMDLET = Cmdlet(
    name="add-url",
@@ -124,6 +125,39 @@ def add(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        return 1

    if success:
        # If we just mutated the currently displayed item, refresh URLs via get-url
        try:
            from cmdlets import get_url as get_url_cmd  # type: ignore
        except Exception:
            get_url_cmd = None
        if get_url_cmd:
            try:
                subject = ctx.get_last_result_subject()
                if subject is not None:
                    def norm(val: Any) -> str:
                        return str(val).lower()
                    target_hash = norm(hash_hex) if hash_hex else None
                    target_path = norm(file_path) if 'file_path' in locals() else None
                    subj_hashes = []
                    subj_paths = []
                    if isinstance(subject, dict):
                        subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
                        subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
                    else:
                        subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
                        subj_paths = [norm(getattr(subject, f, None)) for f in ("file_path", "path", "target") if getattr(subject, f, None)]
                    is_match = False
                    if target_hash and target_hash in subj_hashes:
                        is_match = True
                    if target_path and target_path in subj_paths:
                        is_match = True
                    if is_match:
                        refresh_args: list[str] = []
                        if hash_hex:
                            refresh_args.extend(["-hash", hash_hex])
                        get_url_cmd._run(subject, refresh_args, config)
            except Exception:
                debug("URL refresh skipped (error)")
        return 0

    if not hash_hex and not file_path:

@@ -3,6 +3,7 @@ from __future__ import annotations
from typing import Any, Dict, Sequence
import json

import pipeline as ctx
from helper import hydrus as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, normalize_hash
from helper.logger import log
@@ -75,5 +76,30 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    except Exception as exc:
        log(f"Hydrus delete-note failed: {exc}")
        return 1

    # Refresh notes view if we're operating on the current subject
    try:
        from cmdlets import get_note as get_note_cmd  # type: ignore
    except Exception:
        get_note_cmd = None
    if get_note_cmd:
        try:
            subject = ctx.get_last_result_subject()
            if subject is not None:
                def norm(val: Any) -> str:
                    return str(val).lower()
                target_hash = norm(hash_hex) if hash_hex else None
                subj_hashes = []
                if isinstance(subject, dict):
                    subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
                else:
                    subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
                if target_hash and target_hash in subj_hashes:
                    get_note_cmd.get_notes(subject, ["-hash", hash_hex], config)
                    return 0
        except Exception:
            pass

    log(f"Deleted note '{name}'")

    return 0

@@ -15,6 +15,49 @@ from helper.local_library import LocalLibrarySearchOptimizer
from config import get_local_storage_path


def _refresh_relationship_view_if_current(target_hash: Optional[str], target_path: Optional[str], other: Optional[str], config: Dict[str, Any]) -> None:
    """If the current subject matches the target, refresh relationships via get-relationship."""
    try:
        from cmdlets import get_relationship as get_rel_cmd  # type: ignore
    except Exception:
        return

    try:
        subject = ctx.get_last_result_subject()
        if subject is None:
            return

        def norm(val: Any) -> str:
            return str(val).lower()

        target_hashes = [norm(v) for v in [target_hash, other] if v]
        target_paths = [norm(v) for v in [target_path, other] if v]

        subj_hashes: list[str] = []
        subj_paths: list[str] = []
        if isinstance(subject, dict):
            subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
            subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
        else:
            subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
            subj_paths = [norm(getattr(subject, f, None)) for f in ("file_path", "path", "target") if getattr(subject, f, None)]

        is_match = False
        if target_hashes and any(h in subj_hashes for h in target_hashes):
            is_match = True
        if target_paths and any(p in subj_paths for p in target_paths):
            is_match = True
        if not is_match:
            return

        refresh_args: list[str] = []
        if target_hash:
            refresh_args.extend(["-hash", target_hash])
        get_rel_cmd._run(subject, refresh_args, config)
    except Exception:
        pass


def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Delete relationships from files.

@@ -137,6 +180,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                """, (file_id, json.dumps(relationships) if relationships else None))

                db.db.connection.commit()
                _refresh_relationship_view_if_current(None, str(file_path_obj), None, config)
                deleted_count += 1

        except Exception as exc:

@@ -1,6 +1,7 @@
from __future__ import annotations

from typing import Any, Dict, Sequence
from pathlib import Path
import json
import sys

@@ -12,6 +13,49 @@ from ._shared import Cmdlet, CmdletArg, normalize_hash, parse_tag_arguments
from helper.logger import debug, log


def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, config: Dict[str, Any]) -> None:
    """If the current subject matches the target, refresh tags via get-tag."""
    try:
        from cmdlets import get_tag as get_tag_cmd  # type: ignore
    except Exception:
        return

    try:
        subject = ctx.get_last_result_subject()
        if subject is None:
            return

        def norm(val: Any) -> str:
            return str(val).lower()

        target_hash = norm(hash_hex) if hash_hex else None
        target_path = norm(file_path) if file_path else None

        subj_hashes: list[str] = []
        subj_paths: list[str] = []
        if isinstance(subject, dict):
            subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
            subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
        else:
            subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
            subj_paths = [norm(getattr(subject, f, None)) for f in ("file_path", "path", "target") if getattr(subject, f, None)]

        is_match = False
        if target_hash and target_hash in subj_hashes:
            is_match = True
        if target_path and target_path in subj_paths:
            is_match = True
        if not is_match:
            return

        refresh_args: list[str] = []
        if hash_hex:
            refresh_args.extend(["-hash", hash_hex])
        get_tag_cmd._run(subject, refresh_args, config)
    except Exception:
        pass


CMDLET = Cmdlet(
    name="delete-tags",
    summary="Remove tags from a Hydrus file.",
@@ -220,12 +264,69 @@ def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | No

    if not tags:
        return False

    def _fetch_existing_tags() -> list[str]:
        existing: list[str] = []
        # Prefer local DB when we have a path and not explicitly hydrus
        if file_path and (source == "local" or (source != "hydrus" and not hash_hex)):
            try:
                from helper.local_library import LocalLibraryDB
                from config import get_local_storage_path
                path_obj = Path(file_path)
                local_root = get_local_storage_path(config) or path_obj.parent
                with LocalLibraryDB(local_root) as db:
                    existing = db.get_tags(path_obj) or []
            except Exception:
                existing = []
        elif hash_hex:
            try:
                client = hydrus_wrapper.get_client(config)
                payload = client.fetch_file_metadata(
                    hashes=[hash_hex],
                    include_service_keys_to_tags=True,
                    include_file_urls=False,
                )
                items = payload.get("metadata") if isinstance(payload, dict) else None
                meta = items[0] if isinstance(items, list) and items else None
                if isinstance(meta, dict):
                    tags_payload = meta.get("tags")
                    if isinstance(tags_payload, dict):
                        seen: set[str] = set()
                        for svc_data in tags_payload.values():
                            if not isinstance(svc_data, dict):
                                continue
                            display = svc_data.get("display_tags")
                            if isinstance(display, list):
                                for t in display:
                                    if isinstance(t, (str, bytes)):
                                        val = str(t).strip()
                                        if val and val not in seen:
                                            seen.add(val)
                                            existing.append(val)
                            storage = svc_data.get("storage_tags")
                            if isinstance(storage, dict):
                                current_list = storage.get("0") or storage.get(0)
                                if isinstance(current_list, list):
                                    for t in current_list:
                                        if isinstance(t, (str, bytes)):
                                            val = str(t).strip()
                                            if val and val not in seen:
                                                seen.add(val)
                                                existing.append(val)
            except Exception:
                existing = []
        return existing

    # Safety: block deleting title: without replacement to avoid untitled files
    # Safety: only block if this deletion would remove the final title tag
    title_tags = [t for t in tags if isinstance(t, str) and t.lower().startswith("title:")]
    if title_tags:
        log("Cannot delete title: tag without replacement. Use add-tag \"title:new title\" instead.", file=sys.stderr)
        return False
        existing_tags = _fetch_existing_tags()
        current_titles = [t for t in existing_tags if isinstance(t, str) and t.lower().startswith("title:")]
        del_title_set = {t.lower() for t in title_tags}
        remaining_titles = [t for t in current_titles if t.lower() not in del_title_set]
        if current_titles and not remaining_titles:
            log("Cannot delete the last title: tag. Add a replacement title first (add-tag \"title:new title\").", file=sys.stderr)
            return False

    if not hash_hex and not file_path:
        log("Item does not include a hash or file path")
@@ -253,6 +354,7 @@ def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | No
            with LocalLibraryDB(local_root) as db:
                db.remove_tags(path_obj, tags)
                debug(f"Removed {len(tags)} tag(s) from {path_obj.name} (local)")
                _refresh_tag_view_if_current(hash_hex, file_path, config)
                return True

        except Exception as exc:
@@ -276,6 +378,7 @@ def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | No

        preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
        debug(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
        _refresh_tag_view_if_current(hash_hex, None, config)
        return True

    except Exception as exc:

@@ -8,7 +8,7 @@ from pathlib import Path
from . import register
from helper import hydrus as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, normalize_hash
from helper.logger import log
from helper.logger import debug, log
from config import get_local_storage_path
from helper.local_library import LocalLibraryDB
import pipeline as ctx
@@ -152,5 +152,43 @@ def _delete_single(result: Any, url: str, override_hash: str | None, config: Dic
        success = True
    except Exception as exc:
        log(f"Hydrus del-url failed: {exc}", file=sys.stderr)


    if success:
        try:
            from cmdlets import get_url as get_url_cmd  # type: ignore
        except Exception:
            get_url_cmd = None
        if get_url_cmd:
            try:
                subject = ctx.get_last_result_subject()
                if subject is not None:
                    def norm(val: Any) -> str:
                        return str(val).lower()

                    target_hash = norm(hash_hex) if hash_hex else None
                    target_path = norm(file_path) if file_path else None

                    subj_hashes = []
                    subj_paths = []
                    if isinstance(subject, dict):
                        subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
                        subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
                    else:
                        subj_hashes = [norm(getattr(subject, f, None)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if getattr(subject, f, None)]
                        subj_paths = [norm(getattr(subject, f, None)) for f in ("file_path", "path", "target") if getattr(subject, f, None)]

                    is_match = False
                    if target_hash and target_hash in subj_hashes:
                        is_match = True
                    if target_path and target_path in subj_paths:
                        is_match = True

                    if is_match:
                        refresh_args: list[str] = []
                        if hash_hex:
                            refresh_args.extend(["-hash", hash_hex])
                        get_url_cmd._run(subject, refresh_args, config)
            except Exception:
                debug("URL refresh skipped (error)")

    return success

@@ -21,7 +21,7 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple
import pipeline as ctx
from helper import hydrus
from helper.local_library import read_sidecar, write_sidecar, find_sidecar, LocalLibraryDB
from ._shared import normalize_hash, Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args
from ._shared import normalize_hash, looks_like_hash, Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args
from config import get_local_storage_path


@@ -105,7 +105,8 @@ def _emit_tags_as_table(
    service_name: Optional[str] = None,
    config: Dict[str, Any] = None,
    item_title: Optional[str] = None,
    file_path: Optional[str] = None
    file_path: Optional[str] = None,
    subject: Optional[Any] = None,
) -> None:
    """Emit tags as TagItem objects and display via ResultTable.

@@ -144,9 +145,9 @@ def _emit_tags_as_table(
    # Use overlay mode so it doesn't push the previous search to history stack
    # This makes get-tag behave like a transient view
    try:
        ctx.set_last_result_table_overlay(table, tag_items)
        ctx.set_last_result_table_overlay(table, tag_items, subject)
    except AttributeError:
        ctx.set_last_result_table(table, tag_items)
        ctx.set_last_result_table(table, tag_items, subject)
    # Note: CLI will handle displaying the table via ResultTable formatting

def _summarize_tags(tags_list: List[str], limit: int = 8) -> str:
    """Create a summary of tags for display."""
@@ -443,7 +444,10 @@ def _emit_tag_payload(source: str, tags_list: List[str], *, hash_value: Optional
def _extract_scrapable_identifiers(tags_list: List[str]) -> Dict[str, str]:
    """Extract scrapable identifiers from tags."""
    identifiers = {}
    scrapable_prefixes = {'openlibrary', 'isbn_10', 'isbn', 'musicbrainz', 'musicbrainzalbum', 'imdb', 'tmdb', 'tvdb'}
    scrapable_prefixes = {
        'openlibrary', 'isbn', 'isbn_10', 'isbn_13',
        'musicbrainz', 'musicbrainzalbum', 'imdb', 'tmdb', 'tvdb'
    }

    for tag in tags_list:
        if not isinstance(tag, str) or ':' not in tag:
@@ -453,9 +457,18 @@ def _extract_scrapable_identifiers(tags_list: List[str]) -> Dict[str, str]:
        if len(parts) != 2:
            continue

        key = parts[0].strip().lower()
        key_raw = parts[0].strip().lower()
        key = key_raw.replace('-', '_')
        if key == 'isbn10':
            key = 'isbn_10'
        elif key == 'isbn13':
            key = 'isbn_13'
        value = parts[1].strip()

        # Normalize ISBN values by removing hyphens for API friendliness
        if key.startswith('isbn'):
            value = value.replace('-', '')

        if key in scrapable_prefixes and value:
            identifiers[key] = value

@@ -965,8 +978,8 @@ def _perform_scraping(tags_list: List[str]) -> List[str]:
        if olid:
            log(f"Scraping OpenLibrary: {olid}")
            new_tags.extend(_scrape_openlibrary_metadata(olid))
        elif 'isbn_10' in identifiers or 'isbn' in identifiers:
            isbn = identifiers.get('isbn_10') or identifiers.get('isbn')
        elif 'isbn_13' in identifiers or 'isbn_10' in identifiers or 'isbn' in identifiers:
            isbn = identifiers.get('isbn_13') or identifiers.get('isbn_10') or identifiers.get('isbn')
            if isbn:
                log(f"Scraping ISBN: {isbn}")
                new_tags.extend(_scrape_isbn_metadata(isbn))
@@ -991,13 +1004,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

    Usage:
        get-tag [-hash <sha256>] [--store <key>] [--emit]
        get-tag -scrape <url>
        get-tag -scrape <url|provider>

    Options:
        -hash <sha256>: Override hash to use instead of result's hash_hex
        --store <key>: Store result to this key for pipeline
        --emit: Emit result without interactive prompt (quiet mode)
        -scrape <url>: Scrape metadata from URL (returns tags as JSON)
        -scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks)
    """
    # Helper to get field from both dict and object
    def get_field(obj: Any, field: str, default: Any = None) -> Any:
@@ -1008,13 +1021,26 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

    # Parse arguments using shared parser
    parsed_args = parse_cmdlet_args(args, CMDLET)

    # Detect if -scrape flag was provided without a value (parse_cmdlet_args skips missing values)
    scrape_flag_present = any(str(arg).lower() in {"-scrape", "--scrape"} for arg in args)

    # Extract values
    hash_override = normalize_hash(parsed_args.get("hash"))
    hash_override_raw = parsed_args.get("hash")
    hash_override = normalize_hash(hash_override_raw)
    store_key = parsed_args.get("store")
    emit_requested = parsed_args.get("emit", False)
    scrape_url = parsed_args.get("scrape")
    scrape_requested = scrape_url is not None
    scrape_requested = scrape_flag_present or scrape_url is not None

    if hash_override_raw is not None:
        if not hash_override or not looks_like_hash(hash_override):
            log("Invalid hash format: expected 64 hex characters", file=sys.stderr)
            return 1

    if scrape_requested and (not scrape_url or str(scrape_url).strip() == ""):
        log("-scrape requires a URL or provider name", file=sys.stderr)
        return 1

    # Handle URL or provider scraping mode
    if scrape_requested and scrape_url:
@@ -1041,18 +1067,51 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            log(f"Unknown metadata provider: {scrape_url}", file=sys.stderr)
            return 1

        # Determine query from title on the result or filename
        # Prefer identifier tags (ISBN/OLID/etc.) when available; fallback to title/filename
        identifier_tags: List[str] = []
        result_tags = get_field(result, "tags", None)
        if isinstance(result_tags, list):
            identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))]

        # Try local sidecar if no tags present on result
        if not identifier_tags:
            file_path = get_field(result, "target", None) or get_field(result, "path", None) or get_field(result, "file_path", None) or get_field(result, "filename", None)
            if isinstance(file_path, str) and file_path and not file_path.lower().startswith(("http://", "https://")):
                try:
                    media_path = Path(str(file_path))
                    if media_path.exists():
                        tags_from_sidecar = read_sidecar(media_path)
                        if isinstance(tags_from_sidecar, list):
                            identifier_tags = [str(t) for t in tags_from_sidecar if isinstance(t, (str, bytes))]
                except Exception:
                    pass

        identifiers = _extract_scrapable_identifiers(identifier_tags)
        identifier_query: Optional[str] = None
        if identifiers:
            if provider.name in {"openlibrary", "googlebooks", "google"}:
                identifier_query = identifiers.get("isbn_13") or identifiers.get("isbn_10") or identifiers.get("isbn") or identifiers.get("openlibrary")
            elif provider.name == "itunes":
                identifier_query = identifiers.get("musicbrainz") or identifiers.get("musicbrainzalbum")

        # Determine query from identifier first, else title on the result or filename
        title_hint = get_field(result, "title", None) or get_field(result, "name", None)
        if not title_hint:
            file_path = get_field(result, "path", None) or get_field(result, "filename", None)
            if file_path:
                title_hint = Path(str(file_path)).stem

        if not title_hint:
            log("No title available to search for metadata", file=sys.stderr)
        query_hint = identifier_query or title_hint
        if not query_hint:
            log("No title or identifier available to search for metadata", file=sys.stderr)
            return 1

        items = provider.search(title_hint, limit=10)
        if identifier_query:
            log(f"Using identifier for metadata search: {identifier_query}")
        else:
            log(f"Using title for metadata search: {query_hint}")

        items = provider.search(query_hint, limit=10)
        if not items:
            log("No metadata results found", file=sys.stderr)
            return 1
@@ -1212,11 +1271,46 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # Always output to ResultTable (pipeline mode only)
    # Extract title for table header
    item_title = get_field(result, "title", None) or get_field(result, "name", None) or get_field(result, "filename", None)

    # Build a subject payload representing the file whose tags are being shown
    subject_origin = get_field(result, "origin", None) or get_field(result, "source", None) or source
    subject_payload: Dict[str, Any] = {
        "tags": list(current),
        "title": item_title,
        "name": item_title,
        "origin": subject_origin,
        "source": subject_origin,
        "storage_source": subject_origin,
        "service_name": service_name,
        "extra": {
            "tags": list(current),
            "storage_source": subject_origin,
            "hydrus_hash": hash_hex,
        },
    }
    if hash_hex:
        subject_payload.update({
            "hash": hash_hex,
            "hash_hex": hash_hex,
            "file_hash": hash_hex,
            "hydrus_hash": hash_hex,
        })
    if local_path:
        try:
            path_text = str(local_path)
            subject_payload.update({
                "file_path": path_text,
                "path": path_text,
                "target": path_text,
            })
            subject_payload["extra"]["file_path"] = path_text
        except Exception:
            pass

    if source == "hydrus":
        _emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config, item_title=item_title)
        _emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config, item_title=item_title, subject=subject_payload)
    else:
        _emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config, item_title=item_title, file_path=str(local_path) if local_path else None)
        _emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config, item_title=item_title, file_path=str(local_path) if local_path else None, subject=subject_payload)

    # If emit requested or store key provided, emit payload
    if emit_mode:

@@ -5,6 +5,7 @@ from typing import Any, Dict, Sequence, List, Optional, Tuple, Callable
from fnmatch import fnmatchcase
from pathlib import Path
from dataclasses import dataclass, field
from collections import OrderedDict
import json
import os
import sys
@@ -135,45 +136,46 @@ STORAGE_ORIGINS = {"local", "hydrus", "debrid"}


def _ensure_storage_columns(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Attach Title/Store columns for storage-origin results to keep CLI display compact."""
    origin_value = str(payload.get("origin") or payload.get("source") or "").lower()
    if origin_value not in STORAGE_ORIGINS:
        return payload
    title = payload.get("title") or payload.get("name") or payload.get("target") or payload.get("path") or "Result"
    store_label = payload.get("origin") or payload.get("source") or origin_value

    # Handle extension
    extension = payload.get("ext", "")
    if not extension and title:
        path_obj = Path(str(title))
        if path_obj.suffix:
            extension = path_obj.suffix.lstrip('.')
            title = path_obj.stem
    """Attach Title/Store columns for storage-origin results to keep CLI display compact."""
    origin_value = str(payload.get("origin") or payload.get("source") or "").lower()
    if origin_value not in STORAGE_ORIGINS:
        return payload

    # Handle size
    size_val = payload.get("size") or payload.get("size_bytes")
    size_str = ""
    if size_val:
        try:
            size_bytes = int(size_val)
            size_mb = size_bytes / (1024 * 1024)
            size_str = f"{int(size_mb)} MB"
        except (ValueError, TypeError):
            size_str = str(size_val)
    title = payload.get("title") or payload.get("name") or payload.get("target") or payload.get("path") or "Result"
    store_label = payload.get("origin") or payload.get("source") or origin_value

    normalized = dict(payload)
    normalized["columns"] = [
        ("Title", str(title)),
        ("Ext", str(extension)),
        ("Store", str(store_label)),
        ("Size", str(size_str))
    ]
    return normalized
    # Handle extension
    extension = payload.get("ext", "")
    if not extension and title:
        path_obj = Path(str(title))
        if path_obj.suffix:
            extension = path_obj.suffix.lstrip('.')
            title = path_obj.stem

    # Handle size as integer MB (header will include units)
    size_val = payload.get("size") or payload.get("size_bytes")
    size_str = ""
    if size_val is not None:
        try:
            size_bytes = int(size_val)
            size_mb = int(size_bytes / (1024 * 1024))
            size_str = str(size_mb)
        except (ValueError, TypeError):
            size_str = str(size_val)

    normalized = dict(payload)
    normalized["columns"] = [
        ("Title", str(title)),
        ("Ext", str(extension)),
        ("Store", str(store_label)),
        ("Size(Mb)", str(size_str)),
    ]
    return normalized


CMDLET = Cmdlet(
    name="search-file",
    summary="Unified search cmdlet for searchable backends (Hydrus, Local, Debrid, LibGen, OpenLibrary, Soulseek).",
    summary="Unified search cmdlet for storage (Hydrus, Local) and providers (Debrid, LibGen, OpenLibrary, Soulseek).",
    usage="search-file [query] [-tag TAG] [-size >100MB|<50MB] [-type audio|video|image] [-duration >10:00] [-storage BACKEND] [-provider PROVIDER]",
    args=[
        CmdletArg("query", description="Search query string"),
@@ -182,11 +184,11 @@ CMDLET = Cmdlet(
        CmdletArg("type", description="Filter by type: audio, video, image, document"),
        CmdletArg("duration", description="Filter by duration: >10:00, <1:30:00"),
        CmdletArg("limit", type="integer", description="Limit results (default: 45)"),
        CmdletArg("storage", description="Search storage backend: hydrus, local, debrid (default: all searchable)"),
        CmdletArg("provider", description="Search provider: libgen, openlibrary, soulseek, debrid, local (overrides -storage)"),
        CmdletArg("storage", description="Search storage backend: hydrus, local (default: all searchable storages)"),
        CmdletArg("provider", description="Search provider: libgen, openlibrary, soulseek, debrid, local (overrides -storage)"),
    ],
    details=[
        "Search across multiple providers: File storage (Hydrus, Local, Debrid), Books (LibGen, OpenLibrary), Music (Soulseek)",
        "Search across storage (Hydrus, Local) and providers (Debrid, LibGen, OpenLibrary, Soulseek)",
        "Use -provider to search a specific source, or -storage to search file backends",
        "Filter results by: tag, size, type, duration",
        "Results can be piped to other commands",
@@ -216,6 +218,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    storage_backend: Optional[str] = None
    provider_name: Optional[str] = None
    limit = 45
    searched_backends: List[str] = []

    # Simple argument parsing
    i = 0
@@ -249,6 +252,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            i += 1
        else:
            i += 1

    # Debrid is provider-only now
    if storage_backend and storage_backend.lower() == "debrid":
        log("Use -provider debrid instead of -storage debrid (debrid is provider-only)", file=sys.stderr)
        return 1

    # Handle piped input (e.g. from @N selection) if query is empty
    if not query and result:
@@ -351,7 +359,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            db.update_worker_status(worker_id, 'completed')
            return 0

        # Otherwise search using FileStorage (Hydrus, Local, Debrid backends)
        # Otherwise search using storage backends (Hydrus, Local)
        from helper.file_storage import FileStorage
        storage = FileStorage(config=config or {})

@@ -364,6 +372,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                log(f"Backend 'hydrus' is not available (Hydrus service not running)", file=sys.stderr)
                db.update_worker_status(worker_id, 'error')
                return 1
            searched_backends.append(backend_to_search)
            if not storage.supports_search(backend_to_search):
                log(f"Backend '{backend_to_search}' does not support searching", file=sys.stderr)
                db.update_worker_status(worker_id, 'error')
@@ -379,6 +388,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                # Skip hydrus if not available
                if backend_name == "hydrus" and not hydrus_available:
                    continue
                searched_backends.append(backend_name)
                try:
                    backend_results = storage[backend_name].search(query, limit=limit - len(all_results))
                    if backend_results:
@@ -388,25 +398,65 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                except Exception as exc:
                    log(f"Backend {backend_name} search failed: {exc}", file=sys.stderr)
            results = all_results[:limit]

        # Also query Debrid provider by default (provider-only, but keep legacy coverage when no explicit provider given)
        if not provider_name and not storage_backend:
            try:
                debrid_provider = get_provider("debrid", config)
                if debrid_provider and debrid_provider.validate():
                    remaining = max(0, limit - len(results)) if isinstance(results, list) else limit
                    if remaining > 0:
                        debrid_results = debrid_provider.search(query, limit=remaining)
                        if debrid_results:
                            if "debrid" not in searched_backends:
                                searched_backends.append("debrid")
                            if results is None:
                                results = []
                            results.extend(debrid_results)
            except Exception as exc:
                log(f"Debrid provider search failed: {exc}", file=sys.stderr)

        def _format_storage_label(name: str) -> str:
            clean = str(name or "").strip()
            if not clean:
                return "Unknown"
            return clean.replace("_", " ").title()

        storage_counts: OrderedDict[str, int] = OrderedDict((name, 0) for name in searched_backends)
        for item in results or []:
            origin = getattr(item, 'origin', None)
            if origin is None and isinstance(item, dict):
                origin = item.get('origin') or item.get('source')
            if not origin:
                continue
            key = str(origin).lower()
            if key not in storage_counts:
                storage_counts[key] = 0
            storage_counts[key] += 1

        if storage_counts or query:
            display_counts = OrderedDict((_format_storage_label(name), count) for name, count in storage_counts.items())
            summary_line = table.set_storage_summary(display_counts, query, inline=True)
            if summary_line:
                table.title = summary_line

        # Emit results and collect for workers table
        if results:
            for item in results:
                # Add to table
                table.add_result(item)

                if isinstance(item, dict):
                    normalized = _ensure_storage_columns(item)
                    results_list.append(normalized)
                    ctx.emit(normalized)
                elif isinstance(item, ResultItem):
                    item_dict = item.to_dict()
                    results_list.append(item_dict)
                    ctx.emit(item_dict)
                else:
                    item_dict = {"title": str(item)}
                    results_list.append(item_dict)
                    ctx.emit(item_dict)
                def _as_dict(obj: Any) -> Dict[str, Any]:
                    if isinstance(obj, dict):
                        return dict(obj)
                    if hasattr(obj, "to_dict") and callable(getattr(obj, "to_dict")):
                        return obj.to_dict()  # type: ignore[arg-type]
                    return {"title": str(obj)}

                item_dict = _as_dict(item)
                normalized = _ensure_storage_columns(item_dict)
                # Add to table using normalized columns to avoid extra fields (e.g., Tags/Name)
                table.add_result(normalized)

                results_list.append(normalized)
                ctx.emit(normalized)

        # Set the result table in context for TUI/CLI display
        ctx.set_last_result_table(table, results_list)
