dfdsf
@@ -72,6 +72,6 @@ for _root_mod in ("select_cmdlet",):

# Also import helper modules that register cmdlets
try:
import helper.alldebrid as _alldebrid
import API.alldebrid as _alldebrid
except Exception:
pass

@@ -11,7 +11,7 @@ import sys
import inspect
from collections.abc import Iterable as IterableABC

from helper.logger import log, debug
from SYS.logger import log, debug
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Sequence, Set
from dataclasses import dataclass, field
@@ -149,7 +149,7 @@ class SharedArgs:

@staticmethod
def get_store_choices(config: Optional[Dict[str, Any]] = None) -> List[str]:
"""Get list of available storage backend names from FileStorage.
"""Get list of available store backend names.

This method dynamically discovers all configured storage backends
instead of using a static list. Should be called when building
@@ -162,13 +162,10 @@ class SharedArgs:
List of backend names (e.g., ['default', 'test', 'home', 'work'])

Example:
# In a cmdlet that needs dynamic choices
from helper.store import FileStorage
storage = FileStorage(config)
SharedArgs.STORE.choices = SharedArgs.get_store_choices(config)
"""
try:
from helper.store import FileStorage
from Store import Store

# If no config provided, try to load it
if config is None:
@@ -178,8 +175,8 @@ class SharedArgs:
except Exception:
return []

file_storage = FileStorage(config)
return file_storage.list_backends()
store = Store(config)
return store.list_backends()
except Exception:
# Fallback to empty list if FileStorage isn't available
return []
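A minimal usage sketch of the renamed registry, assuming Store(config).list_backends() mirrors the old FileStorage API as the hunk above suggests (the module path cmdlets._shared and the helper name below are illustrative):

    from Store import Store
    from cmdlets._shared import SharedArgs

    def refresh_store_choices(config) -> None:
        # Populate the -store argument choices from whatever backends are configured.
        # get_store_choices() already falls back to [] when nothing is configured.
        SharedArgs.STORE.choices = SharedArgs.get_store_choices(config)
        # Equivalent direct call against the registry:
        # SharedArgs.STORE.choices = Store(config).list_backends()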
@@ -609,7 +606,7 @@ def normalize_hash(hash_hex: Optional[str]) -> Optional[str]:
return text.lower() if text else None


def get_hash_for_operation(override_hash: Optional[str], result: Any, field_name: str = "hash_hex") -> Optional[str]:
def get_hash_for_operation(override_hash: Optional[str], result: Any, field_name: str = "hash") -> Optional[str]:
"""Get normalized hash from override or result object, consolidating common pattern.

Eliminates repeated pattern: normalize_hash(override) if override else normalize_hash(get_field(result, ...))
@@ -617,15 +614,14 @@ def get_hash_for_operation(override_hash: Optional[str], result: Any, field_name
Args:
override_hash: Hash passed as command argument (takes precedence)
result: Object containing hash field (fallback)
field_name: Name of hash field in result object (default: "hash_hex")
field_name: Name of hash field in result object (default: "hash")

Returns:
Normalized hash string, or None if neither override nor result provides valid hash
"""
if override_hash:
return normalize_hash(override_hash)
# Try multiple field names for robustness
hash_value = get_field(result, field_name) or getattr(result, field_name, None) or getattr(result, "hash", None) or result.get("file_hash") if isinstance(result, dict) else None
hash_value = get_field(result, field_name) or getattr(result, field_name, None) or getattr(result, "hash", None)
return normalize_hash(hash_value)
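A short usage sketch (the parsed/result names are illustrative): an explicit -hash argument wins, otherwise the canonical "hash" field of the piped result is used, and the value comes back normalized or None.

    file_hash = get_hash_for_operation(parsed.get("hash"), result)
    if file_hash is None:
        return 1  # neither the CLI override nor the result carried a usable hash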

@@ -645,8 +641,8 @@ def fetch_hydrus_metadata(config: Any, hash_hex: str, **kwargs) -> tuple[Optiona
- metadata_dict: Dict from Hydrus (first item in metadata list) or None if unavailable
- error_code: 0 on success, 1 on any error (suitable for returning from cmdlet execute())
"""
from helper import hydrus
hydrus_wrapper = hydrus
from API import HydrusNetwork
hydrus_wrapper = HydrusNetwork

try:
client = hydrus_wrapper.get_client(config)
@@ -670,24 +666,6 @@ def fetch_hydrus_metadata(config: Any, hash_hex: str, **kwargs) -> tuple[Optiona
return meta, 0


def get_origin(obj: Any, default: Optional[str] = None) -> Optional[str]:
"""Extract origin field with fallback to store/source field, consolidating common pattern.

Supports both dict and object access patterns.

Args:
obj: Object (dict or dataclass) with 'store', 'origin', or 'source' field
default: Default value if none of the fields are found

Returns:
Store/origin/source string, or default if none exist
"""
if isinstance(obj, dict):
return obj.get("store") or obj.get("origin") or obj.get("source") or default
else:
return getattr(obj, "store", None) or getattr(obj, "origin", None) or getattr(obj, "source", None) or default
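With get_origin() removed by this hunk, the replacement pattern seen elsewhere in the commit reads the canonical field directly; a hedged sketch:

    # Old: origin = get_origin(result)        # tried store/origin/source in turn
    # New: read only the canonical 'store' field
    store_name = get_field(result, "store")   # or getattr(pipe_obj, "store", None)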

def get_field(obj: Any, field: str, default: Optional[Any] = None) -> Any:
"""Extract a field from either a dict or object with fallback default.

@@ -706,56 +684,19 @@ def get_field(obj: Any, field: str, default: Optional[Any] = None) -> Any:

Examples:
get_field(result, "hash")  # From dict or object
get_field(result, "origin", "unknown")  # With default
get_field(result, "table", "unknown")  # With default
"""
# Handle lists by accessing the first element
if isinstance(obj, list) and obj:
obj = obj[0]

if isinstance(obj, dict):
# Direct lookup first
val = obj.get(field, default)
if val is not None:
return val
# Fallback aliases for common fields
if field == "path":
for alt in ("file_path", "target", "filepath", "file"):
v = obj.get(alt)
if v:
return v
if field == "hash":
for alt in ("file_hash", "hash_hex"):
v = obj.get(alt)
if v:
return v
if field == "store":
for alt in ("storage", "storage_source", "origin"):
v = obj.get(alt)
if v:
return v
return default
return obj.get(field, default)
else:
# Try direct attribute access first
value = getattr(obj, field, None)
if value is not None:
return value

# Attribute fallback aliases for common fields
if field == "path":
for alt in ("file_path", "target", "filepath", "file", "url"):
v = getattr(obj, alt, None)
if v:
return v
if field == "hash":
for alt in ("file_hash", "hash_hex"):
v = getattr(obj, alt, None)
if v:
return v
if field == "store":
for alt in ("storage", "storage_source", "origin"):
v = getattr(obj, alt, None)
if v:
return v

# For PipeObjects, also check the extra field
if hasattr(obj, 'extra') and isinstance(obj.extra, dict):
@@ -1148,7 +1089,7 @@ def create_pipe_object_result(
file_path: str,
cmdlet_name: str,
title: Optional[str] = None,
file_hash: Optional[str] = None,
hash_value: Optional[str] = None,
is_temp: bool = False,
parent_hash: Optional[str] = None,
tags: Optional[List[str]] = None,
@@ -1165,7 +1106,7 @@ def create_pipe_object_result(
file_path: Path to the file
cmdlet_name: Name of the cmdlet that created this (e.g., 'download-data', 'screen-shot')
title: Human-readable title
file_hash: SHA-256 hash of file (for integrity)
hash_value: SHA-256 hash of file (for integrity)
is_temp: If True, this is a temporary/intermediate artifact
parent_hash: Hash of the parent file in the chain (for provenance)
tags: List of tags to apply
@@ -1183,13 +1124,12 @@ def create_pipe_object_result(

if title:
result['title'] = title
if file_hash:
result['file_hash'] = file_hash
result['hash'] = file_hash
if hash_value:
result['hash'] = hash_value
if is_temp:
result['is_temp'] = True
if parent_hash:
result['parent_id'] = parent_hash  # parent_id is the parent's file_hash
result['parent_hash'] = parent_hash
if tags:
result['tags'] = tags
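A sketch of building a result with the renamed parameter and the canonical keys after this change (the call is abridged and the values are illustrative; other parameters of create_pipe_object_result are not shown in this hunk):

    result = create_pipe_object_result(
        file_path="/tmp/clip.mp4",
        cmdlet_name="download-data",
        title="Example clip",
        hash_value="<sha256>",      # stored under result['hash'] (no more 'file_hash')
        parent_hash="<sha256>",     # stored under result['parent_hash'] (no more 'parent_id')
        tags=["source:example"],
    )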
@@ -1219,17 +1159,17 @@ def mark_as_temp(pipe_object: Dict[str, Any]) -> Dict[str, Any]:
return pipe_object


def set_parent_id(pipe_object: Dict[str, Any], parent_hash: str) -> Dict[str, Any]:
"""Set the parent_id for provenance tracking.
def set_parent_hash(pipe_object: Dict[str, Any], parent_hash: str) -> Dict[str, Any]:
"""Set the parent_hash for provenance tracking.

Args:
pipe_object: Result dict
parent_hash: Parent file's hash

Returns:
Modified dict with parent_id set to the hash
Modified dict with parent_hash set to the hash
"""
pipe_object['parent_id'] = parent_hash
pipe_object['parent_hash'] = parent_hash
return pipe_object


@@ -1254,13 +1194,13 @@ def get_pipe_object_hash(pipe_object: Any) -> Optional[str]:
"""Extract file hash from PipeObject, dict, or pipeline-friendly object."""
if pipe_object is None:
return None
for attr in ('file_hash', 'hash_hex', 'hash'):
for attr in ('hash',):
if hasattr(pipe_object, attr):
value = getattr(pipe_object, attr)
if value:
return value
if isinstance(pipe_object, dict):
for key in ('file_hash', 'hash_hex', 'hash'):
for key in ('hash',):
value = pipe_object.get(key)
if value:
return value
@@ -1522,13 +1462,12 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
"""
# Debug: Print ResultItem details if coming from search_file.py
try:
from helper.logger import is_debug_enabled, debug
from SYS.logger import is_debug_enabled, debug
if is_debug_enabled() and hasattr(value, '__class__') and value.__class__.__name__ == 'ResultItem':
debug("[ResultItem -> PipeObject conversion]")
debug(f" origin={getattr(value, 'origin', None)}")
debug(f" title={getattr(value, 'title', None)}")
debug(f" target={getattr(value, 'target', None)}")
debug(f" hash_hex={getattr(value, 'hash_hex', None)}")
debug(f" hash={getattr(value, 'hash', None)}")
debug(f" media_kind={getattr(value, 'media_kind', None)}")
debug(f" tags={getattr(value, 'tags', None)}")
debug(f" tag_summary={getattr(value, 'tag_summary', None)}")
@@ -1554,14 +1493,11 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod

if isinstance(value, dict):
# Extract hash and store (canonical identifiers)
hash_val = value.get("hash") or value.get("file_hash")
# Recognize multiple possible store naming conventions (store, origin, storage, storage_source)
store_val = value.get("store") or value.get("origin") or value.get("storage") or value.get("storage_source") or "PATH"
# If the store value is embedded under extra, also detect it
if not store_val or store_val in ("local", "PATH"):
extra_store = None
hash_val = value.get("hash")
store_val = value.get("store") or "PATH"
if not store_val or store_val == "PATH":
try:
extra_store = value.get("extra", {}).get("store") or value.get("extra", {}).get("storage") or value.get("extra", {}).get("storage_source")
extra_store = value.get("extra", {}).get("store")
except Exception:
extra_store = None
if extra_store:
@@ -1572,7 +1508,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
path_val = value.get("path")
if path_val:
try:
from helper.utils import sha256_file
from SYS.utils import sha256_file
from pathlib import Path
hash_val = sha256_file(Path(path_val))
except Exception:
@@ -1655,7 +1591,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
relationships=rels,
is_temp=bool(value.get("is_temp", False)),
action=value.get("action"),
parent_hash=value.get("parent_hash") or value.get("parent_id"),
parent_hash=value.get("parent_hash"),
extra=extra,
)

@@ -1671,7 +1607,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod

if path_val and path_val != "unknown":
try:
from helper.utils import sha256_file
from SYS.utils import sha256_file
from pathlib import Path
path_obj = Path(path_val)
hash_val = sha256_file(path_obj)
@@ -1714,7 +1650,7 @@ def register_url_with_local_library(pipe_obj: models.PipeObject, config: Dict[st

try:
from config import get_local_storage_path
from helper.folder_store import FolderDB
from API.folder import API_folder_store

file_path = get_field(pipe_obj, "path")
url_field = get_field(pipe_obj, "url", [])
@@ -1735,7 +1671,7 @@ def register_url_with_local_library(pipe_obj: models.PipeObject, config: Dict[st
if not storage_path:
return False

with FolderDB(storage_path) as db:
with API_folder_store(storage_path) as db:
file_hash = db.get_file_hash(path_obj)
if not file_hash:
return False
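A minimal sketch of the renamed folder-store handle used above, assuming API_folder_store keeps the FolderDB context-manager interface shown in this hunk (the helper name below is illustrative):

    from API.folder import API_folder_store

    def lookup_hash(storage_path, path_obj):
        # Returns the stored SHA-256 for a library file, or None if it is not registered.
        with API_folder_store(storage_path) as db:
            return db.get_file_hash(path_obj)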
@@ -7,26 +7,22 @@ import shutil

import models
import pipeline as ctx
from helper import hydrus as hydrus_wrapper
from helper.logger import log, debug
from helper.store import FileStorage
from API import HydrusNetwork as hydrus_wrapper
from SYS.logger import log, debug
from Store import Store
from ._shared import (
Cmdlet, CmdletArg, parse_cmdlet_args, SharedArgs,
extract_tags_from_result, extract_title_from_result, extract_url_from_result,
merge_sequences, extract_relationships, extract_duration, get_origin, coerce_to_pipe_object
merge_sequences, extract_relationships, extract_duration, coerce_to_pipe_object
)
from ._shared import collapse_namespace_tags
from helper.folder_store import read_sidecar, find_sidecar, write_sidecar, FolderDB
from helper.utils import sha256_file, unique_path
from API.folder import read_sidecar, find_sidecar, write_sidecar, API_folder_store
from SYS.utils import sha256_file, unique_path
from metadata import write_metadata

# Use official Hydrus supported filetypes from hydrus_wrapper
SUPPORTED_MEDIA_EXTENSIONS = hydrus_wrapper.ALL_SUPPORTED_EXTENSIONS

# Initialize file storage system
storage = FileStorage()


class Add_File(Cmdlet):
"""Add file into the DB"""

@@ -96,8 +92,11 @@ class Add_File(Cmdlet):
media_path_or_url, file_hash = self._resolve_source(result, path_arg, pipe_obj, config)
debug(f"[add-file] RESOLVED source: path={media_path_or_url}, hash={file_hash[:12] if file_hash else 'N/A'}...")
if not media_path_or_url:
debug(f"[add-file] ERROR: Could not resolve source file/URL")
return 1
debug(f"[add-file] ERROR: Could not resolve source file/URL")
return 1

# Update pipe_obj with resolved path
pipe_obj.path = str(media_path_or_url) if isinstance(media_path_or_url, (str, Path)) else str(media_path_or_url)

# Check if it's a URL before validating as file
if isinstance(media_path_or_url, str) and media_path_or_url.lower().startswith(("http://", "https://", "magnet:", "torrent:")):
@@ -116,15 +115,15 @@ class Add_File(Cmdlet):
debug(f"[add-file] DECISION POINT: provider={provider_name}, location={location}")
debug(f" media_path={media_path}, exists={media_path.exists()}")

# Execute transfer based on destination (using class-based FileStorage system)
# Execute transfer based on destination (using Store registry)
if provider_name:
debug(f"[add-file] ROUTE: file provider upload")
return self._handle_provider_upload(media_path, provider_name, pipe_obj, config, delete_after)
elif location:
# Check if location is a registered backend name using FileStorage
# Check if location is a registered backend name
try:
storage = FileStorage(config)
backends = storage.list_backends()
store = Store(config)
backends = store.list_backends()

if location in backends:
debug(f"[add-file] ROUTE: storage backend '{location}'")
@@ -165,15 +164,19 @@ class Add_File(Cmdlet):
debug(f"[add-file] Using hash+store from result: hash={result_hash[:12]}..., store={result_store}")
# Use get_file to retrieve from the specific store
try:
from helper.store import FileStorage
storage = FileStorage(config)
if result_store in storage.list_backends():
backend = storage[result_store]
store = Store(config)
if result_store in store.list_backends():
backend = store[result_store]
media_path = backend.get_file(result_hash)
if media_path and media_path.exists():
if isinstance(media_path, Path) and media_path.exists():
pipe_obj.path = str(media_path)
debug(f"[add-file] Retrieved file from {result_store}: {media_path}")
return media_path, result_hash

if isinstance(media_path, str) and media_path.lower().startswith(("http://", "https://")):
pipe_obj.path = media_path
debug(f"[add-file] Retrieved URL from {result_store}: {media_path}")
return media_path, result_hash
except Exception as exc:
debug(f"[add-file] Failed to retrieve via hash+store: {exc}")
@@ -385,20 +388,6 @@ class Add_File(Cmdlet):
url = list(extract_url_from_result(result) or [])
return url

@staticmethod
def _get_origin(result: Any, pipe_obj: models.PipeObject) -> Optional[str]:
try:
if isinstance(pipe_obj.extra, dict):
origin = get_origin(pipe_obj.extra)
if origin:
return origin
except Exception:
pass

if isinstance(result, dict):
return get_origin(result)
return None

@staticmethod
def _get_relationships(result: Any, pipe_obj: models.PipeObject) -> Optional[Dict[str, Any]]:
try:
@@ -427,16 +416,16 @@ class Add_File(Cmdlet):
def _update_pipe_object_destination(
pipe_obj: models.PipeObject,
*,
hash: str,
hash_value: str,
store: str,
file_path: str,
path: Optional[str],
tags: List[str],
title: Optional[str],
extra_updates: Optional[Dict[str, Any]] = None,
) -> None:
pipe_obj.hash = hash
pipe_obj.hash = hash_value
pipe_obj.store = store
pipe_obj.path = file_path
pipe_obj.path = path
pipe_obj.tags = tags
if title:
pipe_obj.title = title
@@ -485,9 +474,9 @@ class Add_File(Cmdlet):
if preferred_title:
preferred_title = preferred_title.replace("_", " ").strip()

result_origin = Add_File._get_origin(result, pipe_obj)
store = getattr(pipe_obj, "store", None)
_, sidecar_hash, sidecar_tags, sidecar_url = Add_File._load_sidecar_bundle(
media_path, result_origin, config
media_path, store, config
)

def normalize_title_tag(tag: str) -> str:
@@ -589,7 +578,6 @@ class Add_File(Cmdlet):

# Update PipeObject and emit
extra_updates = {
"storage_source": "local",
"url": url,
"export_path": str(destination_root),
}
@@ -600,9 +588,9 @@ class Add_File(Cmdlet):

Add_File._update_pipe_object_destination(
pipe_obj,
hash=f_hash or "unknown",
hash_value=f_hash or "unknown",
store="local",
file_path=str(target_path),
path=str(target_path),
tags=tags,
title=chosen_title,
extra_updates=extra_updates,
@@ -615,6 +603,78 @@ class Add_File(Cmdlet):

return 0

@staticmethod
def _download_soulseek_file(
result: Any,
config: Dict[str, Any]
) -> Optional[Path]:
"""
Download a file from Soulseek peer.

Extracts username and filename from soulseek result metadata and initiates download.
"""
try:
import asyncio
from Provider.registry import download_soulseek_file
from pathlib import Path

# Extract metadata from result
full_metadata = {}
if isinstance(result, dict):
full_metadata = result.get("full_metadata", {})
elif hasattr(result, "extra") and isinstance(result.extra, dict) and "full_metadata" in result.extra:
full_metadata = result.extra.get("full_metadata", {})
elif hasattr(result, "full_metadata"):
# Direct attribute access (fallback)
val = getattr(result, "full_metadata", {})
if isinstance(val, dict):
full_metadata = val

username = full_metadata.get("username")
filename = full_metadata.get("filename")

if not username or not filename:
debug(f"[add-file] ERROR: Could not extract soulseek metadata from result (type={type(result).__name__})")
if hasattr(result, "extra"):
debug(f"[add-file] Result extra keys: {list(result.extra.keys())}")
return None

if not username or not filename:
debug(f"[add-file] ERROR: Missing soulseek metadata (username={username}, filename={filename})")
return None

debug(f"[add-file] Starting soulseek download: {username} -> {filename}")

# Determine output directory (prefer downloads folder in config)
output_dir = Path(config.get("output_dir", "./downloads")) if isinstance(config.get("output_dir"), str) else Path("./downloads")
output_dir.mkdir(parents=True, exist_ok=True)

# Run async download in event loop
try:
loop = asyncio.get_event_loop()
if loop.is_closed():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

downloaded_path = loop.run_until_complete(
download_soulseek_file(
username=username,
filename=filename,
output_dir=output_dir,
timeout=1200  # 20 minutes
)
)

return downloaded_path

except Exception as e:
log(f"[add-file] Soulseek download error: {type(e).__name__}: {e}", file=sys.stderr)
debug(f"[add-file] Soulseek download traceback: {e}")
return None
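The event-loop handling in the method above follows a common "reuse the loop or create one" pattern; a self-contained sketch of the same idea (on newer Python, asyncio.run is usually enough when no loop is already running):

    import asyncio

    def run_blocking(coro):
        """Run a coroutine to completion, creating a fresh event loop if needed."""
        try:
            loop = asyncio.get_event_loop()
            if loop.is_closed():
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        return loop.run_until_complete(coro)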
@staticmethod
def _handle_provider_upload(
media_path: Path,
@@ -624,7 +684,7 @@ class Add_File(Cmdlet):
delete_after: bool,
) -> int:
"""Handle uploading to a file provider (e.g. 0x0)."""
from helper.provider import get_file_provider
from Provider.registry import get_file_provider

log(f"Uploading via {provider_name}: {media_path.name}", file=sys.stderr)

@@ -666,9 +726,9 @@ class Add_File(Cmdlet):
file_path = pipe_obj.path or (str(media_path) if media_path else None) or ""
Add_File._update_pipe_object_destination(
pipe_obj,
hash=f_hash or "unknown",
hash_value=f_hash or "unknown",
store=provider_name or "provider",
file_path=file_path,
path=file_path,
tags=pipe_obj.tags,
title=pipe_obj.title or (media_path.name if media_path else None),
extra_updates=extra_updates,
@@ -687,14 +747,11 @@ class Add_File(Cmdlet):
delete_after: bool,
) -> int:
"""Handle uploading to a registered storage backend (e.g., 'test' folder store, 'hydrus', etc.)."""
from config import load_config

log(f"Adding file to storage backend '{backend_name}': {media_path.name}", file=sys.stderr)

try:
cfg = load_config()
storage = FileStorage(cfg)
backend = storage[backend_name]
store = Store(config)
backend = store[backend_name]

# Prepare metadata from pipe_obj and sidecars
tags, url, title, f_hash = Add_File._prepare_metadata(None, media_path, pipe_obj, config)
@@ -708,24 +765,26 @@ class Add_File(Cmdlet):
url=url
)
log(f"✓ File added to '{backend_name}': {file_identifier}", file=sys.stderr)

# Update pipe object with result
# For backends that return paths, file_path = identifier
# For backends that return hashes, file_path = "backend:hash"
file_path_str = str(file_identifier)
if len(file_identifier) == 64 and all(c in '0123456789abcdef' for c in file_identifier.lower()):
# It's a hash - use backend:hash format
file_path_str = f"{backend_name}:{file_identifier}"

stored_path: Optional[str] = None
try:
maybe_path = backend.get_file(file_identifier)
if isinstance(maybe_path, Path):
stored_path = str(maybe_path)
elif isinstance(maybe_path, str) and maybe_path:
# Some backends may return a browser URL
stored_path = maybe_path
except Exception:
stored_path = None

Add_File._update_pipe_object_destination(
pipe_obj,
hash=file_identifier if len(file_identifier) == 64 else f_hash or "unknown",
hash_value=file_identifier if len(file_identifier) == 64 else f_hash or "unknown",
store=backend_name,
file_path=file_path_str,
path=stored_path,
tags=tags,
title=title or pipe_obj.title or media_path.name,
extra_updates={
"storage_source": backend_name,
"url": url,
},
)
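The identifier handling above distinguishes hash-returning backends from path-returning ones; a hedged helper expressing the same convention (the function is illustrative; the 64-hex test matches the code in this hunk):

    def format_identifier(backend_name: str, file_identifier: str) -> str:
        ident = str(file_identifier)
        is_sha256 = len(ident) == 64 and all(c in "0123456789abcdef" for c in ident.lower())
        # Hash-returning backends are recorded as "backend:hash"; path-returning ones keep the path.
        return f"{backend_name}:{ident}" if is_sha256 else ident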
@@ -745,16 +804,16 @@ class Add_File(Cmdlet):
@staticmethod
def _load_sidecar_bundle(
media_path: Path,
origin: Optional[str],
store: Optional[str],
config: Dict[str, Any],
) -> Tuple[Optional[Path], Optional[str], List[str], List[str]]:
"""Load sidecar metadata."""
if origin and origin.lower() == "local":
if store and store.lower() == "local":
try:
from config import get_local_storage_path
db_root = get_local_storage_path(config)
if db_root:
with FolderDB(Path(db_root)) as db:
with API_folder_store(Path(db_root)) as db:
file_hash = db.get_file_hash(media_path)
if file_hash:
tags = db.get_tags(file_hash) or []
@@ -837,7 +896,7 @@ class Add_File(Cmdlet):
except OSError:
payload['size'] = None

with FolderDB(library_root) as db:
with API_folder_store(library_root) as db:
try:
db.save_file_info(dest_path, payload, tags)
except Exception as exc:
@@ -6,9 +6,9 @@ import json
from . import register
import models
import pipeline as ctx
from helper import hydrus as hydrus_wrapper
from API import HydrusNetwork as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, normalize_hash, should_show_help
from helper.logger import log
from SYS.logger import log

CMDLET = Cmdlet(
name="add-note",

@@ -8,14 +8,14 @@ import re
from pathlib import Path
import sys

from helper.logger import log
from SYS.logger import log

from . import register
import models
import pipeline as ctx
from helper import hydrus as hydrus_wrapper
from API import HydrusNetwork as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, parse_cmdlet_args, normalize_result_input, should_show_help, get_field
from helper.folder_store import read_sidecar, find_sidecar
from API.folder import read_sidecar, find_sidecar


CMDLET = Cmdlet(
@@ -228,7 +228,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
items_to_process = [{"file_path": arg_path}]

# Import local storage utilities
from helper.folder_store import LocalLibrarySearchOptimizer
from API.folder import LocalLibrarySearchOptimizer
from config import get_local_storage_path

local_storage_path = get_local_storage_path(config) if config else None

@@ -4,13 +4,13 @@ from typing import Any, Dict, List, Sequence, Optional
from pathlib import Path
import sys

from helper.logger import log
from SYS.logger import log

import models
import pipeline as ctx
from ._shared import normalize_result_input, filter_results_by_temp
from helper import hydrus as hydrus_wrapper
from helper.folder_store import write_sidecar, FolderDB
from API import HydrusNetwork as hydrus_wrapper
from API.folder import write_sidecar, API_folder_store
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, expand_tag_groups, parse_cmdlet_args, collapse_namespace_tags, should_show_help, get_field
from config import get_local_storage_path
@@ -91,17 +91,16 @@ class Add_Tag(Cmdlet):
res["columns"] = updated

@staticmethod
def _matches_target(item: Any, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str]) -> bool:
def _matches_target(item: Any, file_hash: Optional[str], path: Optional[str]) -> bool:
"""Determine whether a result item refers to the given hash/path target."""
hydrus_hash_l = hydrus_hash.lower() if hydrus_hash else None
file_hash_l = file_hash.lower() if file_hash else None
file_path_l = file_path.lower() if file_path else None
path_l = path.lower() if path else None

def norm(val: Any) -> Optional[str]:
return str(val).lower() if val is not None else None

hash_fields = ["hydrus_hash", "hash", "hash_hex", "file_hash"]
path_fields = ["path", "file_path", "target"]
hash_fields = ["hash"]
path_fields = ["path", "target"]

if isinstance(item, dict):
hashes = [norm(item.get(field)) for field in hash_fields]
@@ -110,11 +109,9 @@ class Add_Tag(Cmdlet):
hashes = [norm(get_field(item, field)) for field in hash_fields]
paths = [norm(get_field(item, field)) for field in path_fields]

if hydrus_hash_l and hydrus_hash_l in hashes:
return True
if file_hash_l and file_hash_l in hashes:
return True
if file_path_l and file_path_l in paths:
if path_l and path_l in paths:
return True
return False

@@ -146,7 +143,7 @@ class Add_Tag(Cmdlet):
if changed:
item["columns"] = updated_cols

def _refresh_result_table_title(self, new_title: str, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str]) -> None:
def _refresh_result_table_title(self, new_title: str, file_hash: Optional[str], path: Optional[str]) -> None:
"""Refresh the cached result table with an updated title and redisplay it."""
try:
last_table = ctx.get_last_result_table()
@@ -158,7 +155,7 @@ class Add_Tag(Cmdlet):
match_found = False
for item in items:
try:
if self._matches_target(item, hydrus_hash, file_hash, file_path):
if self._matches_target(item, file_hash, path):
self._update_item_title_fields(item, new_title)
match_found = True
except Exception:
@@ -178,21 +175,21 @@ class Add_Tag(Cmdlet):
except Exception:
pass

def _refresh_tags_view(self, res: Any, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str], config: Dict[str, Any]) -> None:
def _refresh_tags_view(self, res: Any, file_hash: Optional[str], path: Optional[str], config: Dict[str, Any]) -> None:
"""Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
try:
from cmdlets import get_tag as get_tag_cmd  # type: ignore
except Exception:
return

target_hash = hydrus_hash or file_hash
target_hash = file_hash
refresh_args: List[str] = []
if target_hash:
refresh_args = ["-hash", target_hash, "-store", target_hash]
refresh_args = ["-hash", target_hash]

try:
subject = ctx.get_last_result_subject()
if subject and self._matches_target(subject, hydrus_hash, file_hash, file_path):
if subject and self._matches_target(subject, file_hash, path):
get_tag_cmd._run(subject, refresh_args, config)
return
except Exception:
@@ -217,7 +214,7 @@ class Add_Tag(Cmdlet):

# Get explicit -hash and -store overrides from CLI
hash_override = normalize_hash(parsed.get("hash"))
store_override = parsed.get("store") or parsed.get("storage")
store_override = parsed.get("store")

# Normalize input to list
results = normalize_result_input(result)
@@ -327,7 +324,7 @@ class Add_Tag(Cmdlet):
file_path = get_field(res, "path")
# Try tags from top-level 'tags' or from 'extra.tags'
tags = get_field(res, "tags") or (get_field(res, "extra") or {}).get("tags", [])
file_hash = get_field(res, "hash") or get_field(res, "file_hash") or get_field(res, "hash_hex") or ""
file_hash = get_field(res, "hash") or ""
if not file_path:
log(f"[add_tag] Warning: Result has no path, skipping", file=sys.stderr)
ctx.emit(res)
@@ -362,16 +359,8 @@ class Add_Tag(Cmdlet):
existing_tags = get_field(res, "tags") or []
if not existing_tags:
existing_tags = (get_field(res, "extra", {}) or {}).get("tags") or []
file_hash = get_field(res, "hash") or get_field(res, "file_hash") or get_field(res, "hash_hex") or ""
storage_source = get_field(res, "store") or get_field(res, "storage") or get_field(res, "storage_source") or get_field(res, "origin")
hydrus_hash = get_field(res, "hydrus_hash") or file_hash

# Infer storage source from result if not found
if not storage_source:
if file_path:
storage_source = 'local'
elif file_hash and file_hash != "unknown":
storage_source = 'hydrus'
file_hash = get_field(res, "hash") or ""
store_name = store_override or get_field(res, "store")

original_tags_lower = {str(t).lower() for t in existing_tags if isinstance(t, str)}
original_title = self._extract_title_tag(list(existing_tags))
@@ -379,8 +368,10 @@ class Add_Tag(Cmdlet):
# Apply CLI overrides if provided
if hash_override and not file_hash:
file_hash = hash_override
if store_override and not storage_source:
storage_source = store_override
if not store_name:
log("[add_tag] Missing store (use -store or pipe a result with store)", file=sys.stderr)
ctx.emit(res)
continue

# Check if we have sufficient identifier (file_path OR file_hash)
if not file_path and not file_hash:
@@ -418,146 +409,78 @@ class Add_Tag(Cmdlet):
new_tags_added = []
final_tags = list(existing_tags) if existing_tags else []

# Determine where to add tags: Hydrus or Folder storage
if storage_source and storage_source.lower() == 'hydrus':
# Add tags to Hydrus using the API
target_hash = file_hash
if target_hash:
try:
hydrus_client = hydrus_wrapper.get_client(config)
service_name = hydrus_wrapper.get_tag_service_name(config)

# For namespaced tags, remove old tags in same namespace
removed_tags = []
for new_tag in tags_to_add:
if ':' in new_tag:
namespace = new_tag.split(':', 1)[0]
to_remove = [t for t in existing_tags if t.startswith(namespace + ':') and t.lower() != new_tag.lower()]
removed_tags.extend(to_remove)

# Add new tags
if tags_to_add:
log(f"[add_tag] Adding {len(tags_to_add)} tag(s) to Hydrus file: {target_hash}", file=sys.stderr)
hydrus_client.add_tags(target_hash, tags_to_add, service_name)

# Delete replaced namespace tags
if removed_tags:
unique_removed = sorted(set(removed_tags))
hydrus_client.delete_tags(target_hash, unique_removed, service_name)

if tags_to_add or removed_tags:
total_new_tags += len(tags_to_add)
total_modified += 1
log(f"[add_tag] ✓ Added {len(tags_to_add)} tag(s) to Hydrus", file=sys.stderr)
# Refresh final tag list from the backend for accurate display
try:
from helper.store import FileStorage
storage = FileStorage(config)
if storage and storage_source in storage.list_backends():
backend = storage[storage_source]
refreshed_tags, _ = backend.get_tag(target_hash)
if refreshed_tags is not None:
final_tags = refreshed_tags
new_tags_added = [t for t in refreshed_tags if t.lower() not in original_tags_lower]
# Update result tags for downstream cmdlets/UI
if isinstance(res, models.PipeObject):
res.tags = refreshed_tags
if isinstance(res.extra, dict):
res.extra['tags'] = refreshed_tags
elif isinstance(res, dict):
res['tags'] = refreshed_tags
except Exception:
# Ignore failures - this is best-effort for refreshing tag state
pass
except Exception as e:
log(f"[add_tag] Warning: Failed to add tags to Hydrus: {e}", file=sys.stderr)
else:
log(f"[add_tag] Warning: No hash available for Hydrus file, skipping", file=sys.stderr)
elif storage_source:
# For any Folder-based storage (local, test, default, etc.), delegate to backend
# If storage_source is not a registered backend, fallback to writing a sidecar
from helper.store import FileStorage
storage = FileStorage(config)
# Resolve hash from path if needed
if not file_hash and file_path:
try:
if storage and storage_source in storage.list_backends():
backend = storage[storage_source]
if file_hash and backend.add_tag(file_hash, tags_to_add):
# Refresh tags from backend to get merged result
refreshed_tags, _ = backend.get_tag(file_hash)
if refreshed_tags:
# Update result tags
if isinstance(res, models.PipeObject):
res.tags = refreshed_tags
# Also keep as extra for compatibility
if isinstance(res.extra, dict):
res.extra['tags'] = refreshed_tags
elif isinstance(res, dict):
res['tags'] = refreshed_tags
from SYS.utils import sha256_file
file_hash = sha256_file(Path(file_path))
except Exception:
file_hash = ""

# Update title if changed
title_value = self._extract_title_tag(refreshed_tags)
self._apply_title_to_result(res, title_value)

# Compute stats
new_tags_added = [t for t in refreshed_tags if t.lower() not in original_tags_lower]
total_new_tags += len(new_tags_added)
if new_tags_added:
total_modified += 1

log(f"[add_tag] Added {len(new_tags_added)} new tag(s); {len(refreshed_tags)} total tag(s) stored in {storage_source}", file=sys.stderr)
final_tags = refreshed_tags
else:
log(f"[add_tag] Warning: Failed to add tags to {storage_source}", file=sys.stderr)
else:
# Not a registered backend - fallback to sidecar if we have a path
if file_path:
try:
sidecar_path = write_sidecar(Path(file_path), tags_to_add, [], file_hash)
log(f"[add_tag] Wrote {len(tags_to_add)} tag(s) to sidecar: {sidecar_path}", file=sys.stderr)
total_new_tags += len(tags_to_add)
total_modified += 1
# Update res tags
if isinstance(res, models.PipeObject):
res.tags = (res.tags or []) + tags_to_add
if isinstance(res.extra, dict):
res.extra['tags'] = res.tags
elif isinstance(res, dict):
res['tags'] = list(set((res.get('tags') or []) + tags_to_add))
except Exception as exc:
log(f"[add_tag] Warning: Failed to write sidecar for {file_path}: {exc}", file=sys.stderr)
else:
log(f"[add_tag] Warning: Storage backend '{storage_source}' not found in config", file=sys.stderr)
except KeyError:
# storage[storage_source] raised KeyError - treat as absent backend
if file_path:
try:
sidecar_path = write_sidecar(Path(file_path), tags_to_add, [], file_hash)
log(f"[add_tag] Wrote {len(tags_to_add)} tag(s) to sidecar: {sidecar_path}", file=sys.stderr)
total_new_tags += len(tags_to_add)
total_modified += 1
# Update res tags for downstream
if isinstance(res, models.PipeObject):
res.tags = (res.tags or []) + tags_to_add
if isinstance(res.extra, dict):
res.extra['tags'] = res.tags
elif isinstance(res, dict):
res['tags'] = list(set((res.get('tags') or []) + tags_to_add))
except Exception as exc:
log(f"[add_tag] Warning: Failed to write sidecar for {file_path}: {exc}", file=sys.stderr)
else:
log(f"[add_tag] Warning: Storage backend '{storage_source}' not found in config", file=sys.stderr)
else:
# For other storage types or unknown sources, avoid writing sidecars to reduce clutter
# (local/hydrus are handled above).
if not file_hash:
log("[add_tag] Warning: No hash available, skipping", file=sys.stderr)
ctx.emit(res)
continue

# Route tag updates through the configured store backend
try:
storage = Store(config)
backend = storage[store_name]

# For namespaced tags, compute old tags in same namespace to remove
removed_tags = []
for new_tag in tags_to_add:
if ':' in new_tag:
namespace = new_tag.split(':', 1)[0]
to_remove = [t for t in existing_tags if t.startswith(namespace + ':') and t.lower() != new_tag.lower()]
removed_tags.extend(to_remove)

ok = backend.add_tag(file_hash, tags_to_add, config=config)
if removed_tags:
unique_removed = sorted(set(removed_tags))
backend.delete_tag(file_hash, unique_removed, config=config)

if not ok:
log(f"[add_tag] Warning: Failed to add tags via store '{store_name}'", file=sys.stderr)
ctx.emit(res)
continue

refreshed_tags, _ = backend.get_tag(file_hash, config=config)
refreshed_tags = list(refreshed_tags or [])
final_tags = refreshed_tags
new_tags_added = [t for t in refreshed_tags if t.lower() not in original_tags_lower]

# Update result tags for downstream cmdlets/UI
if isinstance(res, models.PipeObject):
res.tags = refreshed_tags
if isinstance(res.extra, dict):
res.extra['tags'] = refreshed_tags
elif isinstance(res, dict):
res['tags'] = refreshed_tags

# Update title if changed
title_value = self._extract_title_tag(refreshed_tags)
self._apply_title_to_result(res, title_value)

total_new_tags += len(new_tags_added)
if new_tags_added:
total_modified += 1
except KeyError:
log(f"[add_tag] Store '{store_name}' not configured", file=sys.stderr)
ctx.emit(res)
continue
except Exception as e:
log(f"[add_tag] Warning: Backend error for store '{store_name}': {e}", file=sys.stderr)
ctx.emit(res)
continue

# If title changed, refresh the cached result table so the display reflects the new name
final_title = self._extract_title_tag(final_tags)
if final_title and (not original_title or final_title.lower() != original_title.lower()):
self._refresh_result_table_title(final_title, hydrus_hash or file_hash, file_hash, file_path)
# If tags changed, refresh tag view via get-tag (prefer current subject; fall back to hash refresh)
self._refresh_result_table_title(final_title, file_hash, file_path)
# If tags changed, refresh tag view via get-tag
if new_tags_added or removed_tags:
self._refresh_tags_view(res, hydrus_hash, file_hash, file_path, config)
self._refresh_tags_view(res, file_hash, file_path, config)
# Emit the modified result
ctx.emit(res)
log(f"[add_tag] Added {total_new_tags} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)", file=sys.stderr)
@@ -4,16 +4,25 @@ from typing import Any, Dict, List, Sequence, Optional
from pathlib import Path
import sys

from helper.logger import log
from SYS.logger import log

import models
import pipeline as ctx
from ._shared import normalize_result_input, filter_results_by_temp
from helper import hydrus as hydrus_wrapper
from helper.folder_store import read_sidecar, write_sidecar, find_sidecar, has_sidecar, FolderDB
from metadata import rename
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, expand_tag_groups, parse_cmdlet_args, collapse_namespace_tags, should_show_help, get_field
from config import get_local_storage_path
from ._shared import (
Cmdlet,
CmdletArg,
SharedArgs,
normalize_hash,
parse_tag_arguments,
expand_tag_groups,
parse_cmdlet_args,
collapse_namespace_tags,
should_show_help,
get_field,
)
from Store import Store
from SYS.utils import sha256_file


def _extract_title_tag(tags: List[str]) -> Optional[str]:
@@ -57,31 +66,25 @@ def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
res["columns"] = updated


def _matches_target(item: Any, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str]) -> bool:
"""Determine whether a result item refers to the given hash/path target."""
hydrus_hash_l = hydrus_hash.lower() if hydrus_hash else None
file_hash_l = file_hash.lower() if file_hash else None
file_path_l = file_path.lower() if file_path else None
def _matches_target(item: Any, target_hash: Optional[str], target_path: Optional[str]) -> bool:
"""Determine whether a result item refers to the given hash/path target (canonical fields only)."""

def norm(val: Any) -> Optional[str]:
return str(val).lower() if val is not None else None

# Define field names to check for hashes and paths
hash_fields = ["hydrus_hash", "hash", "hash_hex", "file_hash"]
path_fields = ["path", "file_path", "target"]
target_hash_l = target_hash.lower() if target_hash else None
target_path_l = target_path.lower() if target_path else None

if isinstance(item, dict):
hashes = [norm(item.get(field)) for field in hash_fields]
paths = [norm(item.get(field)) for field in path_fields]
hashes = [norm(item.get("hash"))]
paths = [norm(item.get("path"))]
else:
hashes = [norm(get_field(item, field)) for field in hash_fields]
paths = [norm(get_field(item, field)) for field in path_fields]
hashes = [norm(get_field(item, "hash"))]
paths = [norm(get_field(item, "path"))]

if hydrus_hash_l and hydrus_hash_l in hashes:
if target_hash_l and target_hash_l in hashes:
return True
if file_hash_l and file_hash_l in hashes:
return True
if file_path_l and file_path_l in paths:
if target_path_l and target_path_l in paths:
return True
return False

@@ -114,7 +117,7 @@ def _update_item_title_fields(item: Any, new_title: str) -> None:
item["columns"] = updated_cols


def _refresh_result_table_title(new_title: str, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str]) -> None:
def _refresh_result_table_title(new_title: str, target_hash: Optional[str], target_path: Optional[str]) -> None:
"""Refresh the cached result table with an updated title and redisplay it."""
try:
last_table = ctx.get_last_result_table()
@@ -126,7 +129,7 @@ def _refresh_result_table_title(new_title: str, hydrus_hash: Optional[str], file
match_found = False
for item in items:
try:
if _matches_target(item, hydrus_hash, file_hash, file_path):
if _matches_target(item, target_hash, target_path):
_update_item_title_fields(item, new_title)
match_found = True
except Exception:
@@ -149,31 +152,30 @@ def _refresh_result_table_title(new_title: str, hydrus_hash: Optional[str], file
pass


def _refresh_tags_view(res: Any, hydrus_hash: Optional[str], file_hash: Optional[str], file_path: Optional[str], config: Dict[str, Any]) -> None:
def _refresh_tags_view(res: Any, target_hash: Optional[str], store_name: Optional[str], target_path: Optional[str], config: Dict[str, Any]) -> None:
"""Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
try:
from cmdlets import get_tag as get_tag_cmd  # type: ignore
except Exception:
return

target_hash = hydrus_hash or file_hash
refresh_args: List[str] = []
if target_hash:
refresh_args = ["-hash", target_hash, "-store", target_hash]
if not target_hash or not store_name:
return

refresh_args: List[str] = ["-hash", target_hash, "-store", store_name]

try:
subject = ctx.get_last_result_subject()
if subject and _matches_target(subject, hydrus_hash, file_hash, file_path):
if subject and _matches_target(subject, target_hash, target_path):
get_tag_cmd._run(subject, refresh_args, config)
return
except Exception:
pass

if target_hash:
try:
get_tag_cmd._run(res, refresh_args, config)
except Exception:
pass
try:
get_tag_cmd._run(res, refresh_args, config)
except Exception:
pass



@@ -183,10 +185,11 @@ class Add_Tag(Cmdlet):
def __init__(self) -> None:
super().__init__(
name="add-tags",
summary="Add tags to a Hydrus file or write them to a local .tags sidecar.",
usage="add-tags [-hash <sha256>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
summary="Add tags to a file in a store.",
usage="add-tags -store <store> [-hash <sha256>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
arg=[
SharedArgs.HASH,
SharedArgs.STORE,
CmdletArg("-duplicate", type="string", description="Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)"),
CmdletArg("-list", type="string", description="Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult)."),
CmdletArg("--all", type="flag", description="Include temporary files in tagging (by default, only tags non-temporary files)."),
@@ -194,11 +197,11 @@ class Add_Tag(Cmdlet):
],
detail=[
"- By default, only tags non-temporary files (from pipelines). Use --all to tag everything.",
"- Without -hash and when the selection is a local file, tags are written to <file>.tags.",
"- With a Hydrus hash, tags are sent to the 'my tags' service.",
"- Requires a store backend: use -store or pipe items that include store.",
"- If -hash is not provided, uses the piped item's hash (or derives from its path when possible).",
"- Multiple tags can be comma-separated or space-separated.",
"- Use -list to include predefined tag lists from adjective.json: -list philosophy,occult",
"- Tags can also reference lists with curly braces: add-tag {philosophy} \"other:tag\"",
"- Tags can also reference lists with curly braces: add-tags {philosophy} \"other:tag\"",
"- Use -duplicate to copy EXISTING tag values to new namespaces:",
"  Explicit format: -duplicate title:album,artist (copies title: to album: and artist:)",
"  Inferred format: -duplicate title,album,artist (first is source, rest are targets)",
|
||||
@@ -245,10 +248,8 @@ class Add_Tag(Cmdlet):
|
||||
|
||||
# Try multiple tag lookup strategies in order
|
||||
tag_lookups = [
|
||||
lambda x: x.extra.get("tags") if isinstance(x, models.PipeObject) and isinstance(x.extra, dict) else None,
|
||||
lambda x: x.get("tags") if isinstance(x, dict) else None,
|
||||
lambda x: x.get("extra", {}).get("tags") if isinstance(x, dict) and isinstance(x.get("extra"), dict) else None,
|
||||
lambda x: getattr(x, "tags", None),
|
||||
lambda x: x.get("tags") if isinstance(x, dict) else None,
|
||||
]
|
||||
|
||||
for lookup in tag_lookups:
|
||||
@@ -297,288 +298,158 @@ class Add_Tag(Cmdlet):
|
||||
hash_override = normalize_hash(parsed.get("hash")) or extracted_hash
|
||||
duplicate_arg = parsed.get("duplicate")
|
||||
|
||||
# If no tags provided (and no list), write sidecar files with embedded tags
|
||||
# Note: Since 'tags' is required=False in the cmdlet arg, this block can be reached via CLI
|
||||
# when no tag arguments are provided.
|
||||
if not tags_to_add and not duplicate_arg:
|
||||
# Write sidecar files with the tags that are already in the result dicts
|
||||
sidecar_count = 0
|
||||
for res in results:
|
||||
# Handle both dict and PipeObject formats
|
||||
file_path = None
|
||||
tags = []
|
||||
file_hash = ""
|
||||
|
||||
if isinstance(res, models.PipeObject):
|
||||
file_path = res.file_path
|
||||
tags = res.extra.get('tags', [])
|
||||
file_hash = res.hash or ""
|
||||
elif isinstance(res, dict):
|
||||
file_path = res.get('file_path')
|
||||
# Try multiple tag locations in order
|
||||
tag_sources = [lambda: res.get('tags', []), lambda: res.get('extra', {}).get('tags', [])]
|
||||
for source in tag_sources:
|
||||
tags = source()
|
||||
if tags:
|
||||
break
|
||||
file_hash = res.get('hash', "")
|
||||
|
||||
if not file_path:
|
||||
log(f"[add_tags] Warning: Result has no file_path, skipping", file=sys.stderr)
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
if tags:
|
||||
# Write sidecar file for this file with its tags
|
||||
try:
|
||||
sidecar_path = write_sidecar(Path(file_path), tags, [], file_hash)
|
||||
log(f"[add_tags] Wrote {len(tags)} tag(s) to sidecar: {sidecar_path}", file=sys.stderr)
|
||||
sidecar_count += 1
|
||||
except Exception as e:
|
||||
log(f"[add_tags] Warning: Failed to write sidecar for {file_path}: {e}", file=sys.stderr)
|
||||
|
||||
ctx.emit(res)
|
||||
|
||||
if sidecar_count > 0:
|
||||
log(f"[add_tags] Wrote {sidecar_count} sidecar file(s) with embedded tags", file=sys.stderr)
|
||||
else:
|
||||
log(f"[add_tags] No tags to write - passed {len(results)} result(s) through unchanged", file=sys.stderr)
|
||||
return 0
|
||||
|
||||
# Tags ARE provided - append them to each result and write sidecar files or add to Hydrus
|
||||
sidecar_count = 0
|
||||
total_new_tags = 0
|
||||
# Tags ARE provided - apply them to each store-backed result
|
||||
total_added = 0
|
||||
total_modified = 0
|
||||
for res in results:
|
||||
# Handle both dict and PipeObject formats
|
||||
file_path = None
|
||||
existing_tags = []
|
||||
file_hash = ""
|
||||
storage_source = None
|
||||
hydrus_hash = None
|
||||
|
||||
# Define field name aliases to check
|
||||
path_field_names = ['file_path', 'path']
|
||||
source_field_names = ['storage_source', 'source', 'origin']
|
||||
hash_field_names = ['hydrus_hash', 'hash', 'hash_hex']
|
||||
store_override = parsed.get("store")
|
||||
|
||||
for res in results:
|
||||
store_name: Optional[str]
|
||||
raw_hash: Optional[str]
|
||||
raw_path: Optional[str]
|
||||
|
||||
if isinstance(res, models.PipeObject):
|
||||
file_path = res.file_path
|
||||
existing_tags = res.extra.get('tags', [])
|
||||
file_hash = res.file_hash or ""
|
||||
for field in source_field_names:
|
||||
storage_source = res.extra.get(field)
|
||||
if storage_source:
|
||||
break
|
||||
hydrus_hash = res.extra.get('hydrus_hash')
|
||||
store_name = store_override or res.store
|
||||
raw_hash = res.hash
|
||||
raw_path = res.path
|
||||
elif isinstance(res, dict):
|
||||
# Try path field names in order
|
||||
for field in path_field_names:
|
||||
file_path = res.get(field)
|
||||
if file_path:
|
||||
break
|
||||
|
||||
# Try tag locations in order
|
||||
tag_sources = [lambda: res.get('tags', []), lambda: res.get('extra', {}).get('tags', [])]
|
||||
for source in tag_sources:
|
||||
existing_tags = source()
|
||||
if existing_tags:
|
||||
break
|
||||
|
||||
file_hash = res.get('file_hash', "")
|
||||
|
||||
# Try source field names in order (top-level then extra)
|
||||
for field in source_field_names:
|
||||
storage_source = res.get(field)
|
||||
if storage_source:
|
||||
break
|
||||
if not storage_source and 'extra' in res:
|
||||
for field in source_field_names:
|
||||
storage_source = res.get('extra', {}).get(field)
|
||||
if storage_source:
|
||||
break
|
||||
|
||||
# Try hash field names in order (top-level then extra)
|
||||
for field in hash_field_names:
|
||||
hydrus_hash = res.get(field)
|
||||
if hydrus_hash:
|
||||
break
|
||||
if not hydrus_hash and 'extra' in res:
|
||||
for field in hash_field_names:
|
||||
hydrus_hash = res.get('extra', {}).get(field)
|
||||
if hydrus_hash:
|
||||
break
|
||||
|
||||
if not hydrus_hash and file_hash:
|
||||
hydrus_hash = file_hash
|
||||
if not storage_source and hydrus_hash and not file_path:
|
||||
storage_source = 'hydrus'
|
||||
# If we have a file path but no storage source, assume local to avoid sidecar spam
|
||||
if not storage_source and file_path:
|
||||
storage_source = 'local'
|
||||
store_name = store_override or res.get("store")
|
||||
raw_hash = res.get("hash")
|
||||
raw_path = res.get("path")
|
||||
else:
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
original_tags_lower = {str(t).lower() for t in existing_tags if isinstance(t, str)}
|
||||
original_tags_snapshot = list(existing_tags)
|
||||
original_title = _extract_title_tag(original_tags_snapshot)
|
||||
removed_tags: List[str] = []
|
||||
if not store_name:
|
||||
log("[add_tags] Error: Missing -store and item has no store field", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Apply hash override if provided
|
||||
if hash_override:
|
||||
hydrus_hash = hash_override
|
||||
# If we have a hash override, we treat it as a Hydrus target
|
||||
storage_source = "hydrus"
|
||||
resolved_hash = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
|
||||
if not resolved_hash and raw_path:
|
||||
try:
|
||||
p = Path(str(raw_path))
|
||||
stem = p.stem
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
|
||||
resolved_hash = stem.lower()
|
||||
elif p.exists() and p.is_file():
|
||||
resolved_hash = sha256_file(p)
|
||||
except Exception:
|
||||
resolved_hash = None
|
||||
|
||||
if not file_path and not hydrus_hash:
|
||||
log(f"[add_tags] Warning: Result has neither file_path nor hash available, skipping", file=sys.stderr)
|
||||
if not resolved_hash:
|
||||
log("[add_tags] Warning: Item missing usable hash (and could not derive from path); skipping", file=sys.stderr)
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
try:
    backend = Store(config)[str(store_name)]
except Exception as exc:
    log(f"[add_tags] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
    return 1

try:
    existing_tags, _src = backend.get_tag(resolved_hash, config=config)
except Exception:
    existing_tags = []

existing_tags_list = [t for t in (existing_tags or []) if isinstance(t, str)]
existing_lower = {t.lower() for t in existing_tags_list}
original_title = _extract_title_tag(existing_tags_list)
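# The hunk above replaces the old Hydrus/local branching with one store-backed flow.
# A minimal sketch of that flow, assuming the Store indexing and the backend
# get_tag/add_tag/delete_tag methods shown in this diff (store name, hash and tag
# below are placeholders, not values from the commit):
from Store import Store

def set_tag(config, store_name, file_hash, new_tag):
    """Sketch only: add new_tag, replacing any older tag in the same namespace."""
    backend = Store(config)[store_name]          # raises for an unknown store name
    existing, _src = backend.get_tag(file_hash, config=config)
    existing = [t for t in (existing or []) if isinstance(t, str)]
    if ":" in new_tag:
        ns_prefix = new_tag.split(":", 1)[0].lower() + ":"
        stale = [t for t in existing
                 if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower()]
        if stale:
            backend.delete_tag(file_hash, stale, config=config)
    if new_tag.lower() not in {t.lower() for t in existing}:
        backend.add_tag(file_hash, [new_tag], config=config)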
|
||||
|
||||
# Per-item tag list (do not mutate shared list)
|
||||
item_tags_to_add = list(tags_to_add)
|
||||
item_tags_to_add = collapse_namespace_tags(item_tags_to_add, "title", prefer="last")
|
||||
|
||||
# Handle -duplicate logic (copy existing tags to new namespaces)
|
||||
if duplicate_arg:
|
||||
# Parse duplicate format: source:target1,target2 or source,target1,target2
|
||||
parts = duplicate_arg.split(':')
|
||||
parts = str(duplicate_arg).split(':')
|
||||
source_ns = ""
|
||||
targets = []
|
||||
targets: list[str] = []
|
||||
|
||||
if len(parts) > 1:
|
||||
# Explicit format: source:target1,target2
|
||||
source_ns = parts[0]
|
||||
targets = parts[1].split(',')
|
||||
targets = [t.strip() for t in parts[1].split(',') if t.strip()]
|
||||
else:
|
||||
# Inferred format: source,target1,target2
|
||||
parts = duplicate_arg.split(',')
|
||||
if len(parts) > 1:
|
||||
source_ns = parts[0]
|
||||
targets = parts[1:]
|
||||
parts2 = str(duplicate_arg).split(',')
|
||||
if len(parts2) > 1:
|
||||
source_ns = parts2[0]
|
||||
targets = [t.strip() for t in parts2[1:] if t.strip()]
|
||||
|
||||
if source_ns and targets:
|
||||
# Find tags in source namespace
|
||||
source_tags = [t for t in existing_tags if t.startswith(source_ns + ':')]
|
||||
for t in source_tags:
|
||||
value = t.split(':', 1)[1]
|
||||
source_prefix = source_ns.lower() + ":"
|
||||
for t in existing_tags_list:
|
||||
if not t.lower().startswith(source_prefix):
|
||||
continue
|
||||
value = t.split(":", 1)[1]
|
||||
for target_ns in targets:
|
||||
new_tag = f"{target_ns}:{value}"
|
||||
if new_tag not in existing_tags and new_tag not in tags_to_add:
|
||||
tags_to_add.append(new_tag)
|
||||
if new_tag.lower() not in existing_lower:
|
||||
item_tags_to_add.append(new_tag)
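# The -duplicate handling above accepts "source:target1,target2" (explicit) or
# "source,target1,target2" (inferred). A self-contained sketch of that parsing;
# the namespace names in the asserts are illustrative only:
def parse_duplicate(arg: str) -> tuple[str, list[str]]:
    head, sep, rest = str(arg).partition(":")
    if sep:  # explicit form
        return head, [t.strip() for t in rest.split(",") if t.strip()]
    parts = [t.strip() for t in str(arg).split(",") if t.strip()]  # inferred form
    return (parts[0], parts[1:]) if len(parts) > 1 else ("", [])

assert parse_duplicate("artist:creator,author") == ("artist", ["creator", "author"])
assert parse_duplicate("artist,creator,author") == ("artist", ["creator", "author"])
# With an existing "artist:jane doe", either form queues "creator:jane doe" and
# "author:jane doe" for addition (skipping any that are already present).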
|
||||
|
||||
# Merge new tags with existing tags, handling namespace overwrites
|
||||
# When adding a tag like "namespace:value", remove any existing "namespace:*" tags
|
||||
for new_tag in tags_to_add:
|
||||
# Check if this is a namespaced tag (format: "namespace:value")
|
||||
if ':' in new_tag:
|
||||
namespace = new_tag.split(':', 1)[0]
|
||||
# Track removals for Hydrus: delete old tags in same namespace (except identical)
|
||||
to_remove = [t for t in existing_tags if t.startswith(namespace + ':') and t.lower() != new_tag.lower()]
|
||||
removed_tags.extend(to_remove)
|
||||
# Remove any existing tags with the same namespace
|
||||
existing_tags = [t for t in existing_tags if not (t.startswith(namespace + ':'))]
|
||||
# Namespace replacement: delete old namespace:* when adding namespace:value
|
||||
removed_namespace_tags: list[str] = []
|
||||
for new_tag in item_tags_to_add:
|
||||
if not isinstance(new_tag, str) or ":" not in new_tag:
|
||||
continue
|
||||
ns = new_tag.split(":", 1)[0].strip()
|
||||
if not ns:
|
||||
continue
|
||||
ns_prefix = ns.lower() + ":"
|
||||
for t in existing_tags_list:
|
||||
if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
|
||||
removed_namespace_tags.append(t)
|
||||
|
||||
# Add the new tag if not already present
|
||||
if new_tag not in existing_tags:
|
||||
existing_tags.append(new_tag)
|
||||
removed_namespace_tags = sorted({t for t in removed_namespace_tags})
|
||||
|
||||
# Ensure only one tag per namespace (e.g., single title:) with latest preferred
|
||||
existing_tags = collapse_namespace_tags(existing_tags, "title", prefer="last")
|
||||
actual_tags_to_add = [t for t in item_tags_to_add if isinstance(t, str) and t.lower() not in existing_lower]
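# Worked example of the namespace-overwrite rule above, with hypothetical values:
#   existing_tags_list = ["title:old name", "creator:jane doe"]
#   item_tags_to_add   = ["title:new name"]
# "title:" already carries a value, so "title:old name" lands in
# removed_namespace_tags, only "title:new name" lands in actual_tags_to_add,
# and "creator:jane doe" is left untouched.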
|
||||
|
||||
# Compute new tags relative to original
|
||||
new_tags_added = [t for t in existing_tags if isinstance(t, str) and t.lower() not in original_tags_lower]
|
||||
total_new_tags += len(new_tags_added)
|
||||
changed = False
|
||||
if removed_namespace_tags:
|
||||
try:
|
||||
backend.delete_tag(resolved_hash, removed_namespace_tags, config=config)
|
||||
changed = True
|
||||
except Exception as exc:
|
||||
log(f"[add_tags] Warning: Failed deleting namespace tags: {exc}", file=sys.stderr)
|
||||
|
||||
# Update the result's tags
|
||||
if actual_tags_to_add:
|
||||
try:
|
||||
backend.add_tag(resolved_hash, actual_tags_to_add, config=config)
|
||||
changed = True
|
||||
except Exception as exc:
|
||||
log(f"[add_tags] Warning: Failed adding tags: {exc}", file=sys.stderr)
|
||||
|
||||
if changed:
|
||||
total_added += len(actual_tags_to_add)
|
||||
total_modified += 1
|
||||
|
||||
try:
|
||||
refreshed_tags, _src2 = backend.get_tag(resolved_hash, config=config)
|
||||
refreshed_list = [t for t in (refreshed_tags or []) if isinstance(t, str)]
|
||||
except Exception:
|
||||
refreshed_list = existing_tags_list
|
||||
|
||||
# Update the result's tags using canonical field
|
||||
if isinstance(res, models.PipeObject):
|
||||
res.extra['tags'] = existing_tags
|
||||
res.tags = refreshed_list
|
||||
elif isinstance(res, dict):
|
||||
res['tags'] = existing_tags
|
||||
res["tags"] = refreshed_list
|
||||
|
||||
# If a title: tag was added, update the in-memory title and columns so downstream display reflects it immediately
|
||||
title_value = _extract_title_tag(existing_tags)
|
||||
_apply_title_to_result(res, title_value)
|
||||
final_title = _extract_title_tag(refreshed_list)
|
||||
_apply_title_to_result(res, final_title)
|
||||
|
||||
final_tags = existing_tags
|
||||
|
||||
# Determine where to add tags: Hydrus, local DB, or sidecar
|
||||
if storage_source and storage_source.lower() == 'hydrus':
|
||||
# Add tags to Hydrus using the API
|
||||
target_hash = hydrus_hash or file_hash
|
||||
if target_hash:
|
||||
try:
|
||||
tags_to_send = [t for t in existing_tags if isinstance(t, str) and t.lower() not in original_tags_lower]
|
||||
hydrus_client = hydrus_wrapper.get_client(config)
|
||||
service_name = hydrus_wrapper.get_tag_service_name(config)
|
||||
if tags_to_send:
|
||||
log(f"[add_tags] Adding {len(tags_to_send)} new tag(s) to Hydrus file: {target_hash}", file=sys.stderr)
|
||||
hydrus_client.add_tags(target_hash, tags_to_send, service_name)
|
||||
else:
|
||||
log(f"[add_tags] No new tags to add for Hydrus file: {target_hash}", file=sys.stderr)
|
||||
# Delete old namespace tags we replaced (e.g., previous title:)
|
||||
if removed_tags:
|
||||
unique_removed = sorted(set(removed_tags))
|
||||
hydrus_client.delete_tags(target_hash, unique_removed, service_name)
|
||||
if tags_to_send:
|
||||
log(f"[add_tags] ✓ Tags added to Hydrus", file=sys.stderr)
|
||||
elif removed_tags:
|
||||
log(f"[add_tags] ✓ Removed {len(unique_removed)} tag(s) from Hydrus", file=sys.stderr)
|
||||
sidecar_count += 1
|
||||
if tags_to_send or removed_tags:
|
||||
total_modified += 1
|
||||
except Exception as e:
|
||||
log(f"[add_tags] Warning: Failed to add tags to Hydrus: {e}", file=sys.stderr)
|
||||
else:
|
||||
log(f"[add_tags] Warning: No hash available for Hydrus file, skipping", file=sys.stderr)
|
||||
elif storage_source and storage_source.lower() == 'local':
|
||||
# For local storage, save directly to DB (no sidecar needed)
|
||||
if file_path:
|
||||
library_root = get_local_storage_path(config)
|
||||
if library_root:
|
||||
try:
|
||||
path_obj = Path(file_path)
|
||||
with FolderDB(library_root) as db:
|
||||
db.save_tags(path_obj, existing_tags)
|
||||
# Reload tags to reflect DB state (preserves auto-title logic)
|
||||
file_hash = db.get_file_hash(path_obj)
|
||||
refreshed_tags = db.get_tags(file_hash) if file_hash else existing_tags
|
||||
# Recompute title from refreshed tags for accurate display
|
||||
refreshed_title = _extract_title_tag(refreshed_tags)
|
||||
if refreshed_title:
|
||||
_apply_title_to_result(res, refreshed_title)
|
||||
res_tags = refreshed_tags or existing_tags
|
||||
if isinstance(res, models.PipeObject):
|
||||
res.extra['tags'] = res_tags
|
||||
elif isinstance(res, dict):
|
||||
res['tags'] = res_tags
|
||||
log(f"[add_tags] Added {len(new_tags_added)} new tag(s); {len(res_tags)} total tag(s) stored locally", file=sys.stderr)
|
||||
sidecar_count += 1
|
||||
if new_tags_added or removed_tags:
|
||||
total_modified += 1
|
||||
final_tags = res_tags
|
||||
except Exception as e:
|
||||
log(f"[add_tags] Warning: Failed to save tags to local DB: {e}", file=sys.stderr)
|
||||
else:
|
||||
log(f"[add_tags] Warning: No library root configured for local storage, skipping", file=sys.stderr)
|
||||
else:
|
||||
log(f"[add_tags] Warning: No file path for local storage, skipping", file=sys.stderr)
|
||||
else:
|
||||
# For other storage types or unknown sources, avoid writing sidecars to reduce clutter
|
||||
# (local/hydrus are handled above).
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
# If title changed, refresh the cached result table so the display reflects the new name
|
||||
final_title = _extract_title_tag(final_tags)
|
||||
if final_title and (not original_title or final_title.lower() != original_title.lower()):
|
||||
_refresh_result_table_title(final_title, hydrus_hash or file_hash, file_hash, file_path)
|
||||
_refresh_result_table_title(final_title, resolved_hash, raw_path)
|
||||
|
||||
# If tags changed, refresh tag view via get-tag (prefer current subject; fall back to hash refresh)
|
||||
if new_tags_added or removed_tags:
|
||||
_refresh_tags_view(res, hydrus_hash, file_hash, file_path, config)
|
||||
if changed:
|
||||
_refresh_tags_view(res, resolved_hash, str(store_name), raw_path, config)
|
||||
|
||||
# Emit the modified result
|
||||
ctx.emit(res)
|
||||
|
||||
log(f"[add_tags] Added {total_new_tags} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)", file=sys.stderr)
|
||||
log(
|
||||
f"[add_tags] Added {total_added} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return 0
|
||||
|
||||
|
||||
|
||||
@@ -6,8 +6,8 @@ import sys
|
||||
from . import register
|
||||
import pipeline as ctx
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field, normalize_hash
|
||||
from helper.logger import log
|
||||
from helper.store import FileStorage
|
||||
from SYS.logger import log
|
||||
from Store import Store
|
||||
|
||||
|
||||
class Add_Url(Cmdlet):
|
||||
@@ -54,19 +54,19 @@ class Add_Url(Cmdlet):
|
||||
return 1
|
||||
|
||||
# Parse url (comma-separated)
|
||||
url = [u.strip() for u in str(url_arg).split(',') if u.strip()]
|
||||
if not url:
|
||||
urls = [u.strip() for u in str(url_arg).split(',') if u.strip()]
|
||||
if not urls:
|
||||
log("Error: No valid url provided")
|
||||
return 1
|
||||
|
||||
# Get backend and add url
|
||||
try:
|
||||
storage = FileStorage(config)
|
||||
storage = Store(config)
|
||||
backend = storage[store_name]
|
||||
|
||||
for url in url:
|
||||
backend.add_url(file_hash, url)
|
||||
ctx.emit(f"Added URL: {url}")
|
||||
|
||||
backend.add_url(file_hash, urls)
|
||||
for u in urls:
|
||||
ctx.emit(f"Added URL: {u}")
|
||||
|
||||
return 0
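The reworked hunk above parses the comma-separated argument once and hands the whole list to the backend in a single call. A minimal sketch of that flow, assuming the same Store/backend API; the store name and hash are placeholders:

from Store import Store

def add_urls(config, store_name, file_hash, url_arg):
    urls = [u.strip() for u in str(url_arg).split(",") if u.strip()]
    if not urls:
        return 1
    backend = Store(config)[store_name]
    backend.add_url(file_hash, urls)   # one call with the full list, as in the new code
    for u in urls:
        print(f"Added URL: {u}")
    return 0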
|
||||
|
||||
|
||||
cmdlets/catalog.py (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from importlib import import_module
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
try:
|
||||
from cmdlets import REGISTRY
|
||||
except Exception:
|
||||
REGISTRY = {} # type: ignore
|
||||
|
||||
try:
|
||||
from cmdnats import register_native_commands as _register_native_commands
|
||||
except Exception:
|
||||
_register_native_commands = None
|
||||
|
||||
|
||||
def ensure_registry_loaded() -> None:
    """Ensure native commands are registered into REGISTRY (idempotent)."""
    if _register_native_commands and REGISTRY is not None:
        try:
            _register_native_commands(REGISTRY)
        except Exception:
            pass
|
||||
|
||||
|
||||
def _normalize_mod_name(mod_name: str) -> str:
|
||||
"""Normalize a command/module name for import resolution."""
|
||||
normalized = (mod_name or "").strip()
|
||||
if normalized.startswith('.'):
|
||||
normalized = normalized.lstrip('.')
|
||||
normalized = normalized.replace('-', '_')
|
||||
return normalized
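# Examples of the normalization above (hypothetical inputs):
#   _normalize_mod_name(".add-tag")          -> "add_tag"
#   _normalize_mod_name("download-provider") -> "download_provider"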
|
||||
|
||||
|
||||
def import_cmd_module(mod_name: str):
|
||||
"""Import a cmdlet/native module from cmdnats or cmdlets packages."""
|
||||
normalized = _normalize_mod_name(mod_name)
|
||||
if not normalized:
|
||||
return None
|
||||
for package in ("cmdnats", "cmdlets", None):
|
||||
try:
|
||||
qualified = f"{package}.{normalized}" if package else normalized
|
||||
return import_module(qualified)
|
||||
except ModuleNotFoundError:
|
||||
continue
|
||||
except Exception:
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
def _normalize_arg(arg: Any) -> Dict[str, Any]:
|
||||
"""Convert a CmdletArg/dict into a plain metadata dict."""
|
||||
if isinstance(arg, dict):
|
||||
name = arg.get("name", "")
|
||||
return {
|
||||
"name": str(name).lstrip("-"),
|
||||
"type": arg.get("type", "string"),
|
||||
"required": bool(arg.get("required", False)),
|
||||
"description": arg.get("description", ""),
|
||||
"choices": arg.get("choices", []) or [],
|
||||
"alias": arg.get("alias", ""),
|
||||
"variadic": arg.get("variadic", False),
|
||||
}
|
||||
|
||||
name = getattr(arg, "name", "") or ""
|
||||
return {
|
||||
"name": str(name).lstrip("-"),
|
||||
"type": getattr(arg, "type", "string"),
|
||||
"required": bool(getattr(arg, "required", False)),
|
||||
"description": getattr(arg, "description", ""),
|
||||
"choices": getattr(arg, "choices", []) or [],
|
||||
"alias": getattr(arg, "alias", ""),
|
||||
"variadic": getattr(arg, "variadic", False),
|
||||
}
|
||||
|
||||
|
||||
def get_cmdlet_metadata(cmd_name: str) -> Optional[Dict[str, Any]]:
|
||||
"""Return normalized metadata for a cmdlet, if available (aliases supported)."""
|
||||
ensure_registry_loaded()
|
||||
normalized = cmd_name.replace("-", "_")
|
||||
mod = import_cmd_module(normalized)
|
||||
data = getattr(mod, "CMDLET", None) if mod else None
|
||||
|
||||
if data is None:
|
||||
try:
|
||||
reg_fn = (REGISTRY or {}).get(cmd_name.replace('_', '-').lower())
|
||||
if reg_fn:
|
||||
owner_mod = getattr(reg_fn, "__module__", "")
|
||||
if owner_mod:
|
||||
owner = import_module(owner_mod)
|
||||
data = getattr(owner, "CMDLET", None)
|
||||
except Exception:
|
||||
data = None
|
||||
|
||||
if not data:
|
||||
return None
|
||||
|
||||
if hasattr(data, "to_dict"):
|
||||
base = data.to_dict()
|
||||
elif isinstance(data, dict):
|
||||
base = data
|
||||
else:
|
||||
base = {}
|
||||
|
||||
name = getattr(data, "name", base.get("name", cmd_name)) or cmd_name
|
||||
aliases = getattr(data, "aliases", base.get("aliases", [])) or []
|
||||
usage = getattr(data, "usage", base.get("usage", ""))
|
||||
summary = getattr(data, "summary", base.get("summary", ""))
|
||||
details = getattr(data, "details", base.get("details", [])) or []
|
||||
args_list = getattr(data, "args", base.get("args", [])) or []
|
||||
args = [_normalize_arg(arg) for arg in args_list]
|
||||
|
||||
return {
|
||||
"name": str(name).replace("_", "-").lower(),
|
||||
"aliases": [str(a).replace("_", "-").lower() for a in aliases if a],
|
||||
"usage": usage,
|
||||
"summary": summary,
|
||||
"details": details,
|
||||
"args": args,
|
||||
"raw": data,
|
||||
}
|
||||
|
||||
|
||||
def list_cmdlet_metadata() -> Dict[str, Dict[str, Any]]:
|
||||
"""Collect metadata for all registered cmdlets keyed by canonical name."""
|
||||
ensure_registry_loaded()
|
||||
entries: Dict[str, Dict[str, Any]] = {}
|
||||
for reg_name in (REGISTRY or {}).keys():
|
||||
meta = get_cmdlet_metadata(reg_name)
|
||||
canonical = str(reg_name).replace("_", "-").lower()
|
||||
|
||||
if meta:
|
||||
canonical = meta.get("name", canonical)
|
||||
aliases = meta.get("aliases", [])
|
||||
base = entries.get(
|
||||
canonical,
|
||||
{
|
||||
"name": canonical,
|
||||
"aliases": [],
|
||||
"usage": "",
|
||||
"summary": "",
|
||||
"details": [],
|
||||
"args": [],
|
||||
"raw": meta.get("raw"),
|
||||
},
|
||||
)
|
||||
merged_aliases = set(base.get("aliases", [])) | set(aliases)
|
||||
if canonical != reg_name:
|
||||
merged_aliases.add(reg_name)
|
||||
base["aliases"] = sorted(a for a in merged_aliases if a and a != canonical)
|
||||
if not base.get("usage") and meta.get("usage"):
|
||||
base["usage"] = meta["usage"]
|
||||
if not base.get("summary") and meta.get("summary"):
|
||||
base["summary"] = meta["summary"]
|
||||
if not base.get("details") and meta.get("details"):
|
||||
base["details"] = meta["details"]
|
||||
if not base.get("args") and meta.get("args"):
|
||||
base["args"] = meta["args"]
|
||||
if not base.get("raw"):
|
||||
base["raw"] = meta.get("raw")
|
||||
entries[canonical] = base
|
||||
else:
|
||||
entries.setdefault(
|
||||
canonical,
|
||||
{"name": canonical, "aliases": [], "usage": "", "summary": "", "details": [], "args": [], "raw": None},
|
||||
)
|
||||
return entries
|
||||
|
||||
|
||||
def list_cmdlet_names(include_aliases: bool = True) -> List[str]:
|
||||
"""Return sorted cmdlet names (optionally including aliases)."""
|
||||
ensure_registry_loaded()
|
||||
entries = list_cmdlet_metadata()
|
||||
names = set()
|
||||
for meta in entries.values():
|
||||
names.add(meta.get("name", ""))
|
||||
if include_aliases:
|
||||
for alias in meta.get("aliases", []):
|
||||
names.add(alias)
|
||||
return sorted(n for n in names if n)
|
||||
|
||||
|
||||
def get_cmdlet_arg_flags(cmd_name: str) -> List[str]:
|
||||
"""Return flag variants for cmdlet arguments (e.g., -name/--name)."""
|
||||
meta = get_cmdlet_metadata(cmd_name)
|
||||
if not meta:
|
||||
return []
|
||||
|
||||
raw = meta.get("raw")
|
||||
if raw and hasattr(raw, "build_flag_registry"):
|
||||
try:
|
||||
registry = raw.build_flag_registry()
|
||||
flags: List[str] = []
|
||||
for flag_set in registry.values():
|
||||
flags.extend(flag_set)
|
||||
return sorted(set(flags))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
flags: List[str] = []
|
||||
for arg in meta.get("args", []):
|
||||
name = arg.get("name")
|
||||
if not name:
|
||||
continue
|
||||
flags.append(f"-{name}")
|
||||
flags.append(f"--{name}")
|
||||
alias = arg.get("alias")
|
||||
if alias:
|
||||
flags.append(f"-{alias}")
|
||||
return flags
|
||||
|
||||
|
||||
def get_cmdlet_arg_choices(cmd_name: str, arg_name: str) -> List[str]:
|
||||
"""Return declared choices for a cmdlet argument."""
|
||||
meta = get_cmdlet_metadata(cmd_name)
|
||||
if not meta:
|
||||
return []
|
||||
target = arg_name.lstrip("-")
|
||||
for arg in meta.get("args", []):
|
||||
if arg.get("name") == target:
|
||||
return list(arg.get("choices", []) or [])
|
||||
return []
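A hedged usage sketch for the new catalog module, assuming it imports as cmdlets.catalog; the printed values are examples, not guaranteed output:

from cmdlets import catalog

catalog.ensure_registry_loaded()
print(catalog.list_cmdlet_names())                 # e.g. ['add-tag', 'add-url', 'delete-tag', ...]
meta = catalog.get_cmdlet_metadata("delete-tag")
if meta:
    print(meta["usage"])                           # usage string as declared in this commit
print(catalog.get_cmdlet_arg_flags("delete-tag"))  # e.g. ['-hash', '--hash', '-store', '--store', ...]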
|
||||
@@ -4,10 +4,10 @@ from typing import Any, Dict, Sequence
|
||||
import json
|
||||
import sys
|
||||
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
|
||||
from . import register
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, should_show_help
|
||||
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ from pathlib import Path
|
||||
import sys
|
||||
import json
|
||||
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
|
||||
from . import register
|
||||
from ._shared import Cmdlet, CmdletArg, get_pipe_object_path, normalize_result_input, filter_results_by_temp, should_show_help
|
||||
|
||||
@@ -5,10 +5,10 @@ from typing import Any, Dict, Sequence
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from helper.logger import debug, log
|
||||
from helper.store import Folder
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash, get_origin, get_field, should_show_help
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from SYS.logger import debug, log
|
||||
from Store.Folder import Folder
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash, get_field, should_show_help
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
import pipeline as ctx
|
||||
|
||||
|
||||
@@ -48,17 +48,17 @@ class Delete_File(Cmdlet):
|
||||
hash_hex_raw = get_field(item, "hash_hex") or get_field(item, "hash")
|
||||
target = get_field(item, "target") or get_field(item, "file_path") or get_field(item, "path")
|
||||
|
||||
origin = get_origin(item)
|
||||
|
||||
# Also check the store field explicitly from PipeObject
|
||||
store = None
|
||||
if isinstance(item, dict):
|
||||
store = item.get("store")
|
||||
else:
|
||||
store = get_field(item, "store")
|
||||
|
||||
store_lower = str(store).lower() if store else ""
|
||||
is_hydrus_store = bool(store_lower) and ("hydrus" in store_lower or store_lower in {"home", "work"})
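# Illustrative evaluations of the store check above (store names are made up):
#   store = "hydrus-main" -> is_hydrus_store is True
#   store = "home"        -> is_hydrus_store is True
#   store = "local"       -> is_hydrus_store is False
#   store = None          -> is_hydrus_store is False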
|
||||
|
||||
# For Hydrus files, the target IS the hash
|
||||
if origin and origin.lower() == "hydrus" and not hash_hex_raw:
|
||||
if is_hydrus_store and not hash_hex_raw:
|
||||
hash_hex_raw = target
|
||||
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_hex_raw)
|
||||
@@ -72,7 +72,7 @@ class Delete_File(Cmdlet):
|
||||
# If lib_root is provided and this is from a folder store, use the Folder class
|
||||
if lib_root:
|
||||
try:
|
||||
folder = Folder(Path(lib_root), name=origin or "local")
|
||||
folder = Folder(Path(lib_root), name=store or "local")
|
||||
if folder.delete_file(str(path)):
|
||||
local_deleted = True
|
||||
ctx.emit(f"Removed file: {path.name}")
|
||||
@@ -109,17 +109,7 @@ class Delete_File(Cmdlet):
|
||||
pass
|
||||
|
||||
hydrus_deleted = False
|
||||
# Only attempt Hydrus deletion if store is explicitly Hydrus-related
|
||||
# Check both origin and store fields to determine if this is a Hydrus file
|
||||
|
||||
should_try_hydrus = False
|
||||
|
||||
# Check if store indicates this is a Hydrus backend
|
||||
if store and ("hydrus" in store.lower() or store.lower() == "home" or store.lower() == "work"):
|
||||
should_try_hydrus = True
|
||||
# Fallback to origin check if store not available
|
||||
elif origin and origin.lower() == "hydrus":
|
||||
should_try_hydrus = True
|
||||
should_try_hydrus = is_hydrus_store
|
||||
|
||||
# If conserve is set to hydrus, definitely don't delete
|
||||
if conserve == "hydrus":
|
||||
|
||||
@@ -4,9 +4,9 @@ from typing import Any, Dict, Sequence
|
||||
import json
|
||||
|
||||
import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash, get_hash_for_operation, fetch_hydrus_metadata, should_show_help, get_field
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="delete-note",
|
||||
|
||||
@@ -7,11 +7,11 @@ import json
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
|
||||
import pipeline as ctx
|
||||
from ._shared import Cmdlet, CmdletArg, parse_cmdlet_args, normalize_result_input, get_field
|
||||
from helper.folder_store import LocalLibrarySearchOptimizer
|
||||
from API.folder import LocalLibrarySearchOptimizer
|
||||
from config import get_local_storage_path
|
||||
|
||||
|
||||
|
||||
@@ -8,12 +8,12 @@ import sys
|
||||
from . import register
|
||||
import models
|
||||
import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, fetch_hydrus_metadata, should_show_help, get_field
|
||||
from helper.logger import debug, log
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, should_show_help, get_field
|
||||
from SYS.logger import debug, log
|
||||
from Store import Store
|
||||
|
||||
|
||||
def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, config: Dict[str, Any]) -> None:
|
||||
def _refresh_tag_view_if_current(file_hash: str | None, store_name: str | None, path: str | None, config: Dict[str, Any]) -> None:
|
||||
"""If the current subject matches the target, refresh tags via get-tag."""
|
||||
try:
|
||||
from cmdlets import get_tag as get_tag_cmd # type: ignore
|
||||
@@ -28,17 +28,17 @@ def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, co
|
||||
def norm(val: Any) -> str:
|
||||
return str(val).lower()
|
||||
|
||||
target_hash = norm(hash_hex) if hash_hex else None
|
||||
target_path = norm(file_path) if file_path else None
|
||||
target_hash = norm(file_hash) if file_hash else None
|
||||
target_path = norm(path) if path else None
|
||||
|
||||
subj_hashes: list[str] = []
|
||||
subj_paths: list[str] = []
|
||||
if isinstance(subject, dict):
|
||||
subj_hashes = [norm(v) for v in [subject.get("hydrus_hash"), subject.get("hash"), subject.get("hash_hex"), subject.get("file_hash")] if v]
|
||||
subj_paths = [norm(v) for v in [subject.get("file_path"), subject.get("path"), subject.get("target")] if v]
|
||||
subj_hashes = [norm(v) for v in [subject.get("hash")] if v]
|
||||
subj_paths = [norm(v) for v in [subject.get("path"), subject.get("target")] if v]
|
||||
else:
|
||||
subj_hashes = [norm(get_field(subject, f)) for f in ("hydrus_hash", "hash", "hash_hex", "file_hash") if get_field(subject, f)]
|
||||
subj_paths = [norm(get_field(subject, f)) for f in ("file_path", "path", "target") if get_field(subject, f)]
|
||||
subj_hashes = [norm(get_field(subject, f)) for f in ("hash",) if get_field(subject, f)]
|
||||
subj_paths = [norm(get_field(subject, f)) for f in ("path", "target") if get_field(subject, f)]
|
||||
|
||||
is_match = False
|
||||
if target_hash and target_hash in subj_hashes:
|
||||
@@ -49,20 +49,20 @@ def _refresh_tag_view_if_current(hash_hex: str | None, file_path: str | None, co
|
||||
return
|
||||
|
||||
refresh_args: list[str] = []
|
||||
if hash_hex:
|
||||
refresh_args.extend(["-hash", hash_hex])
|
||||
if file_hash:
|
||||
refresh_args.extend(["-hash", file_hash])
|
||||
get_tag_cmd._run(subject, refresh_args, config)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="delete-tags",
|
||||
summary="Remove tags from a Hydrus file.",
|
||||
usage="del-tags [-hash <sha256>] <tag>[,<tag>...]",
|
||||
alias=["del-tag", "del-tags", "delete-tag"],
|
||||
name="delete-tag",
|
||||
summary="Remove tags from a file in a store.",
|
||||
usage="delete-tag -store <store> [-hash <sha256>] <tag>[,<tag>...]",
|
||||
arg=[
|
||||
SharedArgs.HASH,
|
||||
SharedArgs.STORE,
|
||||
CmdletArg("<tag>[,<tag>...]", required=True, description="One or more tags to remove. Comma- or space-separated."),
|
||||
],
|
||||
detail=[
|
||||
@@ -71,7 +71,7 @@ CMDLET = Cmdlet(
|
||||
],
|
||||
)
|
||||
|
||||
@register(["del-tag", "del-tags", "delete-tag", "delete-tags"]) # Still needed for backward compatibility
|
||||
@register(["delete-tag"])
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Help
|
||||
if should_show_help(args):
|
||||
@@ -94,6 +94,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Parse -hash override and collect tags from remaining args
|
||||
override_hash: str | None = None
|
||||
override_store: str | None = None
|
||||
rest: list[str] = []
|
||||
i = 0
|
||||
while i < len(args):
|
||||
@@ -103,6 +104,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
override_hash = str(args[i + 1]).strip()
|
||||
i += 2
|
||||
continue
|
||||
if low in {"-store", "--store", "store"} and i + 1 < len(args):
|
||||
override_store = str(args[i + 1]).strip()
|
||||
i += 2
|
||||
continue
|
||||
rest.append(a)
|
||||
i += 1
|
||||
|
||||
@@ -110,7 +115,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# @5 or @{2,5,8} to delete tags from ResultTable by index
|
||||
tags_from_at_syntax = []
|
||||
hash_from_at_syntax = None
|
||||
file_path_from_at_syntax = None
|
||||
path_from_at_syntax = None
|
||||
store_from_at_syntax = None
|
||||
|
||||
if rest and str(rest[0]).startswith("@"):
|
||||
selector_arg = str(rest[0])
|
||||
@@ -142,9 +148,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
tags_from_at_syntax.append(tag_name)
|
||||
# Also get hash from first item for consistency
|
||||
if not hash_from_at_syntax:
|
||||
hash_from_at_syntax = get_field(item, 'hash_hex')
|
||||
if not file_path_from_at_syntax:
|
||||
file_path_from_at_syntax = get_field(item, 'file_path')
|
||||
hash_from_at_syntax = get_field(item, 'hash')
|
||||
if not path_from_at_syntax:
|
||||
path_from_at_syntax = get_field(item, 'path')
|
||||
if not store_from_at_syntax:
|
||||
store_from_at_syntax = get_field(item, 'store')
|
||||
|
||||
if not tags_from_at_syntax:
|
||||
log(f"No tags found at indices: {indices}")
|
||||
@@ -201,10 +209,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# This preserves the existing logic for @ selection.
|
||||
|
||||
tags = tags_from_at_syntax
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_from_at_syntax)
|
||||
file_path = file_path_from_at_syntax
|
||||
file_hash = normalize_hash(override_hash) if override_hash else normalize_hash(hash_from_at_syntax)
|
||||
path = path_from_at_syntax
|
||||
store_name = override_store or store_from_at_syntax
|
||||
|
||||
if _process_deletion(tags, hash_hex, file_path, config):
|
||||
if _process_deletion(tags, file_hash, path, store_name, config):
|
||||
success_count += 1
|
||||
|
||||
else:
|
||||
@@ -216,13 +225,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
for item in items_to_process:
|
||||
tags_to_delete = []
|
||||
item_hash = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(item, "hash_hex"))
|
||||
item_hash = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(item, "hash"))
|
||||
item_path = (
|
||||
get_field(item, "path")
|
||||
or get_field(item, "file_path")
|
||||
or get_field(item, "target")
|
||||
)
|
||||
item_source = get_field(item, "source")
|
||||
item_store = override_store or get_field(item, "store")
|
||||
|
||||
if hasattr(item, '__class__') and item.__class__.__name__ == 'TagItem':
|
||||
# It's a TagItem
|
||||
@@ -248,66 +256,43 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# but inside the loop we might have mixed items? Unlikely.
|
||||
continue
|
||||
|
||||
if tags_to_delete and (item_hash or item_path):
|
||||
if _process_deletion(tags_to_delete, item_hash, item_path, config, source=item_source):
|
||||
if tags_to_delete:
|
||||
if _process_deletion(tags_to_delete, item_hash, item_path, item_store, config):
|
||||
success_count += 1
|
||||
|
||||
if success_count > 0:
|
||||
return 0
|
||||
return 1
|
||||
|
||||
def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | None, config: Dict[str, Any], source: str | None = None) -> bool:
|
||||
def _process_deletion(tags: list[str], file_hash: str | None, path: str | None, store_name: str | None, config: Dict[str, Any]) -> bool:
|
||||
"""Helper to execute the deletion logic for a single target."""
|
||||
|
||||
if not tags:
|
||||
return False
|
||||
|
||||
if not store_name:
|
||||
log("Store is required (use -store or pipe a result with store)", file=sys.stderr)
|
||||
return False
|
||||
|
||||
resolved_hash = normalize_hash(file_hash) if file_hash else None
|
||||
if not resolved_hash and path:
|
||||
try:
|
||||
from SYS.utils import sha256_file
|
||||
resolved_hash = sha256_file(Path(path))
|
||||
except Exception:
|
||||
resolved_hash = None
|
||||
|
||||
if not resolved_hash:
|
||||
log("Item does not include a usable hash (and hash could not be derived from path)", file=sys.stderr)
|
||||
return False
|
||||
|
||||
def _fetch_existing_tags() -> list[str]:
|
||||
existing: list[str] = []
|
||||
# Prefer local DB when we have a path and not explicitly hydrus
|
||||
if file_path and (source == "local" or (source != "hydrus" and not hash_hex)):
|
||||
try:
|
||||
from helper.folder_store import FolderDB
|
||||
from config import get_local_storage_path
|
||||
path_obj = Path(file_path)
|
||||
local_root = get_local_storage_path(config) or path_obj.parent
|
||||
with FolderDB(local_root) as db:
|
||||
file_hash = db.get_file_hash(path_obj)
|
||||
existing = db.get_tags(file_hash) if file_hash else []
|
||||
except Exception:
|
||||
existing = []
|
||||
elif hash_hex:
|
||||
meta, _ = fetch_hydrus_metadata(
|
||||
config, hash_hex,
|
||||
include_service_keys_to_tags=True,
|
||||
include_file_url=False,
|
||||
)
|
||||
if isinstance(meta, dict):
|
||||
tags_payload = meta.get("tags")
|
||||
if isinstance(tags_payload, dict):
|
||||
seen: set[str] = set()
|
||||
for svc_data in tags_payload.values():
|
||||
if not isinstance(svc_data, dict):
|
||||
continue
|
||||
display = svc_data.get("display_tags")
|
||||
if isinstance(display, list):
|
||||
for t in display:
|
||||
if isinstance(t, (str, bytes)):
|
||||
val = str(t).strip()
|
||||
if val and val not in seen:
|
||||
seen.add(val)
|
||||
existing.append(val)
|
||||
storage = svc_data.get("storage_tags")
|
||||
if isinstance(storage, dict):
|
||||
current_list = storage.get("0") or storage.get(0)
|
||||
if isinstance(current_list, list):
|
||||
for t in current_list:
|
||||
if isinstance(t, (str, bytes)):
|
||||
val = str(t).strip()
|
||||
if val and val not in seen:
|
||||
seen.add(val)
|
||||
existing.append(val)
|
||||
return existing
|
||||
try:
|
||||
backend = Store(config)[store_name]
|
||||
existing, _src = backend.get_tag(resolved_hash, config=config)
|
||||
return list(existing or [])
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
# Safety: only block if this deletion would remove the final title tag
|
||||
title_tags = [t for t in tags if isinstance(t, str) and t.lower().startswith("title:")]
|
||||
@@ -320,61 +305,17 @@ def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | No
|
||||
log("Cannot delete the last title: tag. Add a replacement title first (add-tag \"title:new title\").", file=sys.stderr)
|
||||
return False
|
||||
|
||||
if not hash_hex and not file_path:
|
||||
log("Item does not include a hash or file path")
|
||||
return False
|
||||
|
||||
# Handle local file tag deletion
|
||||
if file_path and (source == "local" or (not hash_hex and source != "hydrus")):
|
||||
try:
|
||||
from helper.folder_store import FolderDB
|
||||
from pathlib import Path
|
||||
|
||||
path_obj = Path(file_path)
|
||||
if not path_obj.exists():
|
||||
log(f"File not found: {file_path}")
|
||||
return False
|
||||
|
||||
# Try to get local storage path from config
|
||||
from config import get_local_storage_path
|
||||
local_root = get_local_storage_path(config)
|
||||
|
||||
if not local_root:
|
||||
# Fallback: assume file is in a library root or use its parent
|
||||
local_root = path_obj.parent
|
||||
|
||||
with FolderDB(local_root) as db:
|
||||
db.remove_tags(path_obj, tags)
|
||||
debug(f"Removed {len(tags)} tag(s) from {path_obj.name} (local)")
|
||||
_refresh_tag_view_if_current(hash_hex, file_path, config)
|
||||
return True
|
||||
|
||||
except Exception as exc:
|
||||
log(f"Failed to remove local tags: {exc}")
|
||||
return False
|
||||
|
||||
# Hydrus deletion logic
|
||||
if not hash_hex:
|
||||
return False
|
||||
|
||||
try:
|
||||
service_name = hydrus_wrapper.get_tag_service_name(config)
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable")
|
||||
return False
|
||||
|
||||
debug(f"Sending deletion request: hash={hash_hex}, tags={tags}, service={service_name}")
|
||||
client.delete_tags(hash_hex, tags, service_name)
|
||||
|
||||
preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
|
||||
debug(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
|
||||
_refresh_tag_view_if_current(hash_hex, None, config)
|
||||
return True
|
||||
|
||||
backend = Store(config)[store_name]
|
||||
ok = backend.delete_tag(resolved_hash, list(tags), config=config)
|
||||
if ok:
|
||||
preview = resolved_hash[:12] + ('…' if len(resolved_hash) > 12 else '')
|
||||
debug(f"Removed {len(tags)} tag(s) from {preview} via store '{store_name}'.")
|
||||
_refresh_tag_view_if_current(resolved_hash, store_name, path, config)
|
||||
return True
|
||||
return False
|
||||
except Exception as exc:
|
||||
log(f"Hydrus del-tag failed: {exc}")
|
||||
log(f"del-tag failed: {exc}")
|
||||
return False
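The rewritten helper above resolves a hash, indexes the configured store, and lets the backend perform the deletion. A compact usage sketch, assuming the same Store API; store name, hash and tag are placeholders:

from Store import Store

def delete_one_tag(config, store_name, file_hash, tag):
    backend = Store(config)[store_name]
    if backend.delete_tag(file_hash, [tag], config=config):
        print(f"Removed '{tag}' from {file_hash[:12]} via store '{store_name}'")
        return True
    return False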
|
||||
|
||||
|
||||
|
||||
@@ -6,8 +6,8 @@ import sys
|
||||
from . import register
|
||||
import pipeline as ctx
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field, normalize_hash
|
||||
from helper.logger import log
|
||||
from helper.store import FileStorage
|
||||
from SYS.logger import log
|
||||
from Store import Store
|
||||
|
||||
|
||||
class Delete_Url(Cmdlet):
|
||||
@@ -54,19 +54,19 @@ class Delete_Url(Cmdlet):
|
||||
return 1
|
||||
|
||||
# Parse url (comma-separated)
|
||||
url = [u.strip() for u in str(url_arg).split(',') if u.strip()]
|
||||
if not url:
|
||||
urls = [u.strip() for u in str(url_arg).split(',') if u.strip()]
|
||||
if not urls:
|
||||
log("Error: No valid url provided")
|
||||
return 1
|
||||
|
||||
# Get backend and delete url
|
||||
try:
|
||||
storage = FileStorage(config)
|
||||
storage = Store(config)
|
||||
backend = storage[store_name]
|
||||
|
||||
for url in url:
|
||||
backend.delete_url(file_hash, url)
|
||||
ctx.emit(f"Deleted URL: {url}")
|
||||
|
||||
backend.delete_url(file_hash, urls)
|
||||
for u in urls:
|
||||
ctx.emit(f"Deleted URL: {u}")
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
@@ -15,8 +15,8 @@ import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Sequence
|
||||
|
||||
from helper.download import DownloadError, _download_direct_file
|
||||
from helper.logger import log, debug
|
||||
from SYS.download import DownloadError, _download_direct_file
|
||||
from SYS.logger import log, debug
|
||||
from models import DownloadOptions
|
||||
import pipeline as pipeline_context
|
||||
|
||||
@@ -168,19 +168,16 @@ class Download_File(Cmdlet):
|
||||
# Build tags with title for searchability
|
||||
tags = [f"title:{title}"]
|
||||
|
||||
# Prefer canonical fields while keeping legacy keys for compatibility
|
||||
# Canonical pipeline payload (no legacy aliases)
|
||||
return {
|
||||
"path": str(media_path),
|
||||
"hash": hash_value,
|
||||
"file_hash": hash_value,
|
||||
"title": title,
|
||||
"file_title": title,
|
||||
"action": "cmdlet:download-file",
|
||||
"download_mode": "file",
|
||||
"url": url or (download_result.get('url') if isinstance(download_result, dict) else None),
|
||||
"url": [url] if url else [],
|
||||
"store": "local",
|
||||
"storage_source": "downloads",
|
||||
"media_kind": "file",
|
||||
"tags": tags,
|
||||
}
|
||||
|
||||
@@ -29,9 +29,9 @@ from typing import Any, Dict, Iterator, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from helper.logger import log, debug
|
||||
from helper.utils import ensure_directory, sha256_file
|
||||
from helper.http_client import HTTPClient
|
||||
from SYS.logger import log, debug
|
||||
from SYS.utils import ensure_directory, sha256_file
|
||||
from API.HTTP import HTTPClient
|
||||
from models import DownloadError, DownloadOptions, DownloadMediaResult, DebugLogger, ProgressBar
|
||||
import pipeline as pipeline_context
|
||||
from result_table import ResultTable
|
||||
@@ -1199,7 +1199,7 @@ class Download_Media(Cmdlet):
|
||||
|
||||
# Build format dict for emission and table
|
||||
format_dict = {
|
||||
"origin": "download-media",
|
||||
"table": "download-media",
|
||||
"title": f"Format {format_id}",
|
||||
"url": url,
|
||||
"target": url,
|
||||
|
||||
cmdlets/download_provider.py (new file, 157 lines)
@@ -0,0 +1,157 @@
|
||||
"""download-provider cmdlet: Download items from external providers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence, List, Optional
|
||||
from pathlib import Path
|
||||
import sys
|
||||
import json
|
||||
|
||||
from SYS.logger import log, debug
|
||||
from Provider.registry import get_search_provider, SearchResult
|
||||
from SYS.utils import unique_path
|
||||
|
||||
from ._shared import Cmdlet, CmdletArg, should_show_help, get_field, coerce_to_pipe_object
|
||||
import pipeline as ctx
|
||||
|
||||
# Optional dependencies
|
||||
try:
|
||||
from config import get_local_storage_path, resolve_output_dir
|
||||
except Exception: # pragma: no cover
|
||||
get_local_storage_path = None # type: ignore
|
||||
resolve_output_dir = None # type: ignore
|
||||
|
||||
class Download_Provider(Cmdlet):
|
||||
"""Download items from external providers."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
name="download-provider",
|
||||
summary="Download items from external providers (soulseek, libgen, etc).",
|
||||
usage="download-provider [item] [-output DIR]",
|
||||
arg=[
|
||||
CmdletArg("output", type="string", alias="o", description="Output directory"),
|
||||
],
|
||||
detail=[
|
||||
"Download items from external providers.",
|
||||
"Usually called automatically by @N selection on provider results.",
|
||||
"Can be used manually by piping a provider result item.",
|
||||
],
|
||||
exec=self.run
|
||||
)
|
||||
self.register()
|
||||
|
||||
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"""Execute download-provider cmdlet."""
|
||||
if should_show_help(args):
|
||||
ctx.emit(self.__dict__)
|
||||
return 0
|
||||
|
||||
# Parse arguments
|
||||
output_dir_arg = None
|
||||
i = 0
|
||||
while i < len(args):
|
||||
arg = args[i]
|
||||
if arg in ("-output", "--output", "-o") and i + 1 < len(args):
|
||||
output_dir_arg = args[i+1]
|
||||
i += 2
|
||||
else:
|
||||
i += 1
|
||||
|
||||
# Determine output directory
|
||||
if output_dir_arg:
|
||||
output_dir = Path(output_dir_arg)
|
||||
elif resolve_output_dir:
|
||||
output_dir = resolve_output_dir(config)
|
||||
else:
|
||||
output_dir = Path("./downloads")
|
||||
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Process input result
|
||||
items = []
|
||||
if isinstance(result, list):
|
||||
items = result
|
||||
elif result:
|
||||
items = [result]
|
||||
|
||||
if not items:
|
||||
log("No items to download", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
success_count = 0
|
||||
|
||||
for item in items:
|
||||
try:
|
||||
# Extract provider info
|
||||
table = get_field(item, "table")
|
||||
if not table:
|
||||
log(f"Skipping item without provider info: {item}", file=sys.stderr)
|
||||
continue
|
||||
|
||||
provider = get_search_provider(table, config)
|
||||
if not provider:
|
||||
log(f"Provider '{table}' not available for download", file=sys.stderr)
|
||||
continue
|
||||
|
||||
# Reconstruct SearchResult if needed
|
||||
# The provider.download method expects a SearchResult object or compatible dict
|
||||
if isinstance(item, dict):
|
||||
# Ensure full_metadata is present
|
||||
if "full_metadata" not in item and "extra" in item:
|
||||
item["full_metadata"] = item["extra"].get("full_metadata", {})
|
||||
|
||||
search_result = SearchResult(
|
||||
table=table,
|
||||
title=item.get("title", "Unknown"),
|
||||
path=item.get("path", ""),
|
||||
full_metadata=item.get("full_metadata", {})
|
||||
)
|
||||
else:
|
||||
# Assume it's an object with attributes (like PipeObject)
|
||||
full_metadata = getattr(item, "full_metadata", {})
|
||||
# Check extra dict if full_metadata is missing/empty
|
||||
if not full_metadata and hasattr(item, "extra") and isinstance(item.extra, dict):
|
||||
full_metadata = item.extra.get("full_metadata", {})
|
||||
# Fallback: if full_metadata key isn't there, maybe the extra dict IS the metadata
|
||||
if not full_metadata and "username" in item.extra:
|
||||
full_metadata = item.extra
|
||||
|
||||
search_result = SearchResult(
|
||||
table=table,
|
||||
title=getattr(item, "title", "Unknown"),
|
||||
path=getattr(item, "path", ""),
|
||||
full_metadata=full_metadata
|
||||
)
|
||||
|
||||
debug(f"[download-provider] Downloading '{search_result.title}' via {table}...")
|
||||
downloaded_path = provider.download(search_result, output_dir)
|
||||
|
||||
if downloaded_path:
|
||||
debug(f"[download-provider] Download successful: {downloaded_path}")
|
||||
|
||||
# Create PipeObject for the downloaded file
|
||||
pipe_obj = coerce_to_pipe_object({
|
||||
"path": str(downloaded_path),
|
||||
"title": search_result.title,
|
||||
"table": "local", # Now it's a local file
|
||||
"media_kind": getattr(item, "media_kind", "other"),
|
||||
"tags": getattr(item, "tags", []),
|
||||
"full_metadata": search_result.full_metadata
|
||||
})
|
||||
|
||||
ctx.emit(pipe_obj)
|
||||
success_count += 1
|
||||
else:
|
||||
log(f"Download failed for '{search_result.title}'", file=sys.stderr)
|
||||
|
||||
except Exception as e:
|
||||
log(f"Error downloading item: {e}", file=sys.stderr)
|
||||
import traceback
|
||||
debug(traceback.format_exc())
|
||||
|
||||
if success_count > 0:
|
||||
return 0
|
||||
return 1
|
||||
|
||||
# Register cmdlet instance
|
||||
Download_Provider_Instance = Download_Provider()
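Illustrative invocation of the new cmdlet with a dict shaped like a provider search row; the provider name and metadata values are made-up placeholders:

item = {
    "table": "libgen",
    "title": "Some Title",
    "path": "",
    "full_metadata": {"id": "12345"},
}
Download_Provider_Instance.run(item, ["-output", "./downloads"], config={})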
|
||||
@@ -14,7 +14,7 @@ import threading
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Sequence
|
||||
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
from ._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
|
||||
class Download_Torrent(Cmdlet):
|
||||
@@ -66,7 +66,7 @@ class Download_Torrent(Cmdlet):
|
||||
worker_manager: Optional[Any] = None,
|
||||
) -> None:
|
||||
try:
|
||||
from helper.alldebrid import AllDebridClient
|
||||
from API.alldebrid import AllDebridClient
|
||||
client = AllDebridClient(api_key)
|
||||
log(f"[Worker {worker_id}] Submitting magnet to AllDebrid...")
|
||||
magnet_info = client.magnet_add(magnet_url)
|
||||
|
||||
@@ -8,8 +8,8 @@ import shutil
|
||||
from . import register
|
||||
import pipeline as ctx
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field, normalize_hash
|
||||
from helper.logger import log, debug
|
||||
from helper.store import FileStorage
|
||||
from SYS.logger import log, debug
|
||||
from Store import Store
|
||||
from config import resolve_output_dir
|
||||
|
||||
|
||||
@@ -68,8 +68,8 @@ class Get_File(Cmdlet):
|
||||
debug(f"[get-file] Getting storage backend: {store_name}")
|
||||
|
||||
# Get storage backend
|
||||
storage = FileStorage(config)
|
||||
backend = storage[store_name]
|
||||
store = Store(config)
|
||||
backend = store[store_name]
|
||||
debug(f"[get-file] Backend retrieved: {type(backend).__name__}")
|
||||
|
||||
# Get file metadata to determine name and extension
|
||||
|
||||
File diff suppressed because it is too large
@@ -4,7 +4,7 @@ from typing import Any, Dict, Sequence, Optional
|
||||
import json
|
||||
import sys
|
||||
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
from pathlib import Path
|
||||
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field
|
||||
@@ -69,7 +69,7 @@ class Get_Metadata(Cmdlet):
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def _build_table_row(title: str, origin: str, path: str, mime: str, size_bytes: Optional[int],
|
||||
def _build_table_row(title: str, store: str, path: str, mime: str, size_bytes: Optional[int],
|
||||
dur_seconds: Optional[int], imported_ts: Optional[int], url: list[str],
|
||||
hash_value: Optional[str], pages: Optional[int] = None) -> Dict[str, Any]:
|
||||
"""Build a table row dict with metadata fields."""
|
||||
@@ -97,13 +97,13 @@ class Get_Metadata(Cmdlet):
|
||||
("Size(MB)", str(size_mb) if size_mb is not None else ""),
|
||||
(duration_label, duration_value),
|
||||
("Imported", imported_label),
|
||||
("Store", origin or ""),
|
||||
("Store", store or ""),
|
||||
]
|
||||
|
||||
return {
|
||||
"title": title or path,
|
||||
"path": path,
|
||||
"origin": origin,
|
||||
"store": store,
|
||||
"mime": mime,
|
||||
"size_bytes": size_bytes,
|
||||
"duration_seconds": dur_int,
|
||||
@@ -143,8 +143,8 @@ class Get_Metadata(Cmdlet):
|
||||
parsed = parse_cmdlet_args(args, self)
|
||||
|
||||
# Get hash and store from parsed args or result
|
||||
file_hash = parsed.get("hash") or get_field(result, "hash") or get_field(result, "file_hash") or get_field(result, "hash_hex")
|
||||
storage_source = parsed.get("store") or get_field(result, "store") or get_field(result, "storage") or get_field(result, "origin")
|
||||
file_hash = parsed.get("hash") or get_field(result, "hash")
|
||||
storage_source = parsed.get("store") or get_field(result, "store")
|
||||
|
||||
if not file_hash:
|
||||
log("No hash available - use -hash to specify", file=sys.stderr)
|
||||
@@ -156,8 +156,8 @@ class Get_Metadata(Cmdlet):
|
||||
|
||||
# Use storage backend to get metadata
|
||||
try:
|
||||
from helper.store import FileStorage
|
||||
storage = FileStorage(config)
|
||||
from Store import Store
|
||||
storage = Store(config)
|
||||
backend = storage[storage_source]
|
||||
|
||||
# Get metadata from backend
|
||||
@@ -200,8 +200,8 @@ class Get_Metadata(Cmdlet):
|
||||
# Build display row
|
||||
row = self._build_table_row(
|
||||
title=title,
|
||||
origin=storage_source,
|
||||
path=metadata.get("file_path", ""),
|
||||
store=storage_source,
|
||||
path=metadata.get("path", ""),
|
||||
mime=mime_type,
|
||||
size_bytes=file_size,
|
||||
dur_seconds=duration_seconds,
|
||||
|
||||
@@ -6,9 +6,9 @@ import json
|
||||
from . import register
|
||||
import models
|
||||
import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, get_hash_for_operation, fetch_hydrus_metadata, get_field, should_show_help
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="get-note",
|
||||
|
||||
@@ -5,13 +5,13 @@ import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from helper.logger import log
|
||||
from SYS.logger import log
|
||||
|
||||
import models
|
||||
import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, fmt_bytes, get_hash_for_operation, fetch_hydrus_metadata, should_show_help
|
||||
from helper.folder_store import FolderDB
|
||||
from API.folder import API_folder_store
|
||||
from config import get_local_storage_path
|
||||
from result_table import ResultTable
|
||||
|
||||
@@ -53,7 +53,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
result = result[0]
|
||||
|
||||
# Initialize results collection
|
||||
found_relationships = [] # List of dicts: {hash, type, title, path, origin}
|
||||
found_relationships = [] # List of dicts: {hash, type, title, path, store}
|
||||
source_title = "Unknown"
|
||||
|
||||
def _add_relationship(entry: Dict[str, Any]) -> None:
|
||||
@@ -89,7 +89,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
storage_path = get_local_storage_path(config)
|
||||
print(f"[DEBUG] Storage path: {storage_path}", file=sys.stderr)
|
||||
if storage_path:
|
||||
with FolderDB(storage_path) as db:
|
||||
with API_folder_store(storage_path) as db:
|
||||
file_hash = db.get_file_hash(path_obj)
|
||||
metadata = db.get_metadata(file_hash) if file_hash else None
|
||||
print(f"[DEBUG] Metadata found: {metadata is not None}", file=sys.stderr)
|
||||
@@ -130,7 +130,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
"type": entry_type,
"title": title,
"path": path,
"origin": "local"
"store": "local"
})

# RECURSIVE LOOKUP: If this is an "alt" relationship (meaning we're an alt pointing to a king),
@@ -169,7 +169,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
"type": "king" if rel_type.lower() == "alt" else rel_type,
"title": parent_title,
"path": str(path),
"origin": "local"
"store": "local"
})
else:
# If already in results, ensure it's marked as king if appropriate
@@ -223,7 +223,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
"type": f"alt" if child_type == "alt" else f"sibling ({child_type})",
"title": child_title,
"path": str(child_path_obj),
"origin": "local"
"store": "local"
})
else:
print(f"[DEBUG] ⚠️ Parent has no relationships metadata", file=sys.stderr)
@@ -261,7 +261,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
"type": f"alt" if child_type == "alt" else f"sibling ({child_type})",
"title": child_title,
"path": child_path,
"origin": "local"
"store": "local"
})

except Exception as e:
@@ -299,7 +299,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
# "type": f"reverse-{rev_type}", # e.g. reverse-alt
# "title": rev_title,
# "path": rev_path,
# "origin": "local"
# "store": "local"
# })

except Exception as e:
@@ -352,7 +352,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
"type": rel_name,
"title": rel_hash, # Can't resolve title easily without another API call
"path": None,
"origin": "hydrus"
"store": "hydrus"
})
except Exception as exc:
# Only log error if we didn't find local relationships either
@@ -390,7 +390,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
row.add_column("Type", item['type'].title())
row.add_column("Title", item['title'])
# row.add_column("Hash", item['hash'][:16] + "...") # User requested removal
row.add_column("Origin", item['origin'])
row.add_column("Store", item['store'])

# Create result object for pipeline
res_obj = {
@@ -398,7 +398,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
"hash": item['hash'],
"file_hash": item['hash'],
"relationship_type": item['type'],
"origin": item['origin']
"store": item['store']
}
if item['path']:
res_obj["path"] = item['path']

@@ -1,7 +1,7 @@
"""Get tags from Hydrus or local sidecar metadata.

This cmdlet retrieves tags for a selected result, supporting both:
- Hydrus Network (for files with hash_hex)
- Hydrus Network (for files with hash)
- Local sidecar files (.tags)

In interactive mode: navigate with numbers, add/delete tags
@@ -12,15 +12,15 @@ from __future__ import annotations

import sys

from helper.logger import log, debug
from helper.metadata_search import get_metadata_provider, list_metadata_providers
from SYS.logger import log, debug
from Provider.metadata_provider import get_metadata_provider, list_metadata_providers
import subprocess
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple

import pipeline as ctx
from helper import hydrus
from helper.folder_store import read_sidecar, write_sidecar, find_sidecar, FolderDB
from API import HydrusNetwork
from API.folder import read_sidecar, write_sidecar, find_sidecar, API_folder_store
from ._shared import normalize_hash, looks_like_hash, Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field
from config import get_local_storage_path

@@ -47,15 +47,14 @@ class TagItem:
"""
tag_name: str
tag_index: int # 1-based index for user reference
hash_hex: Optional[str] = None
source: str = "hydrus"
hash: Optional[str] = None
store: str = "hydrus"
service_name: Optional[str] = None
file_path: Optional[str] = None
path: Optional[str] = None

def __post_init__(self):
# Make ResultTable happy by adding standard fields
# NOTE: Don't set 'title' - we want only the tag column in ResultTable
self.origin = self.source
self.detail = f"Tag #{self.tag_index}"
self.target = self.tag_name
self.media_kind = "tag"
@@ -65,20 +64,21 @@ class TagItem:
return {
"tag_name": self.tag_name,
"tag_index": self.tag_index,
"hash_hex": self.hash_hex,
"source": self.source,
"hash": self.hash,
"store": self.store,
"path": self.path,
"service_name": self.service_name,
}


def _emit_tags_as_table(
tags_list: List[str],
hash_hex: Optional[str],
source: str = "hydrus",
file_hash: Optional[str],
store: str = "hydrus",
service_name: Optional[str] = None,
config: Dict[str, Any] = None,
item_title: Optional[str] = None,
file_path: Optional[str] = None,
path: Optional[str] = None,
subject: Optional[Any] = None,
) -> None:
"""Emit tags as TagItem objects and display via ResultTable.
@@ -92,8 +92,8 @@ def _emit_tags_as_table(
table_title = "Tags"
if item_title:
table_title = f"Tags: {item_title}"
if hash_hex:
table_title += f" [{hash_hex[:8]}]"
if file_hash:
table_title += f" [{file_hash[:8]}]"

table = ResultTable(table_title, max_columns=1)
table.set_source_command("get-tag", [])
@@ -104,10 +104,10 @@ def _emit_tags_as_table(
tag_item = TagItem(
tag_name=tag_name,
tag_index=idx,
hash_hex=hash_hex,
source=source,
hash=file_hash,
store=store,
service_name=service_name,
file_path=file_path,
path=path,
)
tag_items.append(tag_item)
table.add_result(tag_item)
@@ -401,8 +401,8 @@ def _emit_tag_payload(source: str, tags_list: List[str], *, hash_value: Optional
tag_item = TagItem(
tag_name=tag_name,
tag_index=idx,
hash_hex=hash_value,
source=source,
hash=hash_value,
store=source,
service_name=None
)
ctx.emit(tag_item)
@@ -698,7 +698,7 @@ def _scrape_isbn_metadata(isbn: str) -> List[str]:
"""Scrape metadata for an ISBN using Open Library API."""
new_tags = []
try:
from ..helper.http_client import HTTPClient
from ..API.HTTP import HTTPClient
import json as json_module

isbn_clean = isbn.replace('-', '').strip()
@@ -822,7 +822,7 @@ def _scrape_openlibrary_metadata(olid: str) -> List[str]:
"""
new_tags = []
try:
from ..helper.http_client import HTTPClient
from ..API.HTTP import HTTPClient
import json as json_module

# Format: OL9674499M or just 9674499M
@@ -995,7 +995,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
get-tag -scrape <url|provider>

Options:
-hash <sha256>: Override hash to use instead of result's hash_hex
-hash <sha256>: Override hash to use instead of result's hash
--store <key>: Store result to this key for pipeline
--emit: Emit result without interactive prompt (quiet mode)
-scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks)
@@ -1150,7 +1150,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
table = ResultTable(f"Metadata: {provider.name}")
table.set_source_command("get-tag", [])
selection_payload = []
hash_for_payload = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash_hex", None))
hash_for_payload = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None))
store_for_payload = get_field(result, "store", None)
for idx, item in enumerate(items):
tags = provider.to_tags(item)
row = table.add_row()
@@ -1165,13 +1166,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"artist": item.get("artist"),
"album": item.get("album"),
"year": item.get("year"),
"hash": hash_for_payload,
"store": store_for_payload,
"extra": {
"tags": tags,
"provider": provider.name,
"hydrus_hash": hash_for_payload,
"storage_source": get_field(result, "source", None) or get_field(result, "origin", None),
},
"file_hash": hash_for_payload,
}
selection_payload.append(payload)
table.set_row_selection_args(idx, [str(idx + 1)])
@@ -1192,30 +1192,29 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
if isinstance(result, list) and len(result) > 0:
result = result[0]

hash_from_result = normalize_hash(get_field(result, "hash_hex", None))
hash_hex = hash_override or hash_from_result
hash_from_result = normalize_hash(get_field(result, "hash", None))
file_hash = hash_override or hash_from_result
# Only use emit mode if explicitly requested with --emit flag, not just because we're in a pipeline
# This allows interactive REPL to work even in pipelines
emit_mode = emit_requested or bool(store_key)
store_label = (store_key.strip() if store_key and store_key.strip() else None)

# Get hash and store from result
file_hash = hash_hex
storage_source = get_field(result, "store") or get_field(result, "storage") or get_field(result, "origin")
store_name = get_field(result, "store")

if not file_hash:
log("No hash available in result", file=sys.stderr)
return 1

if not storage_source:
log("No storage backend specified in result", file=sys.stderr)
if not store_name:
log("No store specified in result", file=sys.stderr)
return 1

# Get tags using storage backend
try:
from helper.store import FileStorage
storage = FileStorage(config)
backend = storage[storage_source]
from Store import Store
storage = Store(config)
backend = storage[store_name]
current, source = backend.get_tag(file_hash, config=config)

if not current:
@@ -1224,7 +1223,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

service_name = ""
except KeyError:
log(f"Storage backend '{storage_source}' not found", file=sys.stderr)
log(f"Store '{store_name}' not found", file=sys.stderr)
return 1
except Exception as exc:
log(f"Failed to get tags: {exc}", file=sys.stderr)
@@ -1235,48 +1234,42 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
item_title = get_field(result, "title", None) or get_field(result, "name", None) or get_field(result, "filename", None)

# Build a subject payload representing the file whose tags are being shown
subject_origin = get_field(result, "origin", None) or get_field(result, "source", None) or source
subject_store = get_field(result, "store", None) or store_name
subject_payload: Dict[str, Any] = {
"tags": list(current),
"title": item_title,
"name": item_title,
"origin": subject_origin,
"source": subject_origin,
"storage_source": subject_origin,
"store": subject_store,
"service_name": service_name,
"extra": {
"tags": list(current),
"storage_source": subject_origin,
"hydrus_hash": hash_hex,
},
}
if hash_hex:
subject_payload.update({
"hash": hash_hex,
"hash_hex": hash_hex,
"file_hash": hash_hex,
"hydrus_hash": hash_hex,
})
if file_hash:
subject_payload["hash"] = file_hash
if local_path:
try:
path_text = str(local_path)
subject_payload.update({
"file_path": path_text,
"path": path_text,
"target": path_text,
})
subject_payload["extra"]["file_path"] = path_text
except Exception:
pass

if source == "hydrus":
_emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config, item_title=item_title, subject=subject_payload)
else:
_emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config, item_title=item_title, file_path=str(local_path) if local_path else None, subject=subject_payload)
_emit_tags_as_table(
current,
file_hash=file_hash,
store=subject_store,
service_name=service_name if source == "hydrus" else None,
config=config,
item_title=item_title,
path=str(local_path) if local_path else None,
subject=subject_payload,
)

# If emit requested or store key provided, emit payload
if emit_mode:
_emit_tag_payload(source, current, hash_value=hash_hex, store_label=store_label)
_emit_tag_payload(source, current, hash_value=file_hash, store_label=store_label)

return 0

@@ -1341,22 +1334,22 @@ class Get_Tag(Cmdlet):

# Get hash and store from parsed args or result
hash_override = parsed.get("hash")
file_hash = hash_override or get_field(result, "hash") or get_field(result, "file_hash") or get_field(result, "hash_hex")
storage_source = parsed.get("store") or get_field(result, "store") or get_field(result, "storage") or get_field(result, "origin")
file_hash = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash"))
store_name = parsed.get("store") or get_field(result, "store")

if not file_hash:
log("No hash available in result", file=sys.stderr)
return 1

if not storage_source:
log("No storage backend specified in result", file=sys.stderr)
if not store_name:
log("No store specified in result", file=sys.stderr)
return 1

# Get tags using storage backend
try:
from helper.store import FileStorage
storage_obj = FileStorage(config)
backend = storage_obj[storage_source]
from Store import Store
storage_obj = Store(config)
backend = storage_obj[store_name]
current, source = backend.get_tag(file_hash, config=config)

if not current:
@@ -1367,18 +1360,18 @@ class Get_Tag(Cmdlet):
item_title = get_field(result, "title") or file_hash[:16]
_emit_tags_as_table(
tags_list=current,
hash_hex=file_hash,
source=source,
file_hash=file_hash,
store=store_name,
service_name="",
config=config,
item_title=item_title,
file_path=None,
path=None,
subject=result,
)
return 0

except KeyError:
log(f"Storage backend '{storage_source}' not found", file=sys.stderr)
log(f"Store '{store_name}' not found", file=sys.stderr)
return 1
except Exception as exc:
log(f"Failed to get tags: {exc}", file=sys.stderr)

File diff suppressed because it is too large
@@ -6,8 +6,8 @@ import sys
from . import register
import pipeline as ctx
from ._shared import Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field, normalize_hash
from helper.logger import log
from helper.store import FileStorage
from SYS.logger import log
from Store import Store


class Get_Url(Cmdlet):
@@ -48,16 +48,16 @@ class Get_Url(Cmdlet):

# Get backend and retrieve url
try:
storage = FileStorage(config)
storage = Store(config)
backend = storage[store_name]

url = backend.get_url(file_hash)

if url:
for url in url:

urls = backend.get_url(file_hash)

if urls:
for u in urls:
# Emit rich object for pipeline compatibility
ctx.emit({
"url": url,
"url": u,
"hash": file_hash,
"store": store_name,
})
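The loop rewrite above also fixes a shadowing bug: "for url in url" overwrote the list returned by get_url on the first pass. A minimal sketch of the corrected emit loop, assuming get_url returns a list of URL strings as the call site implies:

urls = backend.get_url(file_hash)
for u in urls or []:
    # Emit one pipeline object per known URL for this hash.
    ctx.emit({"url": u, "hash": file_hash, "store": store_name})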
@@ -6,8 +6,8 @@ from pathlib import Path
import json
import sys

from helper.logger import log
from helper.download import download_media
from SYS.logger import log
from cmdlets.download_media import download_media
from models import DownloadOptions
from config import resolve_output_dir
import subprocess as _subprocess
@@ -326,7 +326,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
self.media_kind = media_kind
self.tags = tags or []
self.url = url or []
self.origin = "local" # Ensure origin is set for add-file
self.store = "local"
PipelineItem = SimpleItem

merged_item = PipelineItem(
@@ -589,7 +589,7 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:

# Run ffmpeg with progress monitoring
try:
from helper.progress import print_progress, print_final_progress
from SYS.progress import print_progress, print_final_progress
import re

process = _subprocess.Popen(

@@ -18,9 +18,9 @@ from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple
from urllib.parse import urlsplit, quote, urljoin

from helper.logger import log, debug
from helper.http_client import HTTPClient
from helper.utils import ensure_directory, unique_path, unique_preserve_order
from SYS.logger import log, debug
from API.HTTP import HTTPClient
from SYS.utils import ensure_directory, unique_path, unique_preserve_order

from . import register
from ._shared import Cmdlet, CmdletArg, SharedArgs, create_pipe_object_result, normalize_result_input, should_show_help, get_field
@@ -661,7 +661,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
file_path=str(screenshot_result.path),
cmdlet_name='screen-shot',
title=f"Screenshot: {Path(screenshot_result.path).name}",
file_hash=screenshot_hash,
hash_value=screenshot_hash,
is_temp=True,
parent_hash=hashlib.sha256(url.encode()).hexdigest(),
extra={
@@ -695,30 +695,19 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return exit_code
CMDLET = Cmdlet(
name="screen-shot",
summary="Capture a screenshot of a URL or file and mark as temporary artifact",
summary="Capture a website screenshot",
usage="screen-shot <url> [options] or download-data <url> | screen-shot [options]",
alias=["screenshot", "ss"],
arg=[
CmdletArg(name="url", type="string", required=False, description="URL to screenshot (or from pipeline)"),
SharedArgs.URL,
CmdletArg(name="format", type="string", description="Output format: png, jpeg, or pdf"),
CmdletArg(name="selector", type="string", description="CSS selector for element capture"),
SharedArgs.ARCHIVE, # Use shared archive argument
SharedArgs.STORE, # Use shared storage argument

],
detail=[
"Take screenshots of url with optional archiving and element targeting.",
"Screenshots are marked as temporary artifacts for cleanup by the cleanup cmdlet.",
"",
"Arguments:",
" url URL to capture (optional if piped from pipeline)",
" --format FORMAT Output format: png (default), jpeg, or pdf",
" --selector SEL CSS selector for capturing specific element",
" --archive, -arch Archive URL to Wayback/Archive.today/Archive.ph",
" --storage LOCATION Storage destination: hydrus, local, 0x0, debrid, or ftp",
"",
"Examples:",
" download-data https://example.com | screen-shot --storage local",
" download-data https://twitter.com/user/status/123 | screen-shot --selector 'article[role=article]' --storage hydrus --archive",
" screen-shot https://example.com --format jpeg --storage 0x0 --archive",
]
detail=
["""



"""]
)

@@ -1,15 +1,23 @@
"""search-provider cmdlet: Search external providers (bandcamp, libgen, soulseek, youtube)."""
from __future__ import annotations

from typing import Any, Dict, List, Sequence
from typing import Any, Dict, List, Sequence, Optional
import sys
import json
import uuid
import importlib

from helper.logger import log, debug
from helper.provider import get_search_provider, list_search_providers
from SYS.logger import log, debug
from Provider.registry import get_search_provider, list_search_providers

from ._shared import Cmdlet, CmdletArg, should_show_help
import pipeline as ctx

# Optional dependencies
try:
from config import get_local_storage_path
except Exception: # pragma: no cover
get_local_storage_path = None # type: ignore

class Search_Provider(Cmdlet):
"""Search external content providers."""
@@ -88,30 +96,74 @@ class Search_Provider(Cmdlet):
if available:
log(f" - {name}", file=sys.stderr)
return 1

# Execute search
try:
debug(f"[search-provider] Calling {provider_name}.search()")
results = provider.search(query, limit=limit)
debug(f"[search-provider] Got {len(results)} results")

if not results:
log(f"No results found for query: {query}", file=sys.stderr)
return 0

# Emit results for pipeline
for search_result in results:
ctx.emit(search_result.to_dict())

log(f"Found {len(results)} result(s) from {provider_name}", file=sys.stderr)
return 0

except Exception as e:
log(f"Error searching {provider_name}: {e}", file=sys.stderr)
import traceback
debug(traceback.format_exc())

from API.folder import API_folder_store
worker_id = str(uuid.uuid4())
library_root = get_local_storage_path(config or {})
if not library_root:
log("No library root configured", file=sys.stderr)
return 1

# Use context manager to ensure database is always closed
with API_folder_store(library_root) as db:
try:
db.insert_worker(
worker_id,
"search-provider",
title=f"Search: {query}",
description=f"Provider: {provider_name}, Query: {query}",
pipe=ctx.get_current_command_text()
)

results_list = []
import result_table
importlib.reload(result_table)
from result_table import ResultTable

table_title = f"Search: {query} [{provider_name}]"
preserve_order = provider_name.lower() in ('youtube', 'openlibrary')
table = ResultTable(table_title).set_preserve_order(preserve_order)
table.set_table(provider_name)

debug(f"[search-provider] Calling {provider_name}.search()")
results = provider.search(query, limit=limit)
debug(f"[search-provider] Got {len(results)} results")

if not results:
log(f"No results found for query: {query}", file=sys.stderr)
db.append_worker_stdout(worker_id, json.dumps([], indent=2))
db.update_worker_status(worker_id, 'completed')
return 0

# Emit results for pipeline
for search_result in results:
item_dict = search_result.to_dict() if hasattr(search_result, 'to_dict') else dict(search_result)

# Ensure table field is set (should be by provider, but just in case)
if 'table' not in item_dict:
item_dict['table'] = provider_name

table.add_result(search_result) # ResultTable handles SearchResult objects
results_list.append(item_dict)
ctx.emit(item_dict)

ctx.set_last_result_table(table, results_list)
db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
db.update_worker_status(worker_id, 'completed')

log(f"Found {len(results)} result(s) from {provider_name}", file=sys.stderr)
return 0

except Exception as e:
log(f"Error searching {provider_name}: {e}", file=sys.stderr)
import traceback
debug(traceback.format_exc())
try:
db.update_worker_status(worker_id, 'error')
except Exception:
pass
return 1


# Register cmdlet instance
Search_Provider_Instance = Search_Provider()
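The worker bookkeeping added here mirrors what search-store already does with the same database. A condensed sketch of that lifecycle, with the method names (insert_worker, append_worker_stdout, update_worker_status) lifted from the hunk above and the argument shapes assumed:

import json
import uuid

from API.folder import API_folder_store

def run_tracked_search(provider, provider_name, query, limit, library_root):
    worker_id = str(uuid.uuid4())
    with API_folder_store(library_root) as db:
        # Record the worker before searching so failures can still be marked.
        db.insert_worker(worker_id, "search-provider",
                         title=f"Search: {query}",
                         description=f"Provider: {provider_name}, Query: {query}")
        try:
            results = provider.search(query, limit=limit)
            payload = [r.to_dict() if hasattr(r, "to_dict") else dict(r) for r in results]
            db.append_worker_stdout(worker_id, json.dumps(payload, indent=2))
            db.update_worker_status(worker_id, 'completed')
            return payload
        except Exception:
            db.update_worker_status(worker_id, 'error')
            raise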
@@ -9,9 +9,9 @@ import re
import json
import sys

from helper.logger import log, debug
from SYS.logger import log, debug

from ._shared import Cmdlet, CmdletArg, get_origin, get_field, should_show_help
from ._shared import Cmdlet, CmdletArg, get_field, should_show_help
import pipeline as ctx

# Optional dependencies
@@ -27,18 +27,18 @@ except Exception: # pragma: no cover
resolve_output_dir = None # type: ignore

try:
from helper.hydrus import HydrusClient, HydrusRequestError
from API.HydrusNetwork import HydrusClient, HydrusRequestError
except ImportError: # pragma: no cover
HydrusClient = None # type: ignore
HydrusRequestError = RuntimeError # type: ignore

try:
from helper.utils import sha256_file
from SYS.utils import sha256_file
except ImportError: # pragma: no cover
sha256_file = None # type: ignore

try:
from helper.utils_constant import mime_maps
from SYS.utils_constant import mime_maps
except ImportError: # pragma: no cover
mime_maps = {} # type: ignore

@@ -48,7 +48,7 @@ class SearchRecord:
size_bytes: int | None = None
duration_seconds: str | None = None
tags: str | None = None
hash_hex: str | None = None
hash: str | None = None

def as_dict(self) -> dict[str, str]:
payload: dict[str, str] = {"path": self.path}
@@ -58,8 +58,8 @@ class SearchRecord:
payload["duration"] = self.duration_seconds
if self.tags:
payload["tags"] = self.tags
if self.hash_hex:
payload["hash"] = self.hash_hex
if self.hash:
payload["hash"] = self.hash
return payload


@@ -115,7 +115,7 @@ class Search_Store(Cmdlet):

def _ensure_storage_columns(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Ensure storage results have the necessary fields for result_table display."""
store_value = str(get_origin(payload, "") or "").lower()
store_value = str(payload.get("store") or "").lower()
if store_value not in STORAGE_ORIGINS:
return payload

@@ -162,7 +162,7 @@ class Search_Store(Cmdlet):
while i < len(args_list):
arg = args_list[i]
low = arg.lower()
if low in {"-store", "--store", "-storage", "--storage"} and i + 1 < len(args_list):
if low in {"-store", "--store"} and i + 1 < len(args_list):
storage_backend = args_list[i + 1]
i += 2
elif low in {"-tag", "--tag"} and i + 1 < len(args_list):
@@ -199,7 +199,7 @@ class Search_Store(Cmdlet):
log("Provide a search query", file=sys.stderr)
return 1

from helper.folder_store import FolderDB
from API.folder import API_folder_store
from config import get_local_storage_path
import uuid
worker_id = str(uuid.uuid4())
@@ -209,7 +209,7 @@ class Search_Store(Cmdlet):
return 1

# Use context manager to ensure database is always closed
with FolderDB(library_root) as db:
with API_folder_store(library_root) as db:
try:
db.insert_worker(
worker_id,
@@ -231,8 +231,8 @@ class Search_Store(Cmdlet):

table = ResultTable(table_title)

from helper.store import FileStorage
storage = FileStorage(config=config or {})
from Store import Store
storage = Store(config=config or {})

backend_to_search = storage_backend or None
if backend_to_search:
@@ -242,18 +242,21 @@ class Search_Store(Cmdlet):
log(f"Backend '{backend_to_search}' does not support searching", file=sys.stderr)
db.update_worker_status(worker_id, 'error')
return 1
results = target_backend.search_file(query, limit=limit)
results = target_backend.search_store(query, limit=limit)
else:
from helper.hydrus import is_hydrus_available
from API.HydrusNetwork import is_hydrus_available
hydrus_available = is_hydrus_available(config or {})
from Store.HydrusNetwork import HydrusNetwork

all_results = []
for backend_name in storage.list_searchable_backends():
if backend_name.startswith("hydrus") and not hydrus_available:
continue
searched_backends.append(backend_name)
try:
backend_results = storage[backend_name].search_file(query, limit=limit - len(all_results))
backend = storage[backend_name]
if isinstance(backend, HydrusNetwork) and not hydrus_available:
continue
searched_backends.append(backend_name)

backend_results = backend.search_store(query, limit=limit - len(all_results))
if backend_results:
all_results.extend(backend_results)
if len(all_results) >= limit:
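A sketch of the multi-backend search this hunk converges on: Hydrus-backed stores are now skipped by type rather than by name prefix, and each backend exposes search_store. The names are taken from the changed lines; the surrounding aggregation, which the hunk truncates, is an assumption:

from Store import Store
from Store.HydrusNetwork import HydrusNetwork
from API.HydrusNetwork import is_hydrus_available

def search_all_backends(query, limit, config):
    storage = Store(config=config or {})
    hydrus_available = is_hydrus_available(config or {})
    all_results, searched_backends = [], []
    for backend_name in storage.list_searchable_backends():
        backend = storage[backend_name]
        # Skip Hydrus stores by instance check instead of the old name-prefix test.
        if isinstance(backend, HydrusNetwork) and not hydrus_available:
            continue
        searched_backends.append(backend_name)
        backend_results = backend.search_store(query, limit=limit - len(all_results))
        if backend_results:
            all_results.extend(backend_results)
            if len(all_results) >= limit:
                break
    return all_results, searched_backends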
@@ -270,10 +273,10 @@ class Search_Store(Cmdlet):

storage_counts: OrderedDict[str, int] = OrderedDict((name, 0) for name in searched_backends)
for item in results or []:
origin = get_origin(item)
if not origin:
store = get_field(item, "store")
if not store:
continue
key = str(origin).lower()
key = str(store).lower()
if key not in storage_counts:
storage_counts[key] = 0
storage_counts[key] += 1
@@ -295,14 +298,14 @@ class Search_Store(Cmdlet):

item_dict = _as_dict(item)
if store_filter:
origin_val = str(get_origin(item_dict) or "").lower()
if store_filter != origin_val:
store_val = str(item_dict.get("store") or "").lower()
if store_filter != store_val:
continue
normalized = self._ensure_storage_columns(item_dict)

# Make hash/store available for downstream cmdlets without rerunning search
hash_val = normalized.get("hash")
store_val = normalized.get("store") or get_origin(item_dict)
store_val = normalized.get("store") or item_dict.get("store")
if hash_val and not normalized.get("hash"):
normalized["hash"] = hash_val
if store_val and not normalized.get("store"):

@@ -9,8 +9,8 @@ import subprocess
import shutil
import re

from helper.logger import log, debug
from helper.utils import sha256_file
from SYS.logger import log, debug
from SYS.utils import sha256_file
from . import register
from ._shared import (
Cmdlet,
@@ -219,11 +219,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# Update original file in local DB if possible
try:
from config import get_local_storage_path
from helper.folder_store import FolderDB
from API.folder import API_folder_store

storage_path = get_local_storage_path(config)
if storage_path:
with FolderDB(storage_path) as db:
with API_folder_store(storage_path) as db:
# Get original file metadata
# We need to find the original file by hash or path
# Try path first