Add YAPF style + ignore, and format tracked Python files
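The commit adds a YAPF style file and an ignore file, then reformats the tracked Python sources; the hunks below show the resulting changes. The config files themselves are not visible in this view, so the following is only a hedged sketch of what a minimal .style.yapf and .yapfignore typically look like — the option names are standard YAPF settings, but the values actually used by this commit are assumptions:

    # .style.yapf -- illustrative values only; the real settings are not shown in this diff
    [style]
    based_on_style = pep8
    column_limit = 79

    # .yapfignore -- illustrative patterns only
    .venv/
    build/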
@@ -7,7 +7,8 @@ from importlib import import_module as _import_module
Cmdlet = Callable[[Any, Sequence[str], Dict[str, Any]], int]

# Registry of command-name -> cmdlet function
REGISTRY: Dict[str, Cmdlet] = {}
REGISTRY: Dict[str,
Cmdlet] = {}


def _normalize_cmd_name(name: str) -> str:
@@ -52,9 +53,8 @@ import os

cmdlet_dir = os.path.dirname(__file__)
for filename in os.listdir(cmdlet_dir):
if not (
filename.endswith(".py") and not filename.startswith("_") and filename != "__init__.py"
):
if not (filename.endswith(".py") and not filename.startswith("_")
and filename != "__init__.py"):
continue

mod_name = filename[:-3]
@@ -82,7 +82,8 @@ except Exception:
pass

# Import root-level modules that also register cmdlet
for _root_mod in ("select_cmdlet",):
for _root_mod in ("select_cmdlet",
):
try:
_import_module(_root_mod)
except Exception:

@@ -89,7 +89,10 @@ class CmdletArg:
storage_flags = SharedArgs.STORAGE.to_flags()
# Returns: ('--storage', '-storage', '-s')
"""
flags = [f"--{self.name}", f"-{self.name}"] # Both double-dash and single-dash variants
flags = [
f"--{self.name}",
f"-{self.name}"
] # Both double-dash and single-dash variants

# Add short form if alias exists
if self.alias:
@@ -130,8 +133,11 @@ def QueryArg(
description=str(description or ""),
choices=list(choices or []),
handler=handler,
query_key=str(key or name).strip().lower() if str(key or name).strip() else None,
query_aliases=[str(a).strip().lower() for a in (aliases or []) if str(a).strip()],
query_key=str(key or name).strip().lower()
if str(key or name).strip() else None,
query_aliases=[
str(a).strip().lower() for a in (aliases or []) if str(a).strip()
],
query_only=bool(query_only),
)

@@ -208,9 +214,7 @@ class SharedArgs:
# If no config provided, try to load it
if config is None:
try:
from config import load_config

config = load_config()
from SYS.config import load_config
except Exception:
return []

@@ -223,7 +227,9 @@ class SharedArgs:
LOCATION = CmdletArg(
"location",
type="enum",
choices=["hydrus", "0x0", "local"],
choices=["hydrus",
"0x0",
"local"],
required=True,
description="Destination location",
)
@@ -257,15 +263,25 @@ class SharedArgs:
LIBRARY = CmdletArg(
"library",
type="string",
choices=["hydrus", "local", "soulseek", "libgen", "ftp"],
choices=["hydrus",
"local",
"soulseek",
"libgen",
"ftp"],
description="Search library or source location.",
)

TIMEOUT = CmdletArg(
"timeout", type="integer", description="Search or operation timeout in seconds."
"timeout",
type="integer",
description="Search or operation timeout in seconds."
)

LIMIT = CmdletArg("limit", type="integer", description="Maximum number of results to return.")
LIMIT = CmdletArg(
"limit",
type="integer",
description="Maximum number of results to return."
)

# Path/File arguments
PATH = CmdletArg("path", type="string", description="File or directory path.")
@@ -280,18 +296,24 @@ class SharedArgs:
)

REASON = CmdletArg(
"reason", type="string", description="Reason or explanation for the operation."
"reason",
type="string",
description="Reason or explanation for the operation."
)

ARCHIVE = CmdletArg(
"archive",
type="flag",
description="Archive the URL to Wayback Machine, Archive.today, and Archive.ph (requires URL argument in cmdlet).",
description=
"Archive the URL to Wayback Machine, Archive.today, and Archive.ph (requires URL argument in cmdlet).",
alias="arch",
)

@staticmethod
def resolve_storage(storage_value: Optional[str], default: Optional[Path] = None) -> Path:
def resolve_storage(
storage_value: Optional[str],
default: Optional[Path] = None
) -> Path:
"""Resolve a storage location name to a filesystem Path.

Maps storage identifiers (hydrus, local, ftp) to their actual
@@ -394,7 +416,11 @@ class Cmdlet:
detail: List[str] = field(default_factory=list)
"""Detailed explanation lines (for help text)"""
# Execution function: func(result, args, config) -> int
exec: Optional[Callable[[Any, Sequence[str], Dict[str, Any]], int]] = field(default=None)
exec: Optional[Callable[[Any,
Sequence[str],
Dict[str,
Any]],
int]] = field(default=None)

def _collect_names(self) -> List[str]:
"""Collect primary name plus aliases, de-duplicated and normalized."""
@@ -450,7 +476,8 @@ class Cmdlet:
if low in cmdlet.get_flags('library'):
# handle library flag
"""
return {f"-{arg_name}", f"--{arg_name}"}
return {f"-{arg_name}",
f"--{arg_name}"}

def build_flag_registry(self) -> Dict[str, set[str]]:
"""Build a registry of all flag variants for this cmdlet's arguments.
@@ -470,7 +497,10 @@ class Cmdlet:
elif low in flags.get('tag', set()):
# handle tag
"""
return {arg.name: self.get_flags(arg.name) for arg in self.arg}
return {
arg.name: self.get_flags(arg.name)
for arg in self.arg
}


# Tag groups cache (loaded from JSON config file)
@@ -487,7 +517,10 @@ def set_tag_groups_path(path: Path) -> None:
TAG_GROUPS_PATH = path


def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet) -> Dict[str, Any]:
def parse_cmdlet_args(args: Sequence[str],
cmdlet_spec: Dict[str,
Any] | Cmdlet) -> Dict[str,
Any]:
"""Parse command-line arguments based on cmdlet specification.

Extracts argument values from command-line tokens using the argument names
@@ -515,7 +548,8 @@ def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet)
|
||||
result = parse_cmdlet_args(["value1", "-count", "5"], cmdlet)
|
||||
# result = {"path": "value1", "count": "5"}
|
||||
"""
|
||||
result: Dict[str, Any] = {}
|
||||
result: Dict[str,
|
||||
Any] = {}
|
||||
|
||||
# Only accept Cmdlet objects
|
||||
if not isinstance(cmdlet_spec, Cmdlet):
|
||||
@@ -527,7 +561,8 @@ def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet)
|
||||
flagged_args: List[CmdletArg] = [] # args with prefix in definition
|
||||
query_mapped_args: List[CmdletArg] = []
|
||||
|
||||
arg_spec_map: Dict[str, str] = {} # prefix variant -> canonical name (without prefix)
|
||||
arg_spec_map: Dict[str,
|
||||
str] = {} # prefix variant -> canonical name (without prefix)
|
||||
|
||||
for spec in arg_specs:
|
||||
name = spec.name
|
||||
@@ -572,7 +607,8 @@ def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet)
|
||||
|
||||
# Legacy guidance: -hash/--hash was removed in favor of -query "hash:...".
|
||||
# However, some cmdlets may explicitly re-introduce a -hash flag.
|
||||
if token_lower in {"-hash", "--hash"} and token_lower not in arg_spec_map:
|
||||
if token_lower in {"-hash",
|
||||
"--hash"} and token_lower not in arg_spec_map:
|
||||
try:
|
||||
log(
|
||||
'Legacy flag -hash is no longer supported. Use: -query "hash:<sha256>"',
|
||||
@@ -587,7 +623,10 @@ def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet)
|
||||
if token_lower in arg_spec_map:
|
||||
canonical_name = arg_spec_map[token_lower]
|
||||
spec = next(
|
||||
(s for s in arg_specs if str(s.name).lstrip("-").lower() == canonical_name.lower()),
|
||||
(
|
||||
s for s in arg_specs
|
||||
if str(s.name).lstrip("-").lower() == canonical_name.lower()
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
@@ -650,14 +689,18 @@ def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet)
|
||||
|
||||
if query_mapped_args and raw_query is not None:
|
||||
try:
|
||||
from cli_syntax import parse_query as _parse_query
|
||||
from SYS.cli_syntax import parse_query as _parse_query
|
||||
|
||||
parsed_query = _parse_query(str(raw_query))
|
||||
fields = parsed_query.get("fields", {}) if isinstance(parsed_query, dict) else {}
|
||||
fields = parsed_query.get("fields",
|
||||
{}) if isinstance(parsed_query,
|
||||
dict) else {}
|
||||
norm_fields = (
|
||||
{str(k).strip().lower(): v for k, v in fields.items()}
|
||||
if isinstance(fields, dict)
|
||||
else {}
|
||||
{
|
||||
str(k).strip().lower(): v
|
||||
for k, v in fields.items()
|
||||
} if isinstance(fields,
|
||||
dict) else {}
|
||||
)
|
||||
except Exception:
|
||||
norm_fields = {}
|
||||
@@ -667,12 +710,15 @@ def parse_cmdlet_args(args: Sequence[str], cmdlet_spec: Dict[str, Any] | Cmdlet)
|
||||
if not canonical_name:
|
||||
continue
|
||||
# Do not override explicit flags.
|
||||
if canonical_name in result and result.get(canonical_name) not in (None, ""):
|
||||
if canonical_name in result and result.get(canonical_name) not in (None,
|
||||
""):
|
||||
continue
|
||||
try:
|
||||
key = str(getattr(spec, "query_key", "") or "").strip().lower()
|
||||
aliases = getattr(spec, "query_aliases", None)
|
||||
alias_list = [str(a).strip().lower() for a in (aliases or []) if str(a).strip()]
|
||||
alias_list = [
|
||||
str(a).strip().lower() for a in (aliases or []) if str(a).strip()
|
||||
]
|
||||
except Exception:
|
||||
key = ""
|
||||
alias_list = []
|
||||
@@ -761,7 +807,9 @@ def parse_single_hash_query(query: Optional[str]) -> Optional[str]:
|
||||
|
||||
|
||||
def get_hash_for_operation(
|
||||
override_hash: Optional[str], result: Any, field_name: str = "hash"
|
||||
override_hash: Optional[str],
|
||||
result: Any,
|
||||
field_name: str = "hash"
|
||||
) -> Optional[str]:
|
||||
"""Get normalized hash from override or result object, consolidating common pattern.
|
||||
|
||||
@@ -778,9 +826,12 @@ def get_hash_for_operation(
|
||||
if override_hash:
|
||||
return normalize_hash(override_hash)
|
||||
hash_value = (
|
||||
get_field(result, field_name)
|
||||
or getattr(result, field_name, None)
|
||||
or getattr(result, "hash", None)
|
||||
get_field(result,
|
||||
field_name) or getattr(result,
|
||||
field_name,
|
||||
None) or getattr(result,
|
||||
"hash",
|
||||
None)
|
||||
)
|
||||
return normalize_hash(hash_value)
|
||||
|
||||
@@ -792,7 +843,9 @@ def fetch_hydrus_metadata(
|
||||
store_name: Optional[str] = None,
|
||||
hydrus_client: Any = None,
|
||||
**kwargs,
|
||||
) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
|
||||
) -> tuple[Optional[Dict[str,
|
||||
Any]],
|
||||
Optional[int]]:
|
||||
"""Fetch metadata from Hydrus for a given hash, consolidating common fetch pattern.
|
||||
|
||||
Eliminates repeated boilerplate: client initialization, error handling, metadata extraction.
|
||||
@@ -850,7 +903,11 @@ def fetch_hydrus_metadata(
|
||||
return None, 1
|
||||
|
||||
items = payload.get("metadata") if isinstance(payload, dict) else None
|
||||
meta = items[0] if (isinstance(items, list) and items and isinstance(items[0], dict)) else None
|
||||
meta = items[0] if (
|
||||
isinstance(items,
|
||||
list) and items and isinstance(items[0],
|
||||
dict)
|
||||
) else None
|
||||
|
||||
return meta, 0
|
||||
|
||||
@@ -914,7 +971,14 @@ def should_show_help(args: Sequence[str]) -> bool:
|
||||
return 0
|
||||
"""
|
||||
try:
|
||||
return any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args)
|
||||
return any(
|
||||
str(a).lower() in {"-?",
|
||||
"/?",
|
||||
"--help",
|
||||
"-h",
|
||||
"help",
|
||||
"--cmdlet"} for a in args
|
||||
)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
@@ -963,8 +1027,12 @@ def pipeline_item_local_path(item: Any) -> Optional[str]:
|
||||
|
||||
|
||||
def collect_relationship_labels(
|
||||
payload: Any, label_stack: List[str] | None = None, mapping: Dict[str, str] | None = None
|
||||
) -> Dict[str, str]:
|
||||
payload: Any,
|
||||
label_stack: List[str] | None = None,
|
||||
mapping: Dict[str,
|
||||
str] | None = None
|
||||
) -> Dict[str,
|
||||
str]:
|
||||
"""Recursively extract hash-to-label mappings from nested relationship data.
|
||||
|
||||
Walks through nested dicts/lists looking for sha256-like strings (64 hex chars)
|
||||
@@ -1149,7 +1217,8 @@ def _load_tag_groups() -> Dict[str, List[str]]:
|
||||
_TAG_GROUPS_MTIME = mtime
|
||||
return {}
|
||||
|
||||
groups: Dict[str, List[str]] = {}
|
||||
groups: Dict[str,
|
||||
List[str]] = {}
|
||||
if isinstance(payload, dict):
|
||||
for key, value in payload.items():
|
||||
if not isinstance(key, str):
|
||||
@@ -1167,7 +1236,8 @@ def _load_tag_groups() -> Dict[str, List[str]]:
|
||||
normalised = _normalise_tag_group_entry(value)
|
||||
if normalised:
|
||||
members.extend(
|
||||
token.strip() for token in normalised.split(",") if token.strip()
|
||||
token.strip() for token in normalised.split(",")
|
||||
if token.strip()
|
||||
)
|
||||
if members:
|
||||
groups[name] = members
|
||||
@@ -1201,19 +1271,24 @@ def expand_tag_groups(raw_tags: Iterable[str]) -> List[str]:
|
||||
candidate = token.strip()
|
||||
if not candidate:
|
||||
continue
|
||||
if candidate.startswith("{") and candidate.endswith("}") and len(candidate) > 2:
|
||||
if candidate.startswith("{") and candidate.endswith("}") and len(candidate
|
||||
) > 2:
|
||||
name = candidate[1:-1].strip().lower()
|
||||
if not name:
|
||||
continue
|
||||
if name in seen:
|
||||
log(f"Tag group recursion detected for {{{name}}}; skipping", file=sys.stderr)
|
||||
log(
|
||||
f"Tag group recursion detected for {{{name}}}; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
members = groups.get(name)
|
||||
if not members:
|
||||
log(f"Unknown tag group {{{name}}}", file=sys.stderr)
|
||||
result.append(candidate)
|
||||
continue
|
||||
result.extend(_expand(members, seen | {name}))
|
||||
result.extend(_expand(members,
|
||||
seen | {name}))
|
||||
else:
|
||||
result.append(candidate)
|
||||
return result
|
||||
@@ -1291,7 +1366,8 @@ def create_pipe_object_result(
|
||||
parent_hash: Optional[str] = None,
|
||||
tag: Optional[List[str]] = None,
|
||||
**extra: Any,
|
||||
) -> Dict[str, Any]:
|
||||
) -> Dict[str,
|
||||
Any]:
|
||||
"""Create a PipeObject-compatible result dict for pipeline chaining.
|
||||
|
||||
This is a helper to emit results in the standard format that downstream
|
||||
@@ -1395,7 +1471,8 @@ def _extract_flag_value(args: Sequence[str], *flags: str) -> Optional[str]:
|
||||
"""
|
||||
if not args:
|
||||
return None
|
||||
want = {str(f).strip().lower() for f in flags if str(f).strip()}
|
||||
want = {str(f).strip().lower()
|
||||
for f in flags if str(f).strip()}
|
||||
if not want:
|
||||
return None
|
||||
try:
|
||||
@@ -1499,7 +1576,9 @@ def apply_output_path_from_pipeobjects(
|
||||
try:
|
||||
dest_str = str(dest_raw).strip()
|
||||
if "://" in dest_str:
|
||||
_print_live_safe_stderr(f"Ignoring -path value that looks like a URL: {dest_str}")
|
||||
_print_live_safe_stderr(
|
||||
f"Ignoring -path value that looks like a URL: {dest_str}"
|
||||
)
|
||||
return list(emits or [])
|
||||
except Exception:
|
||||
pass
|
||||
@@ -1564,7 +1643,9 @@ def apply_output_path_from_pipeobjects(
|
||||
try:
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as exc:
|
||||
_print_live_safe_stderr(f"Failed to create destination directory: {dest_dir} ({exc})")
|
||||
_print_live_safe_stderr(
|
||||
f"Failed to create destination directory: {dest_dir} ({exc})"
|
||||
)
|
||||
return items
|
||||
|
||||
for idx, src in zip(artifact_indices, artifact_paths):
|
||||
@@ -1572,7 +1653,11 @@ def apply_output_path_from_pipeobjects(
|
||||
final = _unique_destination_path(final)
|
||||
try:
|
||||
if src.resolve() == final.resolve():
|
||||
_apply_saved_path_update(items[idx], old_path=str(src), new_path=str(final))
|
||||
_apply_saved_path_update(
|
||||
items[idx],
|
||||
old_path=str(src),
|
||||
new_path=str(final)
|
||||
)
|
||||
_print_saved_output_panel(items[idx], final)
|
||||
continue
|
||||
except Exception:
|
||||
@@ -1602,7 +1687,9 @@ def apply_output_path_from_pipeobjects(
|
||||
try:
|
||||
final.parent.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as exc:
|
||||
_print_live_safe_stderr(f"Failed to create destination directory: {final.parent} ({exc})")
|
||||
_print_live_safe_stderr(
|
||||
f"Failed to create destination directory: {final.parent} ({exc})"
|
||||
)
|
||||
return items
|
||||
|
||||
final = _unique_destination_path(final)
|
||||
@@ -1667,7 +1754,9 @@ def _print_saved_output_panel(item: Any, final_path: Path) -> None:
|
||||
|
||||
file_hash = ""
|
||||
try:
|
||||
file_hash = str(get_field(item, "hash") or get_field(item, "sha256") or "").strip()
|
||||
file_hash = str(get_field(item,
|
||||
"hash") or get_field(item,
|
||||
"sha256") or "").strip()
|
||||
except Exception:
|
||||
file_hash = ""
|
||||
if not file_hash:
|
||||
@@ -1742,13 +1831,15 @@ def get_pipe_object_hash(pipe_object: Any) -> Optional[str]:
|
||||
"""Extract file hash from PipeObject, dict, or pipeline-friendly object."""
|
||||
if pipe_object is None:
|
||||
return None
|
||||
for attr in ("hash",):
|
||||
for attr in ("hash",
|
||||
):
|
||||
if hasattr(pipe_object, attr):
|
||||
value = getattr(pipe_object, attr)
|
||||
if value:
|
||||
return value
|
||||
if isinstance(pipe_object, dict):
|
||||
for key in ("hash",):
|
||||
for key in ("hash",
|
||||
):
|
||||
value = pipe_object.get(key)
|
||||
if value:
|
||||
return value
|
||||
@@ -1832,7 +1923,8 @@ def filter_results_by_temp(results: List[Any], include_temp: bool = False) -> Li
|
||||
return filtered
|
||||
|
||||
|
||||
def merge_sequences(*sources: Optional[Iterable[Any]], case_sensitive: bool = True) -> list[str]:
|
||||
def merge_sequences(*sources: Optional[Iterable[Any]],
|
||||
case_sensitive: bool = True) -> list[str]:
|
||||
"""Merge iterable sources while preserving order and removing duplicates."""
|
||||
seen: set[str] = set()
|
||||
merged: list[str] = []
|
||||
@@ -1858,7 +1950,9 @@ def merge_sequences(*sources: Optional[Iterable[Any]], case_sensitive: bool = Tr
|
||||
|
||||
|
||||
def collapse_namespace_tags(
|
||||
tags: Optional[Iterable[Any]], namespace: str, prefer: str = "last"
|
||||
tags: Optional[Iterable[Any]],
|
||||
namespace: str,
|
||||
prefer: str = "last"
|
||||
) -> list[str]:
|
||||
"""Reduce tags so only one entry for a given namespace remains.
|
||||
|
||||
@@ -1901,7 +1995,9 @@ def collapse_namespace_tags(
|
||||
|
||||
|
||||
def collapse_namespace_tag(
|
||||
tags: Optional[Iterable[Any]], namespace: str, prefer: str = "last"
|
||||
tags: Optional[Iterable[Any]],
|
||||
namespace: str,
|
||||
prefer: str = "last"
|
||||
) -> list[str]:
|
||||
"""Singular alias for collapse_namespace_tags.
|
||||
|
||||
@@ -2020,7 +2116,10 @@ def extract_duration(result: Any) -> Optional[float]:
|
||||
return None
|
||||
|
||||
|
||||
def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> models.PipeObject:
|
||||
def coerce_to_pipe_object(
|
||||
value: Any,
|
||||
default_path: Optional[str] = None
|
||||
) -> models.PipeObject:
|
||||
"""Normalize any incoming result to a PipeObject for single-source-of-truth state.
|
||||
|
||||
Uses hash+store canonical pattern.
|
||||
@@ -2029,11 +2128,9 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
try:
|
||||
from SYS.logger import is_debug_enabled, debug
|
||||
|
||||
if (
|
||||
is_debug_enabled()
|
||||
and hasattr(value, "__class__")
|
||||
and value.__class__.__name__ == "ResultItem"
|
||||
):
|
||||
if (is_debug_enabled() and hasattr(value,
|
||||
"__class__")
|
||||
and value.__class__.__name__ == "ResultItem"):
|
||||
debug("[ResultItem -> PipeObject conversion]")
|
||||
debug(f" title={getattr(value, 'title', None)}")
|
||||
debug(f" target={getattr(value, 'target', None)}")
|
||||
@@ -2081,29 +2178,30 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
value = value.to_dict()
|
||||
elif not isinstance(value, dict):
|
||||
try:
|
||||
obj_map: Dict[str, Any] = {}
|
||||
obj_map: Dict[str,
|
||||
Any] = {}
|
||||
for k in (
|
||||
"hash",
|
||||
"store",
|
||||
"provider",
|
||||
"prov",
|
||||
"tag",
|
||||
"title",
|
||||
"url",
|
||||
"source_url",
|
||||
"duration",
|
||||
"duration_seconds",
|
||||
"metadata",
|
||||
"full_metadata",
|
||||
"warnings",
|
||||
"path",
|
||||
"target",
|
||||
"relationships",
|
||||
"is_temp",
|
||||
"action",
|
||||
"parent_hash",
|
||||
"extra",
|
||||
"media_kind",
|
||||
"hash",
|
||||
"store",
|
||||
"provider",
|
||||
"prov",
|
||||
"tag",
|
||||
"title",
|
||||
"url",
|
||||
"source_url",
|
||||
"duration",
|
||||
"duration_seconds",
|
||||
"metadata",
|
||||
"full_metadata",
|
||||
"warnings",
|
||||
"path",
|
||||
"target",
|
||||
"relationships",
|
||||
"is_temp",
|
||||
"action",
|
||||
"parent_hash",
|
||||
"extra",
|
||||
"media_kind",
|
||||
):
|
||||
if hasattr(value, k):
|
||||
obj_map[k] = getattr(value, k)
|
||||
@@ -2118,7 +2216,8 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
store_val = value.get("store") or "PATH"
|
||||
if not store_val or store_val == "PATH":
|
||||
try:
|
||||
extra_store = value.get("extra", {}).get("store")
|
||||
extra_store = value.get("extra",
|
||||
{}).get("store")
|
||||
except Exception:
|
||||
extra_store = None
|
||||
if extra_store:
|
||||
@@ -2150,7 +2249,10 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
extra = {k: v for k, v in value.items() if k not in known_keys}
|
||||
extra = {
|
||||
k: v
|
||||
for k, v in value.items() if k not in known_keys
|
||||
}
|
||||
|
||||
# Extract URL: prefer direct url field, then url list
|
||||
from metadata import normalize_urls
|
||||
@@ -2177,17 +2279,16 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
# Only use target as path if it's not a URL (url should stay in url field)
|
||||
if not path_val and "target" in value:
|
||||
target = value["target"]
|
||||
if target and not (
|
||||
isinstance(target, str)
|
||||
and (target.startswith("http://") or target.startswith("https://"))
|
||||
):
|
||||
if target and not (isinstance(target,
|
||||
str) and (target.startswith("http://")
|
||||
or target.startswith("https://"))):
|
||||
path_val = target
|
||||
|
||||
# If the path value is actually a URL, move it to url_val and clear path_val
|
||||
try:
|
||||
if isinstance(path_val, str) and (
|
||||
path_val.startswith("http://") or path_val.startswith("https://")
|
||||
):
|
||||
if isinstance(path_val,
|
||||
str) and (path_val.startswith("http://")
|
||||
or path_val.startswith("https://")):
|
||||
# Prefer existing url_val if present, otherwise move path_val into url_val
|
||||
if not url_val:
|
||||
url_val = path_val
|
||||
@@ -2203,9 +2304,9 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
hash=hash_val,
|
||||
store=store_val,
|
||||
provider=str(
|
||||
value.get("provider") or value.get("prov") or extra.get("provider") or ""
|
||||
).strip()
|
||||
or None,
|
||||
value.get("provider") or value.get("prov") or extra.get("provider")
|
||||
or ""
|
||||
).strip() or None,
|
||||
tag=tag_val,
|
||||
title=title_val,
|
||||
url=url_val,
|
||||
@@ -2215,7 +2316,8 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
warnings=list(value.get("warnings") or []),
|
||||
path=path_val,
|
||||
relationships=rels,
|
||||
is_temp=bool(value.get("is_temp", False)),
|
||||
is_temp=bool(value.get("is_temp",
|
||||
False)),
|
||||
action=value.get("action"),
|
||||
parent_hash=value.get("parent_hash"),
|
||||
extra=extra,
|
||||
@@ -2270,7 +2372,11 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
|
||||
return pipe_obj
|
||||
|
||||
|
||||
def register_url_with_local_library(pipe_obj: models.PipeObject, config: Dict[str, Any]) -> bool:
|
||||
def register_url_with_local_library(
|
||||
pipe_obj: models.PipeObject,
|
||||
config: Dict[str,
|
||||
Any]
|
||||
) -> bool:
|
||||
"""Register url with a file in the local library database.
|
||||
|
||||
This is called automatically by download cmdlet to ensure url are persisted
|
||||
@@ -2285,7 +2391,7 @@ def register_url_with_local_library(pipe_obj: models.PipeObject, config: Dict[st
|
||||
"""
|
||||
|
||||
try:
|
||||
from config import get_local_storage_path
|
||||
from SYS.config import get_local_storage_path
|
||||
from API.folder import API_folder_store
|
||||
|
||||
file_path = get_field(pipe_obj, "path")

File diff suppressed because it is too large
@@ -23,11 +23,13 @@ from SYS.utils import sha256_file


class Add_Note(Cmdlet):

def __init__(self) -> None:
super().__init__(
name="add-note",
summary="Add file store note",
usage='add-note (-query "title:<title>,text:<text>[,store:<store>][,hash:<sha256>]") [ -store <store> | <piped> ]',
usage=
'add-note (-query "title:<title>,text:<text>[,store:<store>][,hash:<sha256>]") [ -store <store> | <piped> ]',
alias=[""],
arg=[
SharedArgs.STORE,
@@ -38,16 +40,15 @@ class Add_Note(Cmdlet):
|
||||
type="string",
|
||||
required=False,
|
||||
handler=normalize_hash,
|
||||
description="(Optional) Specific file hash target, provided via -query as hash:<sha256>. When omitted, uses piped item hash.",
|
||||
description=
|
||||
"(Optional) Specific file hash target, provided via -query as hash:<sha256>. When omitted, uses piped item hash.",
|
||||
query_only=True,
|
||||
),
|
||||
SharedArgs.QUERY,
|
||||
],
|
||||
detail=[
|
||||
"""
|
||||
detail=["""
|
||||
dde
|
||||
"""
|
||||
],
|
||||
"""],
|
||||
exec=self.run,
|
||||
)
|
||||
# Populate dynamic store choices for autocomplete
|
||||
@@ -97,7 +98,7 @@ class Add_Note(Cmdlet):
|
||||
return None, None
|
||||
|
||||
try:
|
||||
from cli_syntax import parse_query, get_field
|
||||
from SYS.cli_syntax import parse_query, get_field
|
||||
except Exception:
|
||||
parse_query = None # type: ignore
|
||||
get_field = None # type: ignore
|
||||
@@ -113,16 +114,24 @@ class Add_Note(Cmdlet):
|
||||
return (name_s or None, text_s or None)
|
||||
|
||||
# Fallback: best-effort regex.
|
||||
name_match = re.search(r"\btitle\s*:\s*([^,\s]+)", normalized, flags=re.IGNORECASE)
|
||||
name_match = re.search(
|
||||
r"\btitle\s*:\s*([^,\s]+)",
|
||||
normalized,
|
||||
flags=re.IGNORECASE
|
||||
)
|
||||
text_match = re.search(r"\btext\s*:\s*(.+)$", normalized, flags=re.IGNORECASE)
|
||||
note_name = name_match.group(1).strip() if name_match else ""
|
||||
note_text = text_match.group(1).strip() if text_match else ""
|
||||
return (note_name or None, note_text or None)
|
||||
|
||||
def _resolve_hash(
|
||||
self, raw_hash: Optional[str], raw_path: Optional[str], override_hash: Optional[str]
|
||||
self,
|
||||
raw_hash: Optional[str],
|
||||
raw_path: Optional[str],
|
||||
override_hash: Optional[str]
|
||||
) -> Optional[str]:
|
||||
resolved = normalize_hash(override_hash) if override_hash else normalize_hash(raw_hash)
|
||||
resolved = normalize_hash(override_hash
|
||||
) if override_hash else normalize_hash(raw_hash)
|
||||
if resolved:
|
||||
return resolved
|
||||
|
||||
@@ -130,7 +139,8 @@ class Add_Note(Cmdlet):
|
||||
try:
|
||||
p = Path(str(raw_path))
|
||||
stem = p.stem
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef"
|
||||
for c in stem.lower()):
|
||||
return stem.lower()
|
||||
if p.exists() and p.is_file():
|
||||
return sha256_file(p)
|
||||
@@ -171,10 +181,18 @@ class Add_Note(Cmdlet):
|
||||
try:
|
||||
store_registry = Store(config)
|
||||
backend = store_registry[str(store_override)]
|
||||
ok = bool(backend.set_note(str(hash_override), note_name, note_text, config=config))
|
||||
ok = bool(
|
||||
backend.set_note(
|
||||
str(hash_override),
|
||||
note_name,
|
||||
note_text,
|
||||
config=config
|
||||
)
|
||||
)
|
||||
if ok:
|
||||
ctx.print_if_visible(
|
||||
f"✓ add-note: 1 item in '{store_override}'", file=sys.stderr
|
||||
f"✓ add-note: 1 item in '{store_override}'",
|
||||
file=sys.stderr
|
||||
)
|
||||
except Exception as exc:
|
||||
log(f"[add_note] Error: Failed to set note: {exc}", file=sys.stderr)
|
||||
@@ -187,7 +205,10 @@ class Add_Note(Cmdlet):
|
||||
if not results:
|
||||
if explicit_target:
|
||||
# Allow standalone use (no piped input) and enable piping the target forward.
|
||||
results = [{"store": str(store_override), "hash": hash_override}]
|
||||
results = [{
|
||||
"store": str(store_override),
|
||||
"hash": hash_override
|
||||
}]
|
||||
else:
|
||||
log(
|
||||
'[add_note] Error: Requires piped item(s) from add-file, or explicit targeting via store/hash (e.g., -query "store:<store> hash:<sha256> ...")',
|
||||
@@ -199,7 +220,10 @@ class Add_Note(Cmdlet):
|
||||
updated = 0
|
||||
|
||||
# Batch write plan: store -> [(hash, name, text), ...]
|
||||
note_ops: Dict[str, List[Tuple[str, str, str]]] = {}
|
||||
note_ops: Dict[str,
|
||||
List[Tuple[str,
|
||||
str,
|
||||
str]]] = {}
|
||||
|
||||
for res in results:
|
||||
if not isinstance(res, dict):
|
||||
@@ -213,7 +237,10 @@ class Add_Note(Cmdlet):
|
||||
raw_path = res.get("path")
|
||||
|
||||
if not store_name:
|
||||
log("[add_note] Error: Missing -store and item has no store field", file=sys.stderr)
|
||||
log(
|
||||
"[add_note] Error: Missing -store and item has no store field",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
resolved_hash = self._resolve_hash(
|
||||
@@ -222,19 +249,28 @@ class Add_Note(Cmdlet):
|
||||
override_hash=str(hash_override) if hash_override else None,
|
||||
)
|
||||
if not resolved_hash:
|
||||
log("[add_note] Warning: Item missing usable hash; skipping", file=sys.stderr)
|
||||
log(
|
||||
"[add_note] Warning: Item missing usable hash; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
try:
|
||||
backend = store_registry[store_name]
|
||||
except Exception as exc:
|
||||
log(f"[add_note] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"[add_note] Error: Unknown store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
# Queue for bulk write per store. We still emit items immediately;
|
||||
# the pipeline only advances after this cmdlet returns.
|
||||
note_ops.setdefault(store_name, []).append((resolved_hash, note_name, item_note_text))
|
||||
note_ops.setdefault(store_name,
|
||||
[]).append((resolved_hash,
|
||||
note_name,
|
||||
item_note_text))
|
||||
updated += 1
|
||||
|
||||
ctx.emit(res)
|
||||
@@ -255,7 +291,8 @@ class Add_Note(Cmdlet):
|
||||
ok = bool(bulk_fn(list(ops), config=config))
|
||||
wrote_any = wrote_any or ok or True
|
||||
ctx.print_if_visible(
|
||||
f"✓ add-note: {len(ops)} item(s) in '{store_name}'", file=sys.stderr
|
||||
f"✓ add-note: {len(ops)} item(s) in '{store_name}'",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
except Exception as exc:
|
||||
|
||||
@@ -23,11 +23,12 @@ get_field = sh.get_field
from API.folder import read_sidecar, find_sidecar, API_folder_store
from Store import Store


CMDLET = Cmdlet(
name="add-relationship",
summary="Associate file relationships (king/alt/related) in Hydrus based on relationship tags in sidecar.",
usage="@1-3 | add-relationship -king @4 OR add-relationship -path <file> OR @1,@2,@3 | add-relationship",
summary=
"Associate file relationships (king/alt/related) in Hydrus based on relationship tags in sidecar.",
usage=
"@1-3 | add-relationship -king @4 OR add-relationship -path <file> OR @1,@2,@3 | add-relationship",
arg=[
CmdletArg(
"path",
@@ -39,17 +40,20 @@ CMDLET = Cmdlet(
|
||||
CmdletArg(
|
||||
"-king",
|
||||
type="string",
|
||||
description="Explicitly set the king hash/file for relationships (e.g., -king @4 or -king hash)",
|
||||
description=
|
||||
"Explicitly set the king hash/file for relationships (e.g., -king @4 or -king hash)",
|
||||
),
|
||||
CmdletArg(
|
||||
"-alt",
|
||||
type="string",
|
||||
description="Explicitly select alt item(s) by @ selection or hash list (e.g., -alt @3-5 or -alt <hash>,<hash>)",
|
||||
description=
|
||||
"Explicitly select alt item(s) by @ selection or hash list (e.g., -alt @3-5 or -alt <hash>,<hash>)",
|
||||
),
|
||||
CmdletArg(
|
||||
"-type",
|
||||
type="string",
|
||||
description="Relationship type for piped items (default: 'alt', options: 'king', 'alt', 'related')",
|
||||
description=
|
||||
"Relationship type for piped items (default: 'alt', options: 'king', 'alt', 'related')",
|
||||
),
|
||||
],
|
||||
detail=[
|
||||
@@ -84,7 +88,8 @@ def _extract_relationships_from_tag(tag_value: str) -> Dict[str, list[str]]:
|
||||
|
||||
Returns a dict like {"king": ["HASH1"], "alt": ["HASH2"], ...}
|
||||
"""
|
||||
result: Dict[str, list[str]] = {}
|
||||
result: Dict[str,
|
||||
list[str]] = {}
|
||||
if not isinstance(tag_value, str):
|
||||
return result
|
||||
|
||||
@@ -126,7 +131,8 @@ def _apply_relationships_from_tags(
|
||||
hydrus_client: Any,
|
||||
use_local_storage: bool,
|
||||
local_storage_path: Optional[Path],
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
) -> int:
|
||||
"""Persist relationship tags into Hydrus or local DB.
|
||||
|
||||
@@ -135,8 +141,7 @@ def _apply_relationships_from_tags(
|
||||
- Store directional alt -> king relationships (no reverse edge).
|
||||
"""
|
||||
rel_tags = [
|
||||
t
|
||||
for t in relationship_tags
|
||||
t for t in relationship_tags
|
||||
if isinstance(t, str) and t.strip().lower().startswith("relationship:")
|
||||
]
|
||||
if not rel_tags:
|
||||
@@ -196,7 +201,12 @@ def _apply_relationships_from_tags(
|
||||
continue
|
||||
if (alt_norm, king_norm) in processed_pairs:
|
||||
continue
|
||||
db.set_relationship_by_hash(alt_norm, king_norm, "alt", bidirectional=False)
|
||||
db.set_relationship_by_hash(
|
||||
alt_norm,
|
||||
king_norm,
|
||||
"alt",
|
||||
bidirectional=False
|
||||
)
|
||||
processed_pairs.add((alt_norm, king_norm))
|
||||
except Exception:
|
||||
return 1
|
||||
@@ -270,7 +280,10 @@ def _resolve_items_from_at(token: str) -> Optional[list[Any]]:
|
||||
def _extract_hash_and_store(item: Any) -> tuple[Optional[str], Optional[str]]:
|
||||
"""Extract (hash_hex, store) from a result item (dict/object)."""
|
||||
try:
|
||||
h = get_field(item, "hash_hex") or get_field(item, "hash") or get_field(item, "file_hash")
|
||||
h = get_field(item,
|
||||
"hash_hex") or get_field(item,
|
||||
"hash") or get_field(item,
|
||||
"file_hash")
|
||||
s = get_field(item, "store")
|
||||
|
||||
hash_norm = _normalise_hash_hex(str(h) if h is not None else None)
|
||||
@@ -336,7 +349,10 @@ def _resolve_king_reference(king_arg: str) -> Optional[str]:
|
||||
|
||||
item = selected[0]
|
||||
item_hash = (
|
||||
get_field(item, "hash_hex") or get_field(item, "hash") or get_field(item, "file_hash")
|
||||
get_field(item,
|
||||
"hash_hex") or get_field(item,
|
||||
"hash") or get_field(item,
|
||||
"file_hash")
|
||||
)
|
||||
|
||||
if item_hash:
|
||||
@@ -354,7 +370,8 @@ def _refresh_relationship_view_if_current(
|
||||
target_hash: Optional[str],
|
||||
target_path: Optional[str],
|
||||
other: Optional[str],
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
) -> None:
|
||||
"""If the current subject matches the target, refresh relationships via get-relationship."""
|
||||
try:
|
||||
@@ -385,29 +402,29 @@ def _refresh_relationship_view_if_current(
|
||||
subj_paths: list[str] = []
|
||||
if isinstance(subject, dict):
|
||||
subj_hashes = [
|
||||
norm(v)
|
||||
for v in [
|
||||
norm(v) for v in [
|
||||
subject.get("hydrus_hash"),
|
||||
subject.get("hash"),
|
||||
subject.get("hash_hex"),
|
||||
subject.get("file_hash"),
|
||||
]
|
||||
if v
|
||||
subject.get("file_hash"), ] if v
|
||||
]
|
||||
subj_paths = [
|
||||
norm(v)
|
||||
for v in [subject.get("file_path"), subject.get("path"), subject.get("target")]
|
||||
norm(v) for v in
|
||||
[subject.get("file_path"), subject.get("path"), subject.get("target")]
|
||||
if v
|
||||
]
|
||||
else:
|
||||
subj_hashes = [
|
||||
norm(getattr(subject, f, None))
|
||||
norm(getattr(subject,
|
||||
f,
|
||||
None))
|
||||
for f in ("hydrus_hash", "hash", "hash_hex", "file_hash")
|
||||
if getattr(subject, f, None)
|
||||
]
|
||||
subj_paths = [
|
||||
norm(getattr(subject, f, None))
|
||||
for f in ("file_path", "path", "target")
|
||||
norm(getattr(subject,
|
||||
f,
|
||||
None)) for f in ("file_path", "path", "target")
|
||||
if getattr(subject, f, None)
|
||||
]
|
||||
|
||||
@@ -472,12 +489,17 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if alt_text.startswith("@"):
|
||||
selected = _resolve_items_from_at(alt_text)
|
||||
if not selected:
|
||||
log(f"Failed to resolve -alt {alt_text}: no selection context", file=sys.stderr)
|
||||
log(
|
||||
f"Failed to resolve -alt {alt_text}: no selection context",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
resolved_alt_items = selected
|
||||
else:
|
||||
# Treat as comma/semicolon-separated list of hashes
|
||||
parts = [p.strip() for p in alt_text.replace(";", ",").split(",") if p.strip()]
|
||||
parts = [
|
||||
p.strip() for p in alt_text.replace(";", ",").split(",") if p.strip()
|
||||
]
|
||||
hashes = [h for h in (_normalise_hash_hex(p) for p in parts) if h]
|
||||
if not hashes:
|
||||
log(
|
||||
@@ -486,25 +508,46 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
)
|
||||
return 1
|
||||
if not override_store:
|
||||
log("-store is required when using -alt with a raw hash list", file=sys.stderr)
|
||||
log(
|
||||
"-store is required when using -alt with a raw hash list",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
resolved_alt_items = [{"hash": h, "store": str(override_store)} for h in hashes]
|
||||
resolved_alt_items = [
|
||||
{
|
||||
"hash": h,
|
||||
"store": str(override_store)
|
||||
} for h in hashes
|
||||
]
|
||||
items_to_process = normalize_result_input(resolved_alt_items)
|
||||
|
||||
# Allow explicit store/hash-first operation via -query "hash:<sha256>" (supports multiple hash: tokens)
|
||||
if (not items_to_process) and override_hashes:
|
||||
if not override_store:
|
||||
log("-store is required when using -query without piped items", file=sys.stderr)
|
||||
log(
|
||||
"-store is required when using -query without piped items",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
items_to_process = [{"hash": h, "store": str(override_store)} for h in override_hashes]
|
||||
items_to_process = [
|
||||
{
|
||||
"hash": h,
|
||||
"store": str(override_store)
|
||||
} for h in override_hashes
|
||||
]
|
||||
|
||||
if not items_to_process and not arg_path:
|
||||
log("No items provided to add-relationship (no piped result and no -path)", file=sys.stderr)
|
||||
log(
|
||||
"No items provided to add-relationship (no piped result and no -path)",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
# If no items from pipeline, just process the -path arg
|
||||
if not items_to_process and arg_path:
|
||||
items_to_process = [{"file_path": arg_path}]
|
||||
items_to_process = [{
|
||||
"file_path": arg_path
|
||||
}]
|
||||
|
||||
# Resolve the king reference once (if provided)
|
||||
king_hash: Optional[str] = None
|
||||
@@ -514,7 +557,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if king_text.startswith("@"):
|
||||
selected = _resolve_items_from_at(king_text)
|
||||
if not selected:
|
||||
log(f"Cannot resolve {king_text}: no selection context", file=sys.stderr)
|
||||
log(
|
||||
f"Cannot resolve {king_text}: no selection context",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
if len(selected) != 1:
|
||||
log(
|
||||
@@ -605,7 +651,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Sidecar/tag import fallback DB root (legacy): if a folder store is selected, use it;
|
||||
# otherwise fall back to configured local storage path.
|
||||
from config import get_local_storage_path
|
||||
from SYS.config import get_local_storage_path
|
||||
|
||||
local_storage_root: Optional[Path] = None
|
||||
if store_root is not None:
|
||||
@@ -629,8 +675,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if sidecar_path is not None and sidecar_path.exists():
|
||||
_, tags, _ = read_sidecar(sidecar_path)
|
||||
relationship_tags = [
|
||||
t
|
||||
for t in (tags or [])
|
||||
t for t in (tags or [])
|
||||
if isinstance(t, str) and t.lower().startswith("relationship:")
|
||||
]
|
||||
if relationship_tags:
|
||||
@@ -657,12 +702,12 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if isinstance(tags_val, list):
|
||||
rel_tags_from_pipe.extend(
|
||||
[
|
||||
t
|
||||
for t in tags_val
|
||||
t for t in tags_val
|
||||
if isinstance(t, str) and t.lower().startswith("relationship:")
|
||||
]
|
||||
)
|
||||
elif isinstance(tags_val, str) and tags_val.lower().startswith("relationship:"):
|
||||
elif isinstance(tags_val,
|
||||
str) and tags_val.lower().startswith("relationship:"):
|
||||
rel_tags_from_pipe.append(tags_val)
|
||||
|
||||
if rel_tags_from_pipe:
|
||||
@@ -686,7 +731,8 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
first_hash = None
|
||||
for item in items_to_process:
|
||||
h, item_store = _extract_hash_and_store(item)
|
||||
if item_store and store_name and str(item_store) != str(store_name):
|
||||
if item_store and store_name and str(item_store) != str(
|
||||
store_name):
|
||||
log(
|
||||
f"Cross-store relationship blocked: item store '{item_store}' != '{store_name}'",
|
||||
file=sys.stderr,
|
||||
@@ -700,7 +746,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# directional alt -> king by default for local DB
|
||||
bidirectional = str(rel_type).lower() != "alt"
|
||||
db.set_relationship_by_hash(
|
||||
h, first_hash, str(rel_type), bidirectional=bidirectional
|
||||
h,
|
||||
first_hash,
|
||||
str(rel_type),
|
||||
bidirectional=bidirectional
|
||||
)
|
||||
return 0
|
||||
|
||||
@@ -717,7 +766,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
continue
|
||||
bidirectional = str(rel_type).lower() != "alt"
|
||||
db.set_relationship_by_hash(
|
||||
h, king_hash, str(rel_type), bidirectional=bidirectional
|
||||
h,
|
||||
king_hash,
|
||||
str(rel_type),
|
||||
bidirectional=bidirectional
|
||||
)
|
||||
return 0
|
||||
except Exception as exc:
|
||||
@@ -798,15 +850,21 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
if isinstance(item, dict):
|
||||
file_hash = item.get("hash_hex") or item.get("hash")
|
||||
file_path_from_result = item.get("file_path") or item.get("path") or item.get("target")
|
||||
file_path_from_result = item.get("file_path") or item.get(
|
||||
"path"
|
||||
) or item.get("target")
|
||||
else:
|
||||
file_hash = getattr(item, "hash_hex", None) or getattr(item, "hash", None)
|
||||
file_path_from_result = getattr(item, "file_path", None) or getattr(item, "path", None)
|
||||
file_path_from_result = getattr(item,
|
||||
"file_path",
|
||||
None) or getattr(item,
|
||||
"path",
|
||||
None)
|
||||
|
||||
# Legacy LOCAL STORAGE MODE: Handle relationships for local files
|
||||
# (kept for -path sidecar workflows; store/hash mode above is preferred)
|
||||
from API.folder import LocalLibrarySearchOptimizer
|
||||
from config import get_local_storage_path
|
||||
from SYS.config import get_local_storage_path
|
||||
|
||||
local_storage_path = get_local_storage_path(config) if config else None
|
||||
use_local_storage = bool(local_storage_path)
|
||||
@@ -847,20 +905,27 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
king_file_path = opt.db.search_hash(normalized_king)
|
||||
if not king_file_path:
|
||||
log(
|
||||
f"King hash not found in local DB: {king_hash}", file=sys.stderr
|
||||
f"King hash not found in local DB: {king_hash}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
bidirectional = str(rel_type).lower() != "alt"
|
||||
opt.db.set_relationship(
|
||||
file_path_obj, king_file_path, rel_type, bidirectional=bidirectional
|
||||
file_path_obj,
|
||||
king_file_path,
|
||||
rel_type,
|
||||
bidirectional=bidirectional
|
||||
)
|
||||
log(
|
||||
f"Set {rel_type} relationship: {file_path_obj.name} -> {king_file_path.name}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
_refresh_relationship_view_if_current(
|
||||
None, str(file_path_obj), str(king_file_path), config
|
||||
None,
|
||||
str(file_path_obj),
|
||||
str(king_file_path),
|
||||
config
|
||||
)
|
||||
else:
|
||||
# Original behavior: first becomes king, rest become alts
|
||||
@@ -871,7 +936,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
if not king_path:
|
||||
try:
|
||||
ctx.store_value("relationship_king_path", str(file_path_obj))
|
||||
ctx.store_value(
|
||||
"relationship_king_path",
|
||||
str(file_path_obj)
|
||||
)
|
||||
log(
|
||||
f"Established king file: {file_path_obj.name}",
|
||||
file=sys.stderr,
|
||||
@@ -893,7 +961,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
file=sys.stderr,
|
||||
)
|
||||
_refresh_relationship_view_if_current(
|
||||
None, str(file_path_obj), str(king_path), config
|
||||
None,
|
||||
str(file_path_obj),
|
||||
str(king_path),
|
||||
config
|
||||
)
|
||||
except Exception as exc:
|
||||
log(f"Local storage error: {exc}", file=sys.stderr)
|
||||
@@ -902,7 +973,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# PIPELINE MODE with Hydrus: Track relationships using hash
|
||||
if file_hash and hydrus_client:
|
||||
file_hash = _normalise_hash_hex(str(file_hash) if file_hash is not None else None)
|
||||
file_hash = _normalise_hash_hex(
|
||||
str(file_hash) if file_hash is not None else None
|
||||
)
|
||||
if not file_hash:
|
||||
log("Invalid file hash format", file=sys.stderr)
|
||||
return 1
|
||||
@@ -917,7 +990,8 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
)
|
||||
_refresh_relationship_view_if_current(
|
||||
file_hash,
|
||||
str(file_path_from_result) if file_path_from_result is not None else None,
|
||||
str(file_path_from_result)
|
||||
if file_path_from_result is not None else None,
|
||||
king_hash,
|
||||
config,
|
||||
)
|
||||
@@ -943,7 +1017,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# If we already have a king and this is a different hash, link them
|
||||
if existing_king and existing_king != file_hash:
|
||||
try:
|
||||
hydrus_client.set_relationship(file_hash, existing_king, rel_type)
|
||||
hydrus_client.set_relationship(
|
||||
file_hash,
|
||||
existing_king,
|
||||
rel_type
|
||||
)
|
||||
log(
|
||||
f"[add-relationship] Set {rel_type} relationship: {file_hash} <-> {existing_king}",
|
||||
file=sys.stderr,
|
||||
@@ -952,8 +1030,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
file_hash,
|
||||
(
|
||||
str(file_path_from_result)
|
||||
if file_path_from_result is not None
|
||||
else None
|
||||
if file_path_from_result is not None else None
|
||||
),
|
||||
existing_king,
|
||||
config,
|
||||
@@ -975,7 +1052,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Resolve media path from -path arg or result target
|
||||
target = getattr(result, "target", None) or getattr(result, "path", None)
|
||||
media_path = (
|
||||
arg_path if arg_path is not None else Path(str(target)) if isinstance(target, str) else None
|
||||
arg_path
|
||||
if arg_path is not None else Path(str(target)) if isinstance(target,
|
||||
str) else None
|
||||
)
|
||||
if media_path is None:
|
||||
log("Provide -path <file> or pipe a local file result", file=sys.stderr)
|
||||
@@ -1055,7 +1134,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
continue
|
||||
|
||||
try:
|
||||
hydrus_client.set_relationship(file_hash, related_hash, rel_type)
|
||||
hydrus_client.set_relationship(
|
||||
file_hash,
|
||||
related_hash,
|
||||
rel_type
|
||||
)
|
||||
log(
|
||||
f"[add-relationship] Set {rel_type} relationship: "
|
||||
f"{file_hash} <-> {related_hash}",
|
||||
@@ -1063,7 +1146,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
)
|
||||
success_count += 1
|
||||
except Exception as exc:
|
||||
log(f"Failed to set {rel_type} relationship: {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"Failed to set {rel_type} relationship: {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
error_count += 1
|
||||
|
||||
except Exception as exc:
|
||||
@@ -1075,7 +1161,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
f"Successfully set {success_count} relationship(s) for {media_path.name}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
ctx.emit(f"add-relationship: {media_path.name} ({success_count} relationships set)")
|
||||
ctx.emit(
|
||||
f"add-relationship: {media_path.name} ({success_count} relationships set)"
|
||||
)
|
||||
return 0
|
||||
elif error_count == 0:
|
||||
log(f"No relationships to set", file=sys.stderr)
|
||||
|
||||
@@ -26,7 +26,6 @@ get_field = sh.get_field
from Store import Store
from SYS.utils import sha256_file


_FIELD_NAME_RE = re.compile(r"^[A-Za-z0-9_]+$")

@@ -106,13 +105,15 @@ def _compile_extract_template(template: str) -> tuple[re.Pattern[str], List[str]
|
||||
last_end = 0
|
||||
|
||||
for idx, m in enumerate(matches):
|
||||
literal = tpl[last_end : m.start()]
|
||||
literal = tpl[last_end:m.start()]
|
||||
if literal:
|
||||
parts.append(_literal_to_title_pattern_regex(literal))
|
||||
|
||||
raw_name = (m.group(1) or "").strip()
|
||||
if not raw_name or not _FIELD_NAME_RE.fullmatch(raw_name):
|
||||
raise ValueError(f"invalid field name '{raw_name}' (use A-Z, 0-9, underscore)")
|
||||
raise ValueError(
|
||||
f"invalid field name '{raw_name}' (use A-Z, 0-9, underscore)"
|
||||
)
|
||||
field_names.append(raw_name)
|
||||
|
||||
name_lower = raw_name.lower()
|
||||
@@ -123,15 +124,15 @@ def _compile_extract_template(template: str) -> tuple[re.Pattern[str], List[str]
|
||||
# Heuristic: common numeric fields should capture full digit runs.
|
||||
# This avoids ambiguous splits like track='2', title='3 ...'.
|
||||
if name_lower in {
|
||||
"disk",
|
||||
"disc",
|
||||
"cd",
|
||||
"track",
|
||||
"trk",
|
||||
"episode",
|
||||
"ep",
|
||||
"season",
|
||||
"year",
|
||||
"disk",
|
||||
"disc",
|
||||
"cd",
|
||||
"track",
|
||||
"trk",
|
||||
"episode",
|
||||
"ep",
|
||||
"season",
|
||||
"year",
|
||||
}:
|
||||
parts.append(rf"(?P<{raw_name}>\d+)")
|
||||
else:
|
||||
@@ -170,7 +171,8 @@ def _extract_tags_from_title(title_text: str, template: str) -> List[str]:
|
||||
|
||||
|
||||
def _get_title_candidates_for_extraction(
|
||||
res: Any, existing_tags: Optional[List[str]] = None
|
||||
res: Any,
|
||||
existing_tags: Optional[List[str]] = None
|
||||
) -> List[str]:
|
||||
"""Return a list of possible title strings in priority order."""
|
||||
|
||||
@@ -209,9 +211,9 @@ def _get_title_candidates_for_extraction(
|
||||
return candidates
|
||||
|
||||
|
||||
def _extract_tags_from_title_candidates(
|
||||
candidates: List[str], template: str
|
||||
) -> tuple[List[str], Optional[str]]:
|
||||
def _extract_tags_from_title_candidates(candidates: List[str],
|
||||
template: str) -> tuple[List[str],
|
||||
Optional[str]]:
|
||||
"""Try candidates in order; return (tags, matched_candidate)."""
|
||||
|
||||
for c in candidates:
|
||||
@@ -223,7 +225,8 @@ def _extract_tags_from_title_candidates(
|
||||
|
||||
def _try_compile_extract_template(
|
||||
template: Optional[str],
|
||||
) -> tuple[Optional[re.Pattern[str]], Optional[str]]:
|
||||
) -> tuple[Optional[re.Pattern[str]],
|
||||
Optional[str]]:
|
||||
"""Compile template for debug; return (pattern, error_message)."""
|
||||
if template is None:
|
||||
return None, None
|
||||
@@ -408,7 +411,8 @@ def _refresh_tag_view(
target_hash: Optional[str],
store_name: Optional[str],
target_path: Optional[str],
config: Dict[str, Any],
config: Dict[str,
Any],
) -> None:
"""Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
try:
@@ -450,13 +454,15 @@ class Add_Tag(Cmdlet):
super().__init__(
name="add-tag",
summary="Add tag to a file in a store.",
usage='add-tag -store <store> [-query "hash:<sha256>"] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]',
usage=
'add-tag -store <store> [-query "hash:<sha256>"] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]',
arg=[
CmdletArg(
"tag",
type="string",
required=False,
description="One or more tag to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tag from pipeline payload.",
description=
"One or more tag to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tag from pipeline payload.",
variadic=True,
),
SharedArgs.QUERY,
@@ -464,27 +470,32 @@ class Add_Tag(Cmdlet):
CmdletArg(
"-extract",
type="string",
description='Extract tags from the item\'s title using a simple template with (field) placeholders. Example: -extract "(artist) - (album) - (disk)-(track) (title)" will add artist:, album:, disk:, track:, title: tags.',
description=
'Extract tags from the item\'s title using a simple template with (field) placeholders. Example: -extract "(artist) - (album) - (disk)-(track) (title)" will add artist:, album:, disk:, track:, title: tags.',
),
CmdletArg(
"--extract-debug",
type="flag",
description="Print debug info for -extract matching (matched title source and extracted tags).",
description=
"Print debug info for -extract matching (matched title source and extracted tags).",
),
CmdletArg(
"-duplicate",
type="string",
description="Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)",
description=
"Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)",
),
CmdletArg(
"-list",
type="string",
description="Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult).",
description=
"Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult).",
),
CmdletArg(
"--all",
type="flag",
description="Include temporary files in tagging (by default, only tag non-temporary files).",
description=
"Include temporary files in tagging (by default, only tag non-temporary files).",
),
],
detail=[
@@ -523,7 +534,10 @@ class Add_Tag(Cmdlet):

query_hash = sh.parse_single_hash_query(parsed.get("query"))
if parsed.get("query") and not query_hash:
log("[add_tag] Error: -query must be of the form hash:<sha256>", file=sys.stderr)
log(
"[add_tag] Error: -query must be of the form hash:<sha256>",
file=sys.stderr
)
return 1

# If add-tag is in the middle of a pipeline (has downstream stages), default to
@@ -532,7 +546,9 @@ class Add_Tag(Cmdlet):
store_override = parsed.get("store")
stage_ctx = ctx.get_stage_context()
has_downstream = bool(
stage_ctx is not None and not getattr(stage_ctx, "is_last_stage", False)
stage_ctx is not None and not getattr(stage_ctx,
"is_last_stage",
False)
)

include_temp = bool(parsed.get("all", False))
@@ -598,11 +614,17 @@ class Add_Tag(Cmdlet):
tag_to_add = expand_tag_groups(tag_to_add)

if not tag_to_add and not extract_template:
log("No tag provided to add (and no -extract template provided)", file=sys.stderr)
log(
"No tag provided to add (and no -extract template provided)",
file=sys.stderr
)
return 1

if extract_template and extract_debug and extract_debug_err:
log(f"[add_tag] extract template error: {extract_debug_err}", file=sys.stderr)
log(
f"[add_tag] extract template error: {extract_debug_err}",
file=sys.stderr
)
return 1

# Get other flags
@@ -643,8 +665,7 @@ class Add_Tag(Cmdlet):
if not store_override:
store_name_str = str(store_name) if store_name is not None else ""
local_mode_requested = (
(not store_name_str)
or (store_name_str.upper() == "PATH")
(not store_name_str) or (store_name_str.upper() == "PATH")
or (store_name_str.lower() == "local")
)
is_known_backend = bool(store_name_str) and store_registry.is_available(
@@ -656,14 +677,16 @@ class Add_Tag(Cmdlet):
if Path(str(raw_path)).expanduser().exists():
existing_tag_list = _extract_item_tags(res)
existing_lower = {
t.lower() for t in existing_tag_list if isinstance(t, str)
t.lower()
for t in existing_tag_list if isinstance(t, str)
}

item_tag_to_add = list(tag_to_add)

if extract_template:
candidates = _get_title_candidates_for_extraction(
res, existing_tag_list
res,
existing_tag_list
)
extracted, matched = _extract_tags_from_title_candidates(
candidates, extract_template
@@ -683,17 +706,20 @@ class Add_Tag(Cmdlet):
if extract_debug:
rx_preview = (
extract_debug_rx.pattern
if extract_debug_rx
else "<uncompiled>"
if extract_debug_rx else "<uncompiled>"
)
cand_preview = "; ".join(
[repr(c) for c in candidates[:3]]
)
cand_preview = "; ".join([repr(c) for c in candidates[:3]])
log(
f"[add_tag] extract no match for template {extract_template!r}. regex: {rx_preview!r}. candidates: {cand_preview}",
file=sys.stderr,
)

item_tag_to_add = collapse_namespace_tag(
item_tag_to_add, "title", prefer="last"
item_tag_to_add,
"title",
prefer="last"
)

if duplicate_arg:
@@ -703,12 +729,17 @@ class Add_Tag(Cmdlet):

if len(parts) > 1:
source_ns = parts[0]
targets = [t.strip() for t in parts[1].split(",") if t.strip()]
targets = [
t.strip() for t in parts[1].split(",")
if t.strip()
]
else:
parts2 = str(duplicate_arg).split(",")
if len(parts2) > 1:
source_ns = parts2[0]
targets = [t.strip() for t in parts2[1:] if t.strip()]
targets = [
t.strip() for t in parts2[1:] if t.strip()
]

if source_ns and targets:
source_prefix = source_ns.lower() + ":"
@@ -730,21 +761,22 @@ class Add_Tag(Cmdlet):
continue
ns_prefix = ns.lower() + ":"
for t in existing_tag_list:
if (
t.lower().startswith(ns_prefix)
and t.lower() != new_tag.lower()
):
if (t.lower().startswith(ns_prefix)
and t.lower() != new_tag.lower()):
removed_namespace_tag.append(t)
removed_namespace_tag = sorted({t for t in removed_namespace_tag})
removed_namespace_tag = sorted(
{t
for t in removed_namespace_tag}
)

actual_tag_to_add = [
t
for t in item_tag_to_add
if isinstance(t, str) and t.lower() not in existing_lower
t for t in item_tag_to_add if isinstance(t, str)
and t.lower() not in existing_lower
]

updated_tag_list = [
t for t in existing_tag_list if t not in removed_namespace_tag
t for t in existing_tag_list
if t not in removed_namespace_tag
]
updated_tag_list.extend(actual_tag_to_add)

@@ -777,13 +809,15 @@ class Add_Tag(Cmdlet):
return 1

resolved_hash = (
normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
normalize_hash(hash_override)
if hash_override else normalize_hash(raw_hash)
)
if not resolved_hash and raw_path:
try:
p = Path(str(raw_path))
stem = p.stem
if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
if len(stem) == 64 and all(c in "0123456789abcdef"
for c in stem.lower()):
resolved_hash = stem.lower()
elif p.exists() and p.is_file():
resolved_hash = sha256_file(p)
@@ -801,7 +835,10 @@ class Add_Tag(Cmdlet):
try:
backend = store_registry[str(store_name)]
except Exception as exc:
log(f"[add_tag] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
log(
f"[add_tag] Error: Unknown store '{store_name}': {exc}",
file=sys.stderr
)
return 1

try:
@@ -810,14 +847,18 @@ class Add_Tag(Cmdlet):
existing_tag = []

existing_tag_list = [t for t in (existing_tag or []) if isinstance(t, str)]
existing_lower = {t.lower() for t in existing_tag_list}
existing_lower = {t.lower()
for t in existing_tag_list}
original_title = _extract_title_tag(existing_tag_list)

# Per-item tag list (do not mutate shared list)
item_tag_to_add = list(tag_to_add)

if extract_template:
candidates2 = _get_title_candidates_for_extraction(res, existing_tag_list)
candidates2 = _get_title_candidates_for_extraction(
res,
existing_tag_list
)
extracted2, matched2 = _extract_tags_from_title_candidates(
candidates2, extract_template
)
@@ -835,7 +876,8 @@ class Add_Tag(Cmdlet):
extract_no_match_items += 1
if extract_debug:
rx_preview2 = (
extract_debug_rx.pattern if extract_debug_rx else "<uncompiled>"
extract_debug_rx.pattern
if extract_debug_rx else "<uncompiled>"
)
cand_preview2 = "; ".join([repr(c) for c in candidates2[:3]])
log(
@@ -843,7 +885,11 @@ class Add_Tag(Cmdlet):
file=sys.stderr,
)

item_tag_to_add = collapse_namespace_tag(item_tag_to_add, "title", prefer="last")
item_tag_to_add = collapse_namespace_tag(
item_tag_to_add,
"title",
prefer="last"
)

# Handle -duplicate logic (copy existing tag to new namespaces)
if duplicate_arg:
@@ -881,15 +927,19 @@ class Add_Tag(Cmdlet):

try:
refreshed_tag, _src2 = backend.get_tag(resolved_hash, config=config)
refreshed_list = [t for t in (refreshed_tag or []) if isinstance(t, str)]
refreshed_list = [
t for t in (refreshed_tag or []) if isinstance(t, str)
]
except Exception:
refreshed_list = existing_tag_list

# Decide whether anything actually changed (case-sensitive so title casing updates count).
if set(refreshed_list) != set(existing_tag_list):
changed = True
before_lower = {t.lower() for t in existing_tag_list}
after_lower = {t.lower() for t in refreshed_list}
before_lower = {t.lower()
for t in existing_tag_list}
after_lower = {t.lower()
for t in refreshed_list}
total_added += len(after_lower - before_lower)
total_modified += 1

@@ -903,7 +953,12 @@ class Add_Tag(Cmdlet):
_apply_title_to_result(res, final_title)

if final_title and (not original_title or final_title != original_title):
_refresh_result_table_title(final_title, resolved_hash, str(store_name), raw_path)
_refresh_result_table_title(
final_title,
resolved_hash,
str(store_name),
raw_path
)

if changed:
_refresh_tag_view(res, resolved_hash, str(store_name), raw_path, config)

@@ -20,7 +20,9 @@ class Add_Url(sh.Cmdlet):
arg=[
sh.SharedArgs.QUERY,
sh.SharedArgs.STORE,
sh.CmdletArg("url", required=True, description="URL to associate"),
sh.CmdletArg("url",
required=True,
description="URL to associate"),
],
detail=[
"- Associates URL with file identified by hash+store",
@@ -54,7 +56,8 @@ class Add_Url(sh.Cmdlet):

# Bulk input is common in pipelines; treat a list of PipeObjects as a batch.
results: List[Any] = (
result if isinstance(result, list) else ([result] if result is not None else [])
result if isinstance(result,
list) else ([result] if result is not None else [])
)

if query_hash and len(results) > 1:
@@ -62,16 +65,22 @@ class Add_Url(sh.Cmdlet):
return 1

# Extract hash and store from result or args
file_hash = query_hash or (sh.get_field(result, "hash") if result is not None else None)
file_hash = query_hash or (
sh.get_field(result,
"hash") if result is not None else None
)
store_name = parsed.get("store") or (
sh.get_field(result, "store") if result is not None else None
sh.get_field(result,
"store") if result is not None else None
)
url_arg = parsed.get("url")

# If we have multiple piped items, we will resolve hash/store per item below.
if not results:
if not file_hash:
log('Error: No file hash provided (pipe an item or use -query "hash:<sha256>")')
log(
'Error: No file hash provided (pipe an item or use -query "hash:<sha256>")'
)
return 1
if not store_name:
log("Error: No store name provided")
@@ -102,7 +111,9 @@ class Add_Url(sh.Cmdlet):
out: List[str] = []
try:
if isinstance(existing, str):
out.extend([p.strip() for p in existing.split(",") if p.strip()])
out.extend(
[p.strip() for p in existing.split(",") if p.strip()]
)
elif isinstance(existing, (list, tuple)):
out.extend([str(u).strip() for u in existing if str(u).strip()])
except Exception:
@@ -131,7 +142,9 @@ class Add_Url(sh.Cmdlet):

# Build batches per store.
store_override = parsed.get("store")
batch: Dict[str, List[Tuple[str, List[str]]]] = {}
batch: Dict[str,
List[Tuple[str,
List[str]]]] = {}
pass_through: List[Any] = []

if results:
@@ -142,21 +155,24 @@ class Add_Url(sh.Cmdlet):
raw_store = store_override or sh.get_field(item, "store")
if not raw_hash or not raw_store:
ctx.print_if_visible(
"[add-url] Warning: Item missing hash/store; skipping", file=sys.stderr
"[add-url] Warning: Item missing hash/store; skipping",
file=sys.stderr
)
continue

normalized = sh.normalize_hash(raw_hash)
if not normalized:
ctx.print_if_visible(
"[add-url] Warning: Item has invalid hash; skipping", file=sys.stderr
"[add-url] Warning: Item has invalid hash; skipping",
file=sys.stderr
)
continue

store_text = str(raw_store).strip()
if not store_text:
ctx.print_if_visible(
"[add-url] Warning: Item has empty store; skipping", file=sys.stderr
"[add-url] Warning: Item has empty store; skipping",
file=sys.stderr
)
continue

@@ -178,7 +194,8 @@ class Add_Url(sh.Cmdlet):
continue

# Coalesce duplicates per hash before passing to backend.
merged: Dict[str, List[str]] = {}
merged: Dict[str,
List[str]] = {}
for h, ulist in pairs:
merged.setdefault(h, [])
for u in ulist or []:
@@ -210,7 +227,10 @@ class Add_Url(sh.Cmdlet):
# Single-item mode
backend = storage[str(store_name)]
backend.add_url(str(file_hash), urls, config=config)
ctx.print_if_visible(f"✓ add-url: {len(urls)} url(s) added", file=sys.stderr)
ctx.print_if_visible(
f"✓ add-url: {len(urls)} url(s) added",
file=sys.stderr
)
if result is not None:
existing = sh.get_field(result, "url")
merged = _merge_urls(existing, list(urls))

@@ -15,7 +15,7 @@ from urllib.parse import parse_qs, urlparse
from SYS.logger import log

import pipeline as ctx
from config import resolve_output_dir
from SYS.config import resolve_output_dir
from . import _shared as sh

Cmdlet = sh.Cmdlet
@@ -26,7 +26,6 @@ create_pipe_object_result = sh.create_pipe_object_result
parse_cmdlet_args = sh.parse_cmdlet_args
should_show_help = sh.should_show_help


_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")


@@ -85,19 +84,27 @@ def _hydrus_instance_names(config: Dict[str, Any]) -> Set[str]:
if isinstance(store_cfg, dict):
hydrus_cfg = store_cfg.get("hydrusnetwork")
if isinstance(hydrus_cfg, dict):
instances = {str(k).strip().lower() for k in hydrus_cfg.keys() if str(k).strip()}
instances = {
str(k).strip().lower()
for k in hydrus_cfg.keys() if str(k).strip()
}
except Exception:
instances = set()
return instances


def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: Path) -> Path | None:
def _maybe_download_hydrus_item(
item: Any,
config: Dict[str,
Any],
output_dir: Path
) -> Path | None:
"""Download a Hydrus-backed item to a local temp path (best-effort).

This is intentionally side-effect free except for writing the local temp file.
"""
try:
from config import get_hydrus_access_key, get_hydrus_url
from SYS.config import get_hydrus_access_key, get_hydrus_url
from API.HydrusNetwork import HydrusNetwork as HydrusClient, download_hydrus_file
except Exception:
return None
@@ -105,7 +112,8 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
store_name = _extract_store_name(item)
store_lower = store_name.lower()
hydrus_instances = _hydrus_instance_names(config)
store_hint = store_lower in {"hydrus", "hydrusnetwork"} or (store_lower in hydrus_instances)
store_hint = store_lower in {"hydrus",
"hydrusnetwork"} or (store_lower in hydrus_instances)

url = _extract_url(item)
file_hash = _extract_sha256_hex(item) or (
@@ -149,8 +157,13 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
# Best-effort extension from Hydrus metadata.
suffix = ".hydrus"
try:
meta_response = client.fetch_file_metadata(hashes=[file_hash], include_mime=True)
entries = meta_response.get("metadata") if isinstance(meta_response, dict) else None
meta_response = client.fetch_file_metadata(
hashes=[file_hash],
include_mime=True
)
entries = meta_response.get("metadata"
) if isinstance(meta_response,
dict) else None
if isinstance(entries, list) and entries:
entry = entries[0]
if isinstance(entry, dict):
@@ -173,7 +186,9 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
if dest.exists():
dest = output_dir / f"{file_hash}_{uuid.uuid4().hex[:10]}{suffix}"

headers = {"Hydrus-Client-API-Access-Key": access_key}
headers = {
"Hydrus-Client-API-Access-Key": access_key
}
download_hydrus_file(file_url, headers, dest, timeout=60.0)
try:
if dest.exists() and dest.is_file():
@@ -183,17 +198,20 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
return None


def _resolve_existing_or_fetch_path(
item: Any, config: Dict[str, Any]
) -> tuple[Path | None, Path | None]:
def _resolve_existing_or_fetch_path(item: Any,
config: Dict[str,
Any]) -> tuple[Path | None,
Path | None]:
"""Return (path, temp_path) where temp_path is non-None only for files we downloaded."""
# 1) Direct local path
try:
po = coerce_to_pipe_object(item, None)
raw_path = (
getattr(po, "path", None)
or getattr(po, "target", None)
or sh.get_pipe_object_path(item)
getattr(po,
"path",
None) or getattr(po,
"target",
None) or sh.get_pipe_object_path(item)
)
if raw_path:
p = Path(str(raw_path)).expanduser()
@@ -223,17 +241,22 @@ def _resolve_existing_or_fetch_path(
if src.strip().lower().startswith(("http://", "https://")):
tmp_base = None
try:
tmp_base = config.get("temp") if isinstance(config, dict) else None
tmp_base = config.get("temp"
) if isinstance(config,
dict) else None
except Exception:
tmp_base = None
out_dir = (
Path(str(tmp_base)).expanduser()
if tmp_base
else (Path(tempfile.gettempdir()) / "Medios-Macina")
Path(str(tmp_base)).expanduser() if tmp_base else
(Path(tempfile.gettempdir()) / "Medios-Macina")
)
out_dir = out_dir / "archive" / "hydrus"
downloaded = _maybe_download_hydrus_item(
{"hash": file_hash, "store": store_name, "url": src.strip()},
{
"hash": file_hash,
"store": store_name,
"url": src.strip()
},
config,
out_dir,
)
@@ -248,9 +271,8 @@ def _resolve_existing_or_fetch_path(
except Exception:
tmp_base = None
out_dir = (
Path(str(tmp_base)).expanduser()
if tmp_base
else (Path(tempfile.gettempdir()) / "Medios-Macina")
Path(str(tmp_base)).expanduser() if tmp_base else
(Path(tempfile.gettempdir()) / "Medios-Macina")
)
out_dir = out_dir / "archive" / "hydrus"
downloaded = _maybe_download_hydrus_item(item, config, out_dir)
@@ -350,21 +372,29 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
stamp = time.strftime("%Y%m%d_%H%M%S")
out_path = out_dir / f"archive_{stamp}.tar.zst"
try:
out_path = sh._unique_destination_path(out_path)  # type: ignore[attr-defined]
out_path = sh._unique_destination_path(
out_path
)  # type: ignore[attr-defined]
except Exception:
pass

try:
out_path.parent.mkdir(parents=True, exist_ok=True)
except Exception as exc:
log(f"Failed to create output directory: {out_path.parent} ({exc})", file=sys.stderr)
log(
f"Failed to create output directory: {out_path.parent} ({exc})",
file=sys.stderr
)
return 1

# Import zstandard lazily so the rest of the CLI still runs without it.
try:
import zstandard as zstd  # type: ignore
except Exception:
log("Missing dependency: zstandard (pip install zstandard)", file=sys.stderr)
log(
"Missing dependency: zstandard (pip install zstandard)",
file=sys.stderr
)
return 1

# Write tar stream into zstd stream.
@@ -372,9 +402,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
with open(out_path, "wb") as out_handle:
cctx = zstd.ZstdCompressor(level=level)
with cctx.stream_writer(out_handle) as compressor:
with tarfile.open(
fileobj=compressor, mode="w|", format=tarfile.PAX_FORMAT
) as tf:
with tarfile.open(fileobj=compressor,
mode="w|",
format=tarfile.PAX_FORMAT) as tf:
seen_names: Set[str] = set()
for p in paths:
arcname = _unique_arcname(p.name, seen_names)
@@ -382,7 +412,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
tf.add(str(p), arcname=arcname, recursive=True)
except Exception as exc:
log(f"Failed to add to archive: {p} ({exc})", file=sys.stderr)
log(
f"Failed to add to archive: {p} ({exc})",
file=sys.stderr
)
except Exception as exc:
log(f"Archive creation failed: {exc}", file=sys.stderr)
return 1
@@ -437,7 +470,9 @@ CMDLET = Cmdlet(
usage="@N | archive-file [-level <1-22>] [-path <path>]",
arg=[
CmdletArg(
"-level", type="integer", description="Zstandard compression level (default: 11)."
"-level",
type="integer",
description="Zstandard compression level (default: 11)."
),
SharedArgs.PATH,
],

@@ -23,18 +23,25 @@ class Delete_File(sh.Cmdlet):
def __init__(self) -> None:
super().__init__(
name="delete-file",
summary="Delete a file locally and/or from Hydrus, including database entries.",
usage='delete-file [-query "hash:<sha256>"] [-conserve <local|hydrus>] [-lib-root <path>] [reason]',
summary=
"Delete a file locally and/or from Hydrus, including database entries.",
usage=
'delete-file [-query "hash:<sha256>"] [-conserve <local|hydrus>] [-lib-root <path>] [reason]',
alias=["del-file"],
arg=[
sh.SharedArgs.QUERY,
sh.CmdletArg(
"conserve", description="Choose which copy to keep: 'local' or 'hydrus'."
"conserve",
description="Choose which copy to keep: 'local' or 'hydrus'."
),
sh.CmdletArg(
"lib-root", description="Path to local library root for database cleanup."
"lib-root",
description="Path to local library root for database cleanup."
),
sh.CmdletArg(
"reason",
description="Optional reason for deletion (free text)."
),
sh.CmdletArg("reason", description="Optional reason for deletion (free text)."),
],
detail=[
"Default removes both the local file and Hydrus file.",
@@ -53,8 +60,10 @@ class Delete_File(sh.Cmdlet):
conserve: str | None,
lib_root: str | None,
reason: str,
config: Dict[str, Any],
) -> List[Dict[str, Any]]:
config: Dict[str,
Any],
) -> List[Dict[str,
Any]]:
"""Process deletion for a single item.

Returns display rows (for the final Rich table). Returning an empty list
@@ -68,9 +77,11 @@ class Delete_File(sh.Cmdlet):
else:
hash_hex_raw = sh.get_field(item, "hash_hex") or sh.get_field(item, "hash")
target = (
sh.get_field(item, "target")
or sh.get_field(item, "file_path")
or sh.get_field(item, "path")
sh.get_field(item,
"target") or sh.get_field(item,
"file_path")
or sh.get_field(item,
"path")
)
title_val = sh.get_field(item, "title") or sh.get_field(item, "name")

@@ -142,11 +153,9 @@ class Delete_File(sh.Cmdlet):
is_hydrus_store = False

# Backwards-compatible fallback heuristic (older items might only carry a name).
if (
(not is_hydrus_store)
and bool(store_lower)
and ("hydrus" in store_lower or store_lower in {"home", "work"})
):
if ((not is_hydrus_store) and bool(store_lower)
and ("hydrus" in store_lower or store_lower in {"home",
"work"})):
is_hydrus_store = True
store_label = str(store) if store else "default"
hydrus_prefix = f"[hydrusnetwork:{store_label}]"
@@ -156,14 +165,16 @@ class Delete_File(sh.Cmdlet):
hash_hex_raw = target

hash_hex = (
sh.normalize_hash(override_hash) if override_hash else sh.normalize_hash(hash_hex_raw)
sh.normalize_hash(override_hash)
if override_hash else sh.normalize_hash(hash_hex_raw)
)

local_deleted = False
local_target = (
isinstance(target, str)
and target.strip()
and not str(target).lower().startswith(("http://", "https://"))
isinstance(target,
str) and target.strip()
and not str(target).lower().startswith(("http://",
"https://"))
)
deleted_rows: List[Dict[str, Any]] = []

@@ -180,7 +191,9 @@ class Delete_File(sh.Cmdlet):
if backend is not None:

# Prefer hash when available.
hash_candidate = sh.normalize_hash(hash_hex_raw) if hash_hex_raw else None
hash_candidate = sh.normalize_hash(
hash_hex_raw
) if hash_hex_raw else None
if not hash_candidate and isinstance(target, str):
hash_candidate = sh.normalize_hash(target)

@@ -189,13 +202,15 @@ class Delete_File(sh.Cmdlet):
if hash_candidate and hasattr(backend, "get_file"):
candidate_path = backend.get_file(hash_candidate)
resolved_path = (
candidate_path if isinstance(candidate_path, Path) else None
candidate_path if isinstance(candidate_path,
Path) else None
)
except Exception:
resolved_path = None

identifier = hash_candidate or (
str(target).strip() if isinstance(target, str) else ""
str(target).strip() if isinstance(target,
str) else ""
)
if identifier:
deleter = getattr(backend, "delete_file", None)
@@ -204,11 +219,10 @@ class Delete_File(sh.Cmdlet):

size_bytes: int | None = None
try:
if (
resolved_path is not None
and isinstance(resolved_path, Path)
and resolved_path.exists()
):
if (resolved_path is not None
and isinstance(resolved_path,
Path)
and resolved_path.exists()):
size_bytes = int(resolved_path.stat().st_size)
except Exception:
size_bytes = None
@@ -216,25 +230,34 @@ class Delete_File(sh.Cmdlet):
deleted_rows.append(
{
"title": (
str(title_val).strip()
if title_val
else (resolved_path.name if resolved_path else identifier)
str(title_val).strip() if title_val else (
resolved_path.name
if resolved_path else identifier
)
),
"store":
store_label,
"hash":
hash_candidate or (hash_hex or ""),
"size_bytes":
size_bytes,
"ext":
_get_ext_from_item() or (
resolved_path.suffix.lstrip(".")
if resolved_path else ""
),
"store": store_label,
"hash": hash_candidate or (hash_hex or ""),
"size_bytes": size_bytes,
"ext": _get_ext_from_item()
or (resolved_path.suffix.lstrip(".") if resolved_path else ""),
}
)

# Best-effort remove sidecars if we know the resolved path.
try:
if resolved_path is not None and isinstance(resolved_path, Path):
if resolved_path is not None and isinstance(
resolved_path,
Path):
for sidecar in (
resolved_path.with_suffix(".tag"),
resolved_path.with_suffix(".metadata"),
resolved_path.with_suffix(".notes"),
resolved_path.with_suffix(".tag"),
resolved_path.with_suffix(".metadata"),
resolved_path.with_suffix(".notes"),
):
try:
if sidecar.exists() and sidecar.is_file():
@@ -266,7 +289,8 @@ class Delete_File(sh.Cmdlet):
local_deleted = True
deleted_rows.append(
{
"title": str(title_val).strip() if title_val else path.name,
"title":
str(title_val).strip() if title_val else path.name,
"store": store_label,
"hash": hash_hex or sh.normalize_hash(path.stem) or "",
"size_bytes": size_bytes,
@@ -282,11 +306,14 @@ class Delete_File(sh.Cmdlet):
local_deleted = True
deleted_rows.append(
{
"title": str(title_val).strip() if title_val else path.name,
"title":
str(title_val).strip() if title_val else path.name,
"store": store_label,
"hash": hash_hex or sh.normalize_hash(path.stem) or "",
"hash": hash_hex or sh.normalize_hash(path.stem)
or "",
"size_bytes": size_bytes,
"ext": _get_ext_from_item() or path.suffix.lstrip("."),
"ext": _get_ext_from_item()
or path.suffix.lstrip("."),
}
)
except Exception as exc:
@@ -299,7 +326,8 @@ class Delete_File(sh.Cmdlet):
local_deleted = True
deleted_rows.append(
{
"title": str(title_val).strip() if title_val else path.name,
"title":
str(title_val).strip() if title_val else path.name,
"store": store_label,
"hash": hash_hex or sh.normalize_hash(path.stem) or "",
"size_bytes": size_bytes,
@@ -311,9 +339,9 @@ class Delete_File(sh.Cmdlet):

# Remove common sidecars regardless of file removal success
for sidecar in (
path.with_suffix(".tag"),
path.with_suffix(".metadata"),
path.with_suffix(".notes"),
path.with_suffix(".tag"),
path.with_suffix(".metadata"),
path.with_suffix(".notes"),
):
try:
if sidecar.exists() and sidecar.is_file():
@@ -370,7 +398,10 @@ class Delete_File(sh.Cmdlet):
return False
if client is None:
if not local_deleted:
log(f"Hydrus client unavailable for store '{store}'", file=sys.stderr)
log(
f"Hydrus client unavailable for store '{store}'",
file=sys.stderr
)
return False
else:
# No store context; use default Hydrus client.
@@ -385,18 +416,29 @@ class Delete_File(sh.Cmdlet):
log("Hydrus client unavailable", file=sys.stderr)
return False

payload: Dict[str, Any] = {"hashes": [hash_hex]}
payload: Dict[str,
Any] = {
"hashes": [hash_hex]
}
if reason:
payload["reason"] = reason
try:
client._post("/add_files/delete_files", data=payload)  # type: ignore[attr-defined]
client._post(
"/add_files/delete_files",
data=payload
)  # type: ignore[attr-defined]
# Best-effort clear deletion record if supported by this client.
try:
clearer = getattr(client, "clear_file_deletion_record", None)
if callable(clearer):
clearer([hash_hex])
else:
client._post("/add_files/clear_file_deletion_record", data={"hashes": [hash_hex]})  # type: ignore[attr-defined]
client._post(
"/add_files/clear_file_deletion_record",
data={
"hashes": [hash_hex]
}
)  # type: ignore[attr-defined]
except Exception:
pass

@@ -408,7 +450,10 @@ class Delete_File(sh.Cmdlet):
file=sys.stderr,
)
else:
debug(f"{hydrus_prefix} Deleted hash:{hash_hex}", file=sys.stderr)
debug(
f"{hydrus_prefix} Deleted hash:{hash_hex}",
file=sys.stderr
)
except Exception:
# If it's not in Hydrus (e.g. 404 or similar), that's fine
if not local_deleted:
@@ -420,7 +465,10 @@ class Delete_File(sh.Cmdlet):
if isinstance(item, dict):
size_hint = item.get("size_bytes") or item.get("size")
else:
size_hint = sh.get_field(item, "size_bytes") or sh.get_field(item, "size")
size_hint = sh.get_field(item,
"size_bytes"
) or sh.get_field(item,
"size")
except Exception:
size_hint = None
deleted_rows.append(
@@ -456,24 +504,32 @@ class Delete_File(sh.Cmdlet):
while i < len(args):
token = args[i]
low = str(token).lower()
if low in {"-query", "--query", "query"} and i + 1 < len(args):
if low in {"-query",
"--query",
"query"} and i + 1 < len(args):
override_query = str(args[i + 1]).strip()
i += 2
continue
if low in {"-conserve", "--conserve"} and i + 1 < len(args):
if low in {"-conserve",
"--conserve"} and i + 1 < len(args):
value = str(args[i + 1]).strip().lower()
if value in {"local", "hydrus"}:
if value in {"local",
"hydrus"}:
conserve = value
i += 2
continue
if low in {"-lib-root", "--lib-root", "lib-root"} and i + 1 < len(args):
if low in {"-lib-root",
"--lib-root",
"lib-root"} and i + 1 < len(args):
lib_root = str(args[i + 1]).strip()
i += 2
continue
reason_tokens.append(token)
i += 1

override_hash = sh.parse_single_hash_query(override_query) if override_query else None
override_hash = sh.parse_single_hash_query(
override_query
) if override_query else None
if override_query and not override_hash:
log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
return 1
@@ -481,8 +537,10 @@ class Delete_File(sh.Cmdlet):
# If no lib_root provided, try to get the first folder store from config
if not lib_root:
try:
storage_config = config.get("storage", {})
folder_config = storage_config.get("folder", {})
storage_config = config.get("storage",
{})
folder_config = storage_config.get("folder",
{})
if folder_config:
# Get first folder store path
for store_name, store_config in folder_config.items():
@@ -494,7 +552,8 @@ class Delete_File(sh.Cmdlet):
except Exception:
pass

reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()
reason = " ".join(token for token in reason_tokens
if str(token).strip()).strip()

items = []
if isinstance(result, list):
@@ -510,7 +569,12 @@ class Delete_File(sh.Cmdlet):
deleted_rows: List[Dict[str, Any]] = []
for item in items:
rows = self._process_single_item(
item, override_hash, conserve, lib_root, reason, config
item,
override_hash,
conserve,
lib_root,
reason,
config
)
if rows:
success_count += 1
@@ -525,7 +589,9 @@ class Delete_File(sh.Cmdlet):
result_row.add_column("Store", row.get("store", ""))
result_row.add_column("Hash", row.get("hash", ""))
result_row.add_column(
"Size", _format_size(row.get("size_bytes"), integer_only=False)
"Size",
_format_size(row.get("size_bytes"),
integer_only=False)
)
result_row.add_column("Ext", row.get("ext", ""))


@@ -22,6 +22,7 @@ from SYS.utils import sha256_file


class Delete_Note(Cmdlet):

def __init__(self) -> None:
super().__init__(
name="delete-note",
@@ -32,7 +33,10 @@ class Delete_Note(Cmdlet):
SharedArgs.STORE,
SharedArgs.QUERY,
CmdletArg(
"name", type="string", required=True, description="The note name/key to delete."
"name",
type="string",
required=True,
description="The note name/key to delete."
),
],
detail=[
@@ -47,16 +51,21 @@ class Delete_Note(Cmdlet):
self.register()

def _resolve_hash(
self, raw_hash: Optional[str], raw_path: Optional[str], override_hash: Optional[str]
self,
raw_hash: Optional[str],
raw_path: Optional[str],
override_hash: Optional[str]
) -> Optional[str]:
resolved = normalize_hash(override_hash) if override_hash else normalize_hash(raw_hash)
resolved = normalize_hash(override_hash
) if override_hash else normalize_hash(raw_hash)
if resolved:
return resolved
if raw_path:
try:
p = Path(str(raw_path))
stem = p.stem
if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
if len(stem) == 64 and all(c in "0123456789abcdef"
for c in stem.lower()):
return stem.lower()
if p.exists() and p.is_file():
return sha256_file(p)
@@ -74,7 +83,10 @@ class Delete_Note(Cmdlet):
store_override = parsed.get("store")
query_hash = sh.parse_single_hash_query(parsed.get("query"))
if parsed.get("query") and not query_hash:
log("[delete_note] Error: -query must be of the form hash:<sha256>", file=sys.stderr)
log(
"[delete_note] Error: -query must be of the form hash:<sha256>",
file=sys.stderr
)
return 1
note_name_override = str(parsed.get("name") or "").strip()
# Allow piping note rows from get-note: the selected item carries note_name.
@@ -89,7 +101,10 @@ class Delete_Note(Cmdlet):
results = normalize_result_input(result)
if not results:
if store_override and query_hash:
results = [{"store": str(store_override), "hash": query_hash}]
results = [{
"store": str(store_override),
"hash": query_hash
}]
else:
log(
'[delete_note] Error: Requires piped item(s) or -store and -query "hash:<sha256>"',
@@ -107,7 +122,8 @@ class Delete_Note(Cmdlet):

# Resolve which note name to delete for this item.
note_name = (
note_name_override or str(res.get("note_name") or "").strip() or inferred_note_name
note_name_override or str(res.get("note_name") or "").strip()
or inferred_note_name
)
if not note_name:
log(
@@ -139,14 +155,20 @@ class Delete_Note(Cmdlet):
try:
backend = store_registry[store_name]
except Exception as exc:
log(f"[delete_note] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
log(
f"[delete_note] Error: Unknown store '{store_name}': {exc}",
file=sys.stderr
)
return 1

ok = False
try:
ok = bool(backend.delete_note(resolved_hash, note_name, config=config))
except Exception as exc:
log(f"[delete_note] Error: Failed to delete note: {exc}", file=sys.stderr)
log(
f"[delete_note] Error: Failed to delete note: {exc}",
file=sys.stderr
)
ok = False

if ok:

@@ -22,16 +22,22 @@ get_field = sh.get_field
should_show_help = sh.should_show_help
from API.folder import API_folder_store
from Store import Store
from config import get_local_storage_path
from SYS.config import get_local_storage_path


def _extract_hash(item: Any) -> Optional[str]:
h = get_field(item, "hash_hex") or get_field(item, "hash") or get_field(item, "file_hash")
h = get_field(item,
"hash_hex") or get_field(item,
"hash") or get_field(item,
"file_hash")
return normalize_hash(str(h)) if h else None


def _upsert_relationships(
db: API_folder_store, file_hash: str, relationships: Dict[str, Any]
db: API_folder_store,
file_hash: str,
relationships: Dict[str,
Any]
) -> None:
conn = db.connection
if conn is None:
@@ -46,12 +52,17 @@ def _upsert_relationships(
time_modified = CURRENT_TIMESTAMP,
updated_at = CURRENT_TIMESTAMP
""",
(file_hash, json.dumps(relationships) if relationships else "{}"),
(file_hash,
json.dumps(relationships) if relationships else "{}"),
)


def _remove_reverse_link(
db: API_folder_store, *, src_hash: str, dst_hash: str, rel_type: str
db: API_folder_store,
*,
src_hash: str,
dst_hash: str,
rel_type: str
) -> None:
meta = db.get_metadata(dst_hash) or {}
rels = meta.get("relationships") if isinstance(meta, dict) else None
@@ -86,7 +97,8 @@ def _refresh_relationship_view_if_current(
target_hash: Optional[str],
target_path: Optional[str],
other: Optional[str],
config: Dict[str, Any],
config: Dict[str,
Any],
) -> None:
"""If the current subject matches the target, refresh relationships via get-relationship."""
try:
@@ -149,7 +161,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""
try:
if should_show_help(args):
log(f"Cmdlet: {CMDLET.name}\nSummary: {CMDLET.summary}\nUsage: {CMDLET.usage}")
log(
f"Cmdlet: {CMDLET.name}\nSummary: {CMDLET.summary}\nUsage: {CMDLET.usage}"
)
return 0

# Parse arguments
@@ -169,22 +183,37 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# Allow store/hash-first usage when no pipeline items were provided
if (not results) and override_hashes:
if not override_store:
log("-store is required when using -query without piped items", file=sys.stderr)
log(
"-store is required when using -query without piped items",
file=sys.stderr
)
return 1
results = [{"hash": h, "store": str(override_store)} for h in override_hashes]
results = [
{
"hash": h,
"store": str(override_store)
} for h in override_hashes
]

if not results:
# Legacy -path mode below may still apply
if raw_path:
results = [{"file_path": raw_path}]
results = [{
"file_path": raw_path
}]
else:
log("No results to process", file=sys.stderr)
return 1

# Decide store (for same-store enforcement + folder-store DB routing)
store_name: Optional[str] = str(override_store).strip() if override_store else None
store_name: Optional[str] = str(override_store
).strip() if override_store else None
if not store_name:
stores = {str(get_field(r, "store")) for r in results if get_field(r, "store")}
stores = {
str(get_field(r,
"store"))
for r in results if get_field(r, "store")
}
if len(stores) == 1:
store_name = next(iter(stores))
elif len(stores) > 1:
@@ -230,9 +259,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
if not file_hash:
# Try path -> hash lookup within this store
fp = (
get_field(single_result, "file_path")
or get_field(single_result, "path")
or get_field(single_result, "target")
get_field(single_result,
"file_path")
or get_field(single_result,
"path")
or get_field(single_result,
"target")
)
if fp:
try:
@@ -247,7 +279,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return 1

meta = db.get_metadata(file_hash) or {}
rels = meta.get("relationships") if isinstance(meta, dict) else None
rels = meta.get("relationships"
) if isinstance(meta,
dict) else None
if not isinstance(rels, dict) or not rels:
continue

@@ -299,7 +333,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

_upsert_relationships(db, file_hash, rels)
conn.commit()
_refresh_relationship_view_if_current(file_hash, None, None, config)
_refresh_relationship_view_if_current(
file_hash,
None,
None,
config
)
deleted_count += 1

log(
@@ -327,10 +366,15 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
for single_result in results:
# Get file path from result
file_path_from_result = (
get_field(single_result, "file_path")
or get_field(single_result, "path")
or get_field(single_result, "target")
or (str(single_result) if not isinstance(single_result, dict) else None)
get_field(single_result,
"file_path") or get_field(single_result,
"path")
or get_field(single_result,
"target")
or (
str(single_result) if not isinstance(single_result,
dict) else None
)
)

if not file_path_from_result:
@@ -349,7 +393,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
file_hash = None
file_hash = normalize_hash(str(file_hash)) if file_hash else None
if not file_hash:
log(f"File not in database: {file_path_obj.name}", file=sys.stderr)
log(
f"File not in database: {file_path_obj.name}",
file=sys.stderr
)
continue

meta = db.get_metadata(file_hash) or {}
@@ -404,14 +451,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
_upsert_relationships(db, file_hash, rels)
conn.commit()
_refresh_relationship_view_if_current(
file_hash, str(file_path_obj), None, config
file_hash,
str(file_path_obj),
None,
config
)
deleted_count += 1
except Exception as exc:
log(f"Error deleting relationship: {exc}", file=sys.stderr)
return 1

log(f"Successfully deleted relationships from {deleted_count} file(s)", file=sys.stderr)
log(
f"Successfully deleted relationships from {deleted_count} file(s)",
file=sys.stderr
)
return 0

except Exception as exc:
@@ -422,16 +475,22 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
CMDLET = Cmdlet(
name="delete-relationship",
summary="Remove relationships from files.",
usage="@1 | delete-relationship --all OR delete-relationship -path <file> --all OR @1-3 | delete-relationship -type alt",
usage=
"@1 | delete-relationship --all OR delete-relationship -path <file> --all OR @1-3 | delete-relationship -type alt",
arg=[
SharedArgs.PATH,
SharedArgs.STORE,
SharedArgs.QUERY,
CmdletArg("all", type="flag", description="Delete all relationships for the file(s)."),
CmdletArg(
"all",
type="flag",
description="Delete all relationships for the file(s)."
),
CmdletArg(
"type",
type="string",
description="Delete specific relationship type ('alt', 'king', 'related'). Default: delete all types.",
description=
"Delete specific relationship type ('alt', 'king', 'related'). Default: delete all types.",
),
],
detail=[

@@ -21,7 +21,11 @@ from Store import Store


def _refresh_tag_view_if_current(
file_hash: str | None, store_name: str | None, path: str | None, config: Dict[str, Any]
file_hash: str | None,
store_name: str | None,
path: str | None,
config: Dict[str,
Any]
) -> None:
"""If the current subject matches the target, refresh tags via get-tag."""
try:
@@ -52,11 +56,17 @@ def _refresh_tag_view_if_current(
subj_paths: list[str] = []
if isinstance(subject, dict):
subj_hashes = [norm(v) for v in [subject.get("hash")] if v]
subj_paths = [norm(v) for v in [subject.get("path"), subject.get("target")] if v]
else:
subj_hashes = [norm(get_field(subject, f)) for f in ("hash",) if get_field(subject, f)]
subj_paths = [
norm(get_field(subject, f)) for f in ("path", "target") if get_field(subject, f)
norm(v) for v in [subject.get("path"), subject.get("target")] if v
]
else:
subj_hashes = [
norm(get_field(subject,
f)) for f in ("hash", ) if get_field(subject, f)
]
subj_paths = [
norm(get_field(subject,
f)) for f in ("path", "target") if get_field(subject, f)
]

is_match = False
@@ -108,11 +118,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return False
# TagItem (direct) or PipeObject/dict emitted from get-tag table rows.
try:
if (
hasattr(obj, "__class__")
and obj.__class__.__name__ == "TagItem"
and hasattr(obj, "tag_name")
):
if (hasattr(obj,
"__class__") and obj.__class__.__name__ == "TagItem"
and hasattr(obj,
"tag_name")):
return True
except Exception:
pass
@@ -123,7 +132,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

has_piped_tag = _looks_like_tag_row(result)
has_piped_tag_list = (
isinstance(result, list) and bool(result) and _looks_like_tag_row(result[0])
isinstance(result,
list) and bool(result) and _looks_like_tag_row(result[0])
)

# Parse -query/-store overrides and collect remaining args.
@@ -135,25 +145,32 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
while i < len(args):
a = args[i]
low = str(a).lower()
if low in {"-query", "--query", "query"} and i + 1 < len(args):
if low in {"-query",
"--query",
"query"} and i + 1 < len(args):
override_query = str(args[i + 1]).strip()
i += 2
continue
if low in {"-store", "--store", "store"} and i + 1 < len(args):
if low in {"-store",
"--store",
"store"} and i + 1 < len(args):
override_store = str(args[i + 1]).strip()
i += 2
continue
rest.append(a)
i += 1

override_hash = sh.parse_single_hash_query(override_query) if override_query else None
override_hash = sh.parse_single_hash_query(
override_query
) if override_query else None
if override_query and not override_hash:
log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
return 1

# Selection syntax (@...) is handled by the pipeline runner, not by this cmdlet.
# If @ reaches here as a literal argument, it's almost certainly user error.
if rest and str(rest[0]).startswith("@") and not (has_piped_tag or has_piped_tag_list):
if rest and str(rest[0]
).startswith("@") and not (has_piped_tag or has_piped_tag_list):
log("Selection syntax is only supported via piping. Use: @N | delete-tag")
return 1

@@ -166,16 +183,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
grouped_table = ""
grouped_tags = get_field(result, "tag") if result is not None else None
tags_arg = parse_tag_arguments(rest)
if (
grouped_table == "tag.selection"
and isinstance(grouped_tags, list)
and grouped_tags
and not tags_arg
):
if (grouped_table == "tag.selection" and isinstance(grouped_tags,
list) and grouped_tags
and not tags_arg):
file_hash = (
normalize_hash(override_hash)
if override_hash
else normalize_hash(get_field(result, "hash"))
if override_hash else normalize_hash(get_field(result,
"hash"))
)
store_name = override_store or get_field(result, "store")
path = get_field(result, "path") or get_field(result, "target")
@@ -200,20 +214,25 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# If we have Files (or other objects) and args, we are deleting tags FROM those files

# Check if we are in "delete selected tags" mode (tag rows)
is_tag_item_mode = bool(items_to_process) and _looks_like_tag_row(items_to_process[0])
is_tag_item_mode = bool(items_to_process) and _looks_like_tag_row(
items_to_process[0]
)

if is_tag_item_mode:
# Collect all tags to delete from the TagItems and batch per file.
# This keeps delete-tag efficient (one backend call per file).
groups: Dict[tuple[str, str, str], list[str]] = {}
groups: Dict[tuple[str,
str,
str],
list[str]] = {}
for item in items_to_process:
tag_name = get_field(item, "tag_name")
if not tag_name:
continue
item_hash = (
normalize_hash(override_hash)
if override_hash
else normalize_hash(get_field(item, "hash"))
if override_hash else normalize_hash(get_field(item,
"hash"))
)
item_store = override_store or get_field(item, "store")
item_path = get_field(item, "path") or get_field(item, "target")
@@ -246,8 +265,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
tags_to_delete: list[str] = []
item_hash = (
normalize_hash(override_hash)
if override_hash
else normalize_hash(get_field(item, "hash"))
if override_hash else normalize_hash(get_field(item,
"hash"))
)
item_path = get_field(item, "path") or get_field(item, "target")
item_store = override_store or get_field(item, "store")
@@ -266,7 +285,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
continue

if tags_to_delete:
if _process_deletion(tags_to_delete, item_hash, item_path, item_store, config):
if _process_deletion(tags_to_delete,
item_hash,
item_path,
item_store,
config):
success_count += 1

if success_count > 0:
@@ -279,7 +302,8 @@ def _process_deletion(
file_hash: str | None,
path: str | None,
store_name: str | None,
config: Dict[str, Any],
config: Dict[str,
Any],
) -> bool:
"""Helper to execute the deletion logic for a single target."""

@@ -287,7 +311,10 @@ def _process_deletion(
return False

if not store_name:
log("Store is required (use -store or pipe a result with store)", file=sys.stderr)
log(
"Store is required (use -store or pipe a result with store)",
file=sys.stderr
)
return False

resolved_hash = normalize_hash(file_hash) if file_hash else None
@@ -315,13 +342,17 @@ def _process_deletion(
return []

# Safety: only block if this deletion would remove the final title tag
title_tags = [t for t in tags if isinstance(t, str) and t.lower().startswith("title:")]
title_tags = [
t for t in tags if isinstance(t, str) and t.lower().startswith("title:")
]
if title_tags:
existing_tags = _fetch_existing_tags()
current_titles = [
t for t in existing_tags if isinstance(t, str) and t.lower().startswith("title:")
t for t in existing_tags
if isinstance(t, str) and t.lower().startswith("title:")
]
del_title_set = {t.lower() for t in title_tags}
del_title_set = {t.lower()
for t in title_tags}
remaining_titles = [t for t in current_titles if t.lower() not in del_title_set]
if current_titles and not remaining_titles:
log(
@@ -335,7 +366,9 @@ def _process_deletion(
ok = backend.delete_tag(resolved_hash, list(tags), config=config)
if ok:
preview = resolved_hash[:12] + ("…" if len(resolved_hash) > 12 else "")
debug(f"Removed {len(tags)} tag(s) from {preview} via store '{store_name}'.")
debug(
f"Removed {len(tags)} tag(s) from {preview} via store '{store_name}'."
)
_refresh_tag_view_if_current(resolved_hash, store_name, path, config)
return True
return False

@@ -54,7 +54,8 @@ class Delete_Url(Cmdlet):

# Bulk input is common in pipelines; treat a list of PipeObjects as a batch.
results: List[Any] = (
result if isinstance(result, list) else ([result] if result is not None else [])
result if isinstance(result,
list) else ([result] if result is not None else [])
)

if query_hash and len(results) > 1:
@@ -62,16 +63,22 @@ class Delete_Url(Cmdlet):
return 1

# Extract hash and store from result or args
file_hash = query_hash or (get_field(result, "hash") if result is not None else None)
file_hash = query_hash or (
get_field(result,
"hash") if result is not None else None
)
store_name = parsed.get("store") or (
get_field(result, "store") if result is not None else None
get_field(result,
"store") if result is not None else None
)
url_arg = parsed.get("url")

# If we have multiple piped items, we will resolve hash/store per item below.
if not results:
if not file_hash:
log('Error: No file hash provided (pipe an item or use -query "hash:<sha256>")')
log(
'Error: No file hash provided (pipe an item or use -query "hash:<sha256>")'
)
return 1
if not store_name:
log("Error: No store name provided")
@@ -110,7 +117,8 @@ class Delete_Url(Cmdlet):
current = [str(u).strip() for u in existing if str(u).strip()]
except Exception:
current = []
remove_set = {u for u in (remove or []) if u}
remove_set = {u
for u in (remove or []) if u}
new_urls = [u for u in current if u not in remove_set]
if len(new_urls) == 1:
return new_urls[0]
@@ -127,7 +135,9 @@ class Delete_Url(Cmdlet):
return

store_override = parsed.get("store")
batch: Dict[str, List[Tuple[str, List[str]]]] = {}
batch: Dict[str,
List[Tuple[str,
List[str]]]] = {}
pass_through: List[Any] = []

if results:
@@ -146,14 +156,16 @@ class Delete_Url(Cmdlet):
normalized = normalize_hash(raw_hash)
if not normalized:
ctx.print_if_visible(
"[delete-url] Warning: Item has invalid hash; skipping", file=sys.stderr
"[delete-url] Warning: Item has invalid hash; skipping",
file=sys.stderr
)
continue

store_text = str(raw_store).strip()
if not store_text:
ctx.print_if_visible(
"[delete-url] Warning: Item has empty store; skipping", file=sys.stderr
"[delete-url] Warning: Item has empty store; skipping",
file=sys.stderr
)
continue
if not storage.is_available(store_text):
@@ -169,15 +181,14 @@ class Delete_Url(Cmdlet):
item_urls = list(urls_from_cli)
if not item_urls:
item_urls = [
u.strip()
for u in normalize_urls(
u.strip() for u in normalize_urls(
get_field(item, "url") or get_field(item, "source_url")
)
if str(u).strip()
) if str(u).strip()
]
if not item_urls:
ctx.print_if_visible(
"[delete-url] Warning: Item has no url field; skipping", file=sys.stderr
"[delete-url] Warning: Item has no url field; skipping",
file=sys.stderr
)
continue

@@ -189,7 +200,8 @@ class Delete_Url(Cmdlet):
except Exception:
continue

merged: Dict[str, List[str]] = {}
merged: Dict[str,
List[str]] = {}
for h, ulist in pairs:
merged.setdefault(h, [])
for u in ulist or []:
@@ -220,11 +232,9 @@ class Delete_Url(Cmdlet):
remove_set = urls_from_cli
if not remove_set:
remove_set = [
u.strip()
for u in normalize_urls(
u.strip() for u in normalize_urls(
get_field(item, "url") or get_field(item, "source_url")
)
if str(u).strip()
) if str(u).strip()
]
_set_item_url(item, _remove_urls(existing, list(remove_set)))
ctx.emit(item)
@@ -233,11 +243,9 @@ class Delete_Url(Cmdlet):
# Single-item mode
if not urls_from_cli:
urls_from_cli = [
u.strip()
for u in normalize_urls(
u.strip() for u in normalize_urls(
get_field(result, "url") or get_field(result, "source_url")
)
if str(u).strip()
) if str(u).strip()
]
if not urls_from_cli:
log("Error: No URL provided")
@@ -246,7 +254,8 @@ class Delete_Url(Cmdlet):
backend = storage[str(store_name)]
backend.delete_url(str(file_hash), list(urls_from_cli), config=config)
ctx.print_if_visible(
f"✓ delete-url: {len(urls_from_cli)} url(s) removed", file=sys.stderr
f"✓ delete-url: {len(urls_from_cli)} url(s) removed",
file=sys.stderr
)
if result is not None:
existing = get_field(result, "url")

@@ -38,8 +38,10 @@ class Download_File(Cmdlet):
super().__init__(
name="download-file",
summary="Download files via HTTP or provider handlers",
usage="download-file <url> [-path DIR] [options] OR @N | download-file [-path DIR|DIR] [options]",
alias=["dl-file", "download-http"],
usage=
"download-file <url> [-path DIR] [options] OR @N | download-file [-path DIR|DIR] [options]",
alias=["dl-file",
"download-http"],
arg=[
SharedArgs.URL,
SharedArgs.PATH,
@@ -86,7 +88,8 @@ class Download_File(Cmdlet):
return expanded_urls

@staticmethod
def _collect_piped_items_if_no_urls(result: Any, raw_urls: Sequence[str]) -> List[Any]:
def _collect_piped_items_if_no_urls(result: Any,
raw_urls: Sequence[str]) -> List[Any]:
if raw_urls:
return []
if isinstance(result, list):
@@ -104,13 +107,18 @@ class Download_File(Cmdlet):

@staticmethod
def _build_preview(
raw_urls: Sequence[str], piped_items: Sequence[Any], total_items: int
raw_urls: Sequence[str],
piped_items: Sequence[Any],
total_items: int
) -> List[Any]:
try:
preview: List[Any] = []
preview.extend(list(raw_urls or [])[: max(0, total_items)])
preview.extend(list(raw_urls or [])[:max(0, total_items)])
if len(preview) < total_items:
preview.extend(list(piped_items or [])[: max(0, total_items - len(preview))])
preview.extend(
list(piped_items or [])[:max(0,
total_items - len(preview))]
)
return preview
except Exception:
return []
@@ -145,8 +153,10 @@ class Download_File(Cmdlet):
|
||||
*,
|
||||
raw_urls: Sequence[str],
|
||||
piped_items: Sequence[Any],
|
||||
parsed: Dict[str, Any],
|
||||
config: Dict[str, Any],
|
||||
parsed: Dict[str,
|
||||
Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
quiet_mode: bool,
|
||||
) -> Optional[int]:
|
||||
"""If the input is an IA item page, show a selectable formats table.
|
||||
@@ -168,7 +178,9 @@ class Download_File(Cmdlet):
|
||||
target = ""
|
||||
if item is not None:
|
||||
try:
|
||||
target = str(get_field(item, "path") or get_field(item, "url") or "").strip()
|
||||
target = str(get_field(item,
|
||||
"path") or get_field(item,
|
||||
"url") or "").strip()
|
||||
except Exception:
|
||||
target = ""
|
||||
if not target and raw_urls:
|
||||
@@ -206,22 +218,28 @@ class Download_File(Cmdlet):
|
||||
try:
|
||||
files = ia.list_download_files(identifier)
|
||||
except Exception as exc:
|
||||
log(f"download-file: Internet Archive lookup failed: {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"download-file: Internet Archive lookup failed: {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
if not files:
|
||||
log("download-file: Internet Archive item has no downloadable files", file=sys.stderr)
|
||||
log(
|
||||
"download-file: Internet Archive item has no downloadable files",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
title = ""
|
||||
try:
|
||||
title = str(get_field(item, "title") or "").strip() if item is not None else ""
|
||||
title = str(get_field(item,
|
||||
"title") or "").strip() if item is not None else ""
|
||||
except Exception:
|
||||
title = ""
|
||||
table_title = (
|
||||
f"Internet Archive: {title}".strip().rstrip(":")
|
||||
if title
|
||||
else f"Internet Archive: {identifier}"
|
||||
if title else f"Internet Archive: {identifier}"
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -256,31 +274,43 @@ class Download_File(Cmdlet):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
row_item: Dict[str, Any] = {
|
||||
"table": "internetarchive",
|
||||
"title": fmt or name,
|
||||
"path": direct_url,
|
||||
"url": direct_url,
|
||||
"columns": [
|
||||
("Format", fmt),
|
||||
("Name", name),
|
||||
("Size", size_val),
|
||||
("Source", src),
|
||||
],
|
||||
"_selection_args": [direct_url],
|
||||
"full_metadata": {
|
||||
"identifier": identifier,
|
||||
"name": name,
|
||||
"format": fmt,
|
||||
"source": src,
|
||||
"size": f.get("size"),
|
||||
},
|
||||
}
|
||||
row_item: Dict[str,
|
||||
Any] = {
|
||||
"table":
|
||||
"internetarchive",
|
||||
"title":
|
||||
fmt or name,
|
||||
"path":
|
||||
direct_url,
|
||||
"url":
|
||||
direct_url,
|
||||
"columns": [
|
||||
("Format",
|
||||
fmt),
|
||||
("Name",
|
||||
name),
|
||||
("Size",
|
||||
size_val),
|
||||
("Source",
|
||||
src),
|
||||
],
|
||||
"_selection_args": [direct_url],
|
||||
"full_metadata": {
|
||||
"identifier": identifier,
|
||||
"name": name,
|
||||
"format": fmt,
|
||||
"source": src,
|
||||
"size": f.get("size"),
|
||||
},
|
||||
}
|
||||
rows.append(row_item)
|
||||
table.add_result(row_item)
|
||||
|
||||
if not rows:
|
||||
log("download-file: no downloadable files found for this item", file=sys.stderr)
|
||||
log(
|
||||
"download-file: no downloadable files found for this item",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
try:
|
||||
@@ -289,7 +319,10 @@ class Download_File(Cmdlet):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
log("Internet Archive item detected: select a file with @N to download", file=sys.stderr)
|
||||
log(
|
||||
"Internet Archive item detected: select a file with @N to download",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 0
|
||||
|
||||
@staticmethod
|
||||
@@ -334,14 +367,15 @@ class Download_File(Cmdlet):
|
||||
title_hint: Optional[str],
|
||||
tags_hint: Optional[List[str]],
|
||||
media_kind_hint: Optional[str],
|
||||
full_metadata: Optional[Dict[str, Any]],
|
||||
full_metadata: Optional[Dict[str,
|
||||
Any]],
|
||||
progress: PipelineProgress,
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
provider_hint: Optional[str] = None,
|
||||
) -> None:
|
||||
title_val = (
|
||||
title_hint or downloaded_path.stem or "Unknown"
|
||||
).strip() or downloaded_path.stem
|
||||
title_val = (title_hint or downloaded_path.stem
|
||||
or "Unknown").strip() or downloaded_path.stem
|
||||
hash_value = self._compute_file_hash(downloaded_path)
|
||||
tag: List[str] = []
|
||||
if tags_hint:
|
||||
@@ -349,16 +383,17 @@ class Download_File(Cmdlet):
|
||||
if not any(str(t).lower().startswith("title:") for t in tag):
|
||||
tag.insert(0, f"title:{title_val}")
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"path": str(downloaded_path),
|
||||
"hash": hash_value,
|
||||
"title": title_val,
|
||||
"action": "cmdlet:download-file",
|
||||
"download_mode": "file",
|
||||
"store": "local",
|
||||
"media_kind": media_kind_hint or "file",
|
||||
"tag": tag,
|
||||
}
|
||||
payload: Dict[str,
|
||||
Any] = {
|
||||
"path": str(downloaded_path),
|
||||
"hash": hash_value,
|
||||
"title": title_val,
|
||||
"action": "cmdlet:download-file",
|
||||
"download_mode": "file",
|
||||
"store": "local",
|
||||
"media_kind": media_kind_hint or "file",
|
||||
"tag": tag,
|
||||
}
|
||||
if provider_hint:
|
||||
payload["provider"] = str(provider_hint)
|
||||
if full_metadata:
|
||||
@@ -384,11 +419,14 @@ class Download_File(Cmdlet):
|
||||
*,
|
||||
raw_urls: Sequence[str],
|
||||
final_output_dir: Path,
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
quiet_mode: bool,
|
||||
registry: Dict[str, Any],
|
||||
registry: Dict[str,
|
||||
Any],
|
||||
progress: PipelineProgress,
|
||||
) -> tuple[int, Optional[int]]:
|
||||
) -> tuple[int,
|
||||
Optional[int]]:
|
||||
downloaded_count = 0
|
||||
|
||||
SearchResult = registry.get("SearchResult")
|
||||
@@ -406,7 +444,8 @@ class Download_File(Cmdlet):
|
||||
except Exception:
|
||||
host = ""
|
||||
|
||||
is_telegram = host in {"t.me", "telegram.me"} or host.endswith(".t.me")
|
||||
is_telegram = host in {"t.me",
|
||||
"telegram.me"} or host.endswith(".t.me")
|
||||
if is_telegram and SearchResult:
|
||||
try:
|
||||
from ProviderCore.registry import get_provider as _get_provider
|
||||
@@ -423,7 +462,10 @@ class Download_File(Cmdlet):
|
||||
)
|
||||
|
||||
sr = SearchResult(
|
||||
table="telegram", title=str(url), path=str(url), full_metadata={}
|
||||
table="telegram",
|
||||
title=str(url),
|
||||
path=str(url),
|
||||
full_metadata={}
|
||||
)
|
||||
downloaded_path = None
|
||||
telegram_info: Optional[Dict[str, Any]] = None
|
||||
@@ -444,14 +486,21 @@ class Download_File(Cmdlet):
|
||||
try:
|
||||
chat_info_raw = telegram_info.get("chat")
|
||||
msg_info_raw = telegram_info.get("message")
|
||||
chat_info: Dict[str, Any] = (
|
||||
chat_info_raw if isinstance(chat_info_raw, dict) else {}
|
||||
)
|
||||
msg_info: Dict[str, Any] = (
|
||||
msg_info_raw if isinstance(msg_info_raw, dict) else {}
|
||||
)
|
||||
chat_info: Dict[str,
|
||||
Any] = (
|
||||
chat_info_raw
|
||||
if isinstance(chat_info_raw,
|
||||
dict) else {}
|
||||
)
|
||||
msg_info: Dict[str,
|
||||
Any] = (
|
||||
msg_info_raw
|
||||
if isinstance(msg_info_raw,
|
||||
dict) else {}
|
||||
)
|
||||
channel = str(
|
||||
chat_info.get("title") or chat_info.get("username") or ""
|
||||
chat_info.get("title") or chat_info.get("username")
|
||||
or ""
|
||||
).strip()
|
||||
post = msg_info.get("id")
|
||||
except Exception:
|
||||
@@ -501,16 +550,12 @@ class Download_File(Cmdlet):
|
||||
p = urlparse(str(url))
|
||||
h = (p.hostname or "").strip().lower()
|
||||
path = (p.path or "").strip().lower()
|
||||
if "libgen" in h and any(
|
||||
x in path
|
||||
for x in (
|
||||
if "libgen" in h and any(x in path for x in (
|
||||
"/edition.php",
|
||||
"/file.php",
|
||||
"/ads.php",
|
||||
"/get.php",
|
||||
"/series.php",
|
||||
)
|
||||
):
|
||||
"/series.php", )):
|
||||
provider_name = "libgen"
|
||||
except Exception:
|
||||
pass
|
||||
@@ -546,7 +591,10 @@ class Download_File(Cmdlet):
|
||||
progress.begin_steps(5)
|
||||
|
||||
def _progress(
|
||||
kind: str, done: int, total: Optional[int], label: str
|
||||
kind: str,
|
||||
done: int,
|
||||
total: Optional[int],
|
||||
label: str
|
||||
) -> None:
|
||||
# kind:
|
||||
# - "step": advance step text
|
||||
@@ -561,12 +609,21 @@ class Download_File(Cmdlet):
|
||||
d = int(done) if isinstance(done, int) else 0
|
||||
if t > 0:
|
||||
pct = int(
|
||||
round((max(0, min(d, t)) / max(1, t)) * 100.0)
|
||||
round(
|
||||
(max(0,
|
||||
min(d,
|
||||
t)) / max(1,
|
||||
t)) * 100.0
|
||||
)
|
||||
)
|
||||
progress.set_percent(pct)
|
||||
progress.set_status(f"downloading pages {d}/{t}")
|
||||
progress.set_status(
|
||||
f"downloading pages {d}/{t}"
|
||||
)
|
||||
else:
|
||||
progress.set_status(f"downloading pages {d}")
|
||||
progress.set_status(
|
||||
f"downloading pages {d}"
|
||||
)
|
||||
return
|
||||
|
||||
if kind == "bytes":
|
||||
@@ -576,14 +633,14 @@ class Download_File(Cmdlet):
|
||||
lbl = "download"
|
||||
progress.begin_transfer(label=lbl, total=total)
|
||||
progress.update_transfer(
|
||||
label=lbl, completed=done, total=total
|
||||
label=lbl,
|
||||
completed=done,
|
||||
total=total
|
||||
)
|
||||
try:
|
||||
if (
|
||||
isinstance(total, int)
|
||||
and total > 0
|
||||
and int(done) >= int(total)
|
||||
):
|
||||
if (isinstance(total,
|
||||
int) and total > 0
|
||||
and int(done) >= int(total)):
|
||||
progress.finish_transfer(label=lbl)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -591,7 +648,11 @@ class Download_File(Cmdlet):
|
||||
|
||||
progress_cb = _progress
|
||||
|
||||
downloaded_path = provider.download(sr, final_output_dir, progress_callback=progress_cb) # type: ignore[call-arg]
|
||||
downloaded_path = provider.download(
|
||||
sr,
|
||||
final_output_dir,
|
||||
progress_callback=progress_cb
|
||||
) # type: ignore[call-arg]
|
||||
except Exception as exc:
|
||||
raise DownloadError(str(exc))
|
||||
|
||||
@@ -635,7 +696,12 @@ class Download_File(Cmdlet):
|
||||
if callable(exec_fn):
|
||||
ret = exec_fn(
|
||||
None,
|
||||
["-provider", "libgen", "-query", fallback_query],
|
||||
[
|
||||
"-provider",
|
||||
"libgen",
|
||||
"-query",
|
||||
fallback_query
|
||||
],
|
||||
config,
|
||||
)
|
||||
try:
|
||||
@@ -643,7 +709,8 @@ class Download_File(Cmdlet):
|
||||
items = pipeline_context.get_last_result_items()
|
||||
if table is not None:
|
||||
pipeline_context.set_last_result_table_overlay(
|
||||
table, items
|
||||
table,
|
||||
items
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -665,7 +732,10 @@ class Download_File(Cmdlet):
|
||||
provider = get_provider(provider_name, config)
|
||||
if provider is not None and hasattr(provider, "download_url"):
|
||||
try:
|
||||
downloaded_path = provider.download_url(str(url), final_output_dir) # type: ignore[attr-defined]
|
||||
downloaded_path = provider.download_url(
|
||||
str(url),
|
||||
final_output_dir
|
||||
) # type: ignore[attr-defined]
|
||||
except Exception as exc:
|
||||
raise DownloadError(str(exc))
|
||||
|
||||
@@ -694,14 +764,20 @@ class Download_File(Cmdlet):
|
||||
path=str(url),
|
||||
full_metadata={},
|
||||
)
|
||||
downloaded_path = provider.download(sr_obj, final_output_dir) # type: ignore[call-arg]
|
||||
downloaded_path = provider.download(
|
||||
sr_obj,
|
||||
final_output_dir
|
||||
) # type: ignore[call-arg]
|
||||
except Exception:
|
||||
downloaded_path = None
|
||||
|
||||
# Refuse to fall back to direct-download for LibGen landing pages.
|
||||
# This prevents saving HTML (e.g. edition.php) as a bogus file.
|
||||
if (not downloaded_path) and str(provider_name).lower() == "libgen":
|
||||
raise DownloadError("LibGen URL did not resolve to a downloadable file")
|
||||
if (not downloaded_path
|
||||
) and str(provider_name).lower() == "libgen":
|
||||
raise DownloadError(
|
||||
"LibGen URL did not resolve to a downloadable file"
|
||||
)
|
||||
|
||||
if downloaded_path:
|
||||
emit_tags: Optional[List[str]] = None
|
||||
@@ -709,12 +785,15 @@ class Download_File(Cmdlet):
|
||||
title_hint = Path(str(downloaded_path)).stem
|
||||
media_kind_hint = "file"
|
||||
|
||||
if str(provider_name).lower() == "libgen" and sr_obj is not None:
|
||||
if str(provider_name
|
||||
).lower() == "libgen" and sr_obj is not None:
|
||||
media_kind_hint = "book"
|
||||
try:
|
||||
sr_tags = getattr(sr_obj, "tag", None)
|
||||
if isinstance(sr_tags, set) and sr_tags:
|
||||
emit_tags = sorted([str(t) for t in sr_tags if t])
|
||||
emit_tags = sorted(
|
||||
[str(t) for t in sr_tags if t]
|
||||
)
|
||||
except Exception:
|
||||
emit_tags = None
|
||||
|
||||
@@ -774,8 +853,10 @@ class Download_File(Cmdlet):
|
||||
self,
|
||||
*,
|
||||
piped_items: Sequence[Any],
|
||||
registry: Dict[str, Any],
|
||||
config: Dict[str, Any],
|
||||
registry: Dict[str,
|
||||
Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
) -> List[Any]:
|
||||
get_search_provider = registry.get("get_search_provider")
|
||||
expanded_items: List[Any] = []
|
||||
@@ -786,18 +867,14 @@ class Download_File(Cmdlet):
|
||||
full_metadata = get_field(item, "full_metadata")
|
||||
target = get_field(item, "path") or get_field(item, "url")
|
||||
|
||||
if (
|
||||
str(table or "").lower() == "alldebrid"
|
||||
and str(media_kind or "").lower() == "folder"
|
||||
):
|
||||
if (str(table or "").lower() == "alldebrid"
|
||||
and str(media_kind or "").lower() == "folder"):
|
||||
magnet_id = None
|
||||
if isinstance(full_metadata, dict):
|
||||
magnet_id = full_metadata.get("magnet_id")
|
||||
if (
|
||||
magnet_id is None
|
||||
and isinstance(target, str)
|
||||
and target.lower().startswith("alldebrid:magnet:")
|
||||
):
|
||||
if (magnet_id is None and isinstance(target,
|
||||
str)
|
||||
and target.lower().startswith("alldebrid:magnet:")):
|
||||
try:
|
||||
magnet_id = int(target.split(":")[-1])
|
||||
except Exception:
|
||||
@@ -810,17 +887,18 @@ class Download_File(Cmdlet):
|
||||
files = provider.search(
|
||||
"*",
|
||||
limit=10_000,
|
||||
filters={"view": "files", "magnet_id": int(magnet_id)},
|
||||
filters={
|
||||
"view": "files",
|
||||
"magnet_id": int(magnet_id)
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
files = []
|
||||
|
||||
# If the magnet isn't ready, provider.search returns a single not-ready folder row.
|
||||
if (
|
||||
files
|
||||
and len(files) == 1
|
||||
and getattr(files[0], "media_kind", "") == "folder"
|
||||
):
|
||||
if (files and len(files) == 1 and getattr(files[0],
|
||||
"media_kind",
|
||||
"") == "folder"):
|
||||
detail = getattr(files[0], "detail", "")
|
||||
log(
|
||||
f"[download-file] AllDebrid magnet {magnet_id} not ready ({detail or 'unknown'})",
|
||||
@@ -829,7 +907,8 @@ class Download_File(Cmdlet):
|
||||
else:
|
||||
for sr in files:
|
||||
expanded_items.append(
|
||||
sr.to_dict() if hasattr(sr, "to_dict") else sr
|
||||
sr.to_dict() if hasattr(sr,
|
||||
"to_dict") else sr
|
||||
)
|
||||
continue
|
||||
|
||||
@@ -844,9 +923,11 @@ class Download_File(Cmdlet):
|
||||
*,
|
||||
piped_items: Sequence[Any],
|
||||
final_output_dir: Path,
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
quiet_mode: bool,
|
||||
registry: Dict[str, Any],
|
||||
registry: Dict[str,
|
||||
Any],
|
||||
progress: PipelineProgress,
|
||||
) -> int:
|
||||
downloaded_count = 0
|
||||
@@ -854,7 +935,9 @@ class Download_File(Cmdlet):
|
||||
SearchResult = registry.get("SearchResult")
|
||||
|
||||
expanded_items = self._expand_provider_items(
|
||||
piped_items=piped_items, registry=registry, config=config
|
||||
piped_items=piped_items,
|
||||
registry=registry,
|
||||
config=config
|
||||
)
|
||||
|
||||
for item in expanded_items:
|
||||
@@ -871,11 +954,10 @@ class Download_File(Cmdlet):
|
||||
tags_list = None
|
||||
|
||||
full_metadata = get_field(item, "full_metadata")
|
||||
if (
|
||||
(not full_metadata)
|
||||
and isinstance(item, dict)
|
||||
and isinstance(item.get("extra"), dict)
|
||||
):
|
||||
if ((not full_metadata) and isinstance(item,
|
||||
dict)
|
||||
and isinstance(item.get("extra"),
|
||||
dict)):
|
||||
extra_md = item["extra"].get("full_metadata")
|
||||
if isinstance(extra_md, dict):
|
||||
full_metadata = extra_md
|
||||
@@ -892,9 +974,13 @@ class Download_File(Cmdlet):
|
||||
table=str(table),
|
||||
title=str(title or "Unknown"),
|
||||
path=str(target or ""),
|
||||
full_metadata=full_metadata if isinstance(full_metadata, dict) else {},
|
||||
full_metadata=full_metadata
|
||||
if isinstance(full_metadata,
|
||||
dict) else {},
|
||||
)
|
||||
debug(
|
||||
f"[download-file] Downloading provider item via {table}: {sr.title}"
|
||||
)
|
||||
debug(f"[download-file] Downloading provider item via {table}: {sr.title}")
|
||||
|
||||
# Preserve provider structure when possible (AllDebrid folders -> subfolders).
|
||||
output_dir = final_output_dir
|
||||
@@ -902,16 +988,21 @@ class Download_File(Cmdlet):
|
||||
if str(table).strip().lower() == "alldebrid":
|
||||
from ProviderCore.download import sanitize_filename as _sf
|
||||
|
||||
md = full_metadata if isinstance(full_metadata, dict) else {}
|
||||
md = full_metadata if isinstance(full_metadata,
|
||||
dict) else {}
|
||||
magnet_name = None
|
||||
if isinstance(md, dict):
|
||||
magnet_name = md.get("magnet_name") or md.get("folder")
|
||||
magnet_name = md.get("magnet_name"
|
||||
) or md.get("folder")
|
||||
if not magnet_name:
|
||||
magnet_name = (
|
||||
str(get_field(item, "detail") or "").strip() or None
|
||||
str(get_field(item,
|
||||
"detail") or "").strip() or None
|
||||
)
|
||||
|
||||
magnet_dir_name = _sf(str(magnet_name)) if magnet_name else ""
|
||||
magnet_dir_name = _sf(
|
||||
str(magnet_name)
|
||||
) if magnet_name else ""
|
||||
|
||||
# If user already chose -path that ends with the magnet folder name,
|
||||
# don't create a duplicate nested folder.
|
||||
@@ -919,12 +1010,14 @@ class Download_File(Cmdlet):
|
||||
base_tail = str(Path(output_dir).name or "")
|
||||
except Exception:
|
||||
base_tail = ""
|
||||
base_tail_norm = _sf(base_tail).lower() if base_tail.strip() else ""
|
||||
magnet_dir_norm = magnet_dir_name.lower() if magnet_dir_name else ""
|
||||
base_tail_norm = _sf(base_tail).lower(
|
||||
) if base_tail.strip() else ""
|
||||
magnet_dir_norm = magnet_dir_name.lower(
|
||||
) if magnet_dir_name else ""
|
||||
|
||||
if magnet_dir_name and (
|
||||
not base_tail_norm or base_tail_norm != magnet_dir_norm
|
||||
):
|
||||
if magnet_dir_name and (not base_tail_norm
|
||||
or base_tail_norm
|
||||
!= magnet_dir_norm):
|
||||
output_dir = Path(output_dir) / magnet_dir_name
|
||||
|
||||
relpath = None
|
||||
@@ -935,8 +1028,8 @@ class Download_File(Cmdlet):
|
||||
|
||||
if relpath:
|
||||
parts = [
|
||||
p
|
||||
for p in str(relpath).replace("\\", "/").split("/")
|
||||
p for p in str(relpath).replace("\\", "/"
|
||||
).split("/")
|
||||
if p and p not in {".", ".."}
|
||||
]
|
||||
|
||||
@@ -964,11 +1057,8 @@ class Download_File(Cmdlet):
|
||||
provider_sr = sr
|
||||
|
||||
# OpenLibrary: if provider download failed, do NOT try to download the OpenLibrary page HTML.
|
||||
if (
|
||||
downloaded_path is None
|
||||
and attempted_provider_download
|
||||
and str(table or "").lower() == "openlibrary"
|
||||
):
|
||||
if (downloaded_path is None and attempted_provider_download
|
||||
and str(table or "").lower() == "openlibrary"):
|
||||
availability = None
|
||||
reason = None
|
||||
if isinstance(full_metadata, dict):
|
||||
@@ -1002,7 +1092,10 @@ class Download_File(Cmdlet):
|
||||
|
||||
ret = exec_fn(
|
||||
None,
|
||||
["-provider", "libgen", "-query", fallback_query],
|
||||
["-provider",
|
||||
"libgen",
|
||||
"-query",
|
||||
fallback_query],
|
||||
config,
|
||||
)
|
||||
|
||||
@@ -1012,7 +1105,8 @@ class Download_File(Cmdlet):
|
||||
items_obj = pipeline_context.get_last_result_items()
|
||||
if table_obj is not None:
|
||||
pipeline_context.set_last_result_table_overlay(
|
||||
table_obj, items_obj
|
||||
table_obj,
|
||||
items_obj
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -1027,16 +1121,15 @@ class Download_File(Cmdlet):
|
||||
continue
|
||||
|
||||
# Fallback: if we have a direct HTTP URL, download it directly
|
||||
if (
|
||||
downloaded_path is None
|
||||
and isinstance(target, str)
|
||||
and target.startswith("http")
|
||||
):
|
||||
if (downloaded_path is None and isinstance(target,
|
||||
str)
|
||||
and target.startswith("http")):
|
||||
# Guard: provider landing pages (e.g. LibGen ads.php) are HTML, not files.
|
||||
# Never download these as "files".
|
||||
if str(table or "").lower() == "libgen":
|
||||
low = target.lower()
|
||||
if ("/ads.php" in low) or ("/file.php" in low) or ("/index.php" in low):
|
||||
if ("/ads.php" in low) or ("/file.php" in low) or ("/index.php"
|
||||
in low):
|
||||
log(
|
||||
"[download-file] Refusing to download LibGen landing page (expected provider to resolve file link)",
|
||||
file=sys.stderr,
|
||||
@@ -1092,7 +1185,8 @@ class Download_File(Cmdlet):
|
||||
title_hint=str(title) if title else downloaded_path.stem,
|
||||
tags_hint=tags_list,
|
||||
media_kind_hint=str(media_kind) if media_kind else None,
|
||||
full_metadata=full_metadata if isinstance(full_metadata, dict) else None,
|
||||
full_metadata=full_metadata if isinstance(full_metadata,
|
||||
dict) else None,
|
||||
progress=progress,
|
||||
config=config,
|
||||
)
|
||||
@@ -1105,7 +1199,13 @@ class Download_File(Cmdlet):
|
||||
|
||||
return downloaded_count
|
||||
|
||||
def _run_impl(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
def _run_impl(
|
||||
self,
|
||||
result: Any,
|
||||
args: Sequence[str],
|
||||
config: Dict[str,
|
||||
Any]
|
||||
) -> int:
|
||||
"""Main download implementation for direct HTTP files."""
|
||||
progress = PipelineProgress(pipeline_context)
|
||||
prev_progress = None
|
||||
@@ -1139,26 +1239,26 @@ class Download_File(Cmdlet):
|
||||
|
||||
# UX: In piped mode, allow a single positional arg to be the destination directory.
|
||||
# Example: @1-4 | download-file "C:\\Users\\Me\\Downloads\\yoyo"
|
||||
if (
|
||||
had_piped_input
|
||||
and raw_url
|
||||
and len(raw_url) == 1
|
||||
and (not parsed.get("path"))
|
||||
and (not parsed.get("output"))
|
||||
):
|
||||
if (had_piped_input and raw_url and len(raw_url) == 1
|
||||
and (not parsed.get("path")) and (not parsed.get("output"))):
|
||||
candidate = str(raw_url[0] or "").strip()
|
||||
low = candidate.lower()
|
||||
looks_like_url = low.startswith(("http://", "https://", "ftp://"))
|
||||
looks_like_provider = low.startswith(
|
||||
("magnet:", "alldebrid:", "hydrus:", "ia:", "internetarchive:")
|
||||
("magnet:",
|
||||
"alldebrid:",
|
||||
"hydrus:",
|
||||
"ia:",
|
||||
"internetarchive:")
|
||||
)
|
||||
looks_like_windows_path = (
|
||||
(len(candidate) >= 2 and candidate[1] == ":")
|
||||
or candidate.startswith("\\\\")
|
||||
or candidate.startswith("\\")
|
||||
or candidate.endswith(("\\", "/"))
|
||||
or candidate.startswith("\\\\") or candidate.startswith("\\")
|
||||
or candidate.endswith(("\\",
|
||||
"/"))
|
||||
)
|
||||
if (not looks_like_url) and (not looks_like_provider) and looks_like_windows_path:
|
||||
if (not looks_like_url) and (
|
||||
not looks_like_provider) and looks_like_windows_path:
|
||||
parsed["path"] = candidate
|
||||
raw_url = []
|
||||
piped_items = self._collect_piped_items_if_no_urls(result, raw_url)
|
||||
@@ -1168,7 +1268,9 @@ class Download_File(Cmdlet):
|
||||
return 1
|
||||
|
||||
quiet_mode = (
|
||||
bool(config.get("_quiet_background_output")) if isinstance(config, dict) else False
|
||||
bool(config.get("_quiet_background_output"))
|
||||
if isinstance(config,
|
||||
dict) else False
|
||||
)
|
||||
ia_picker_exit = self._maybe_show_internetarchive_formats(
|
||||
raw_urls=raw_url,
|
||||
@@ -1194,7 +1296,9 @@ class Download_File(Cmdlet):
|
||||
preview = self._build_preview(raw_url, piped_items, total_items)
|
||||
|
||||
progress.ensure_local_ui(
|
||||
label="download-file", total_items=total_items, items_preview=preview
|
||||
label="download-file",
|
||||
total_items=total_items,
|
||||
items_preview=preview
|
||||
)
|
||||
|
||||
registry = self._load_provider_registry()
|
||||
@@ -1243,7 +1347,11 @@ class Download_File(Cmdlet):
|
||||
pass
|
||||
progress.close_local_ui(force_complete=True)
|
||||
|
||||
def _resolve_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
|
||||
def _resolve_output_dir(self,
|
||||
parsed: Dict[str,
|
||||
Any],
|
||||
config: Dict[str,
|
||||
Any]) -> Optional[Path]:
|
||||
"""Resolve the output directory from storage location or config."""
|
||||
output_dir_arg = parsed.get("path") or parsed.get("output")
|
||||
if output_dir_arg:
|
||||
@@ -1252,7 +1360,10 @@ class Download_File(Cmdlet):
|
||||
out_path.mkdir(parents=True, exist_ok=True)
|
||||
return out_path
|
||||
except Exception as e:
|
||||
log(f"Cannot use output directory {output_dir_arg}: {e}", file=sys.stderr)
|
||||
log(
|
||||
f"Cannot use output directory {output_dir_arg}: {e}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return None
|
||||
|
||||
storage_location = parsed.get("storage")
|
||||
@@ -1267,7 +1378,7 @@ class Download_File(Cmdlet):
|
||||
|
||||
# Priority 2: Config default output/temp directory
|
||||
try:
|
||||
from config import resolve_output_dir
|
||||
from SYS.config import resolve_output_dir
|
||||
|
||||
final_output_dir = resolve_output_dir(config)
|
||||
except Exception:
|
||||
@@ -1279,7 +1390,10 @@ class Download_File(Cmdlet):
|
||||
try:
|
||||
final_output_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as e:
|
||||
log(f"Cannot create output directory {final_output_dir}: {e}", file=sys.stderr)
|
||||
log(
|
||||
f"Cannot create output directory {final_output_dir}: {e}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return None
|
||||
|
||||
return final_output_dir
|
||||
|
||||
File diff suppressed because it is too large
@@ -26,7 +26,8 @@ class Download_Torrent(sh.Cmdlet):
|
||||
name="download-torrent",
|
||||
summary="Download torrent/magnet links via AllDebrid",
|
||||
usage="download-torrent <magnet|.torrent> [options]",
|
||||
alias=["torrent", "magnet"],
|
||||
alias=["torrent",
|
||||
"magnet"],
|
||||
arg=[
|
||||
sh.CmdletArg(
|
||||
name="magnet",
|
||||
@@ -78,7 +79,13 @@ class Download_Torrent(sh.Cmdlet):
|
||||
return 1
|
||||
for magnet_url in magnet_args:
|
||||
if background_mode:
|
||||
self._start_background_worker(magnet_url, output_dir, config, api_key, wait_timeout)
|
||||
self._start_background_worker(
|
||||
magnet_url,
|
||||
output_dir,
|
||||
config,
|
||||
api_key,
|
||||
wait_timeout
|
||||
)
|
||||
log(f"⧗ Torrent download queued in background: {magnet_url}")
|
||||
else:
|
||||
# Foreground mode: submit quickly, then continue processing in background
|
||||
@@ -88,7 +95,11 @@ class Download_Torrent(sh.Cmdlet):
|
||||
if magnet_id <= 0:
|
||||
continue
|
||||
self._start_background_magnet_worker(
|
||||
worker_id, magnet_id, output_dir, api_key, wait_timeout
|
||||
worker_id,
|
||||
magnet_id,
|
||||
output_dir,
|
||||
api_key,
|
||||
wait_timeout
|
||||
)
|
||||
log(f"⧗ Torrent processing started (ID: {magnet_id})")
|
||||
return 0
|
||||
@@ -116,11 +127,20 @@ class Download_Torrent(sh.Cmdlet):
|
||||
return 0
|
||||
|
||||
def _start_background_magnet_worker(
|
||||
self, worker_id: str, magnet_id: int, output_dir: Path, api_key: str, wait_timeout: int
|
||||
self,
|
||||
worker_id: str,
|
||||
magnet_id: int,
|
||||
output_dir: Path,
|
||||
api_key: str,
|
||||
wait_timeout: int
|
||||
) -> None:
|
||||
thread = threading.Thread(
|
||||
target=self._download_magnet_worker,
|
||||
args=(worker_id, magnet_id, output_dir, api_key, wait_timeout),
|
||||
args=(worker_id,
|
||||
magnet_id,
|
||||
output_dir,
|
||||
api_key,
|
||||
wait_timeout),
|
||||
daemon=True,
|
||||
name=f"TorrentWorker_{worker_id}",
|
||||
)
|
||||
@@ -155,7 +175,8 @@ class Download_Torrent(sh.Cmdlet):
|
||||
return
|
||||
|
||||
files_result = client.magnet_links([magnet_id])
|
||||
magnet_files = files_result.get(str(magnet_id), {})
|
||||
magnet_files = files_result.get(str(magnet_id),
|
||||
{})
|
||||
files_array = magnet_files.get("files", [])
|
||||
if not files_array:
|
||||
log(f"[Worker {worker_id}] No files found", file=sys.stderr)
|
||||
@@ -174,7 +195,8 @@ class Download_Torrent(sh.Cmdlet):
|
||||
worker_id: str,
|
||||
magnet_url: str,
|
||||
output_dir: Path,
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str,
|
||||
Any],
|
||||
api_key: str,
|
||||
wait_timeout: int = 600,
|
||||
worker_manager: Optional[Any] = None,
|
||||
@@ -204,7 +226,8 @@ class Download_Torrent(sh.Cmdlet):
|
||||
log(f"[Worker {worker_id}] Timeout waiting for magnet", file=sys.stderr)
|
||||
return
|
||||
files_result = client.magnet_links([magnet_id])
|
||||
magnet_files = files_result.get(str(magnet_id), {})
|
||||
magnet_files = files_result.get(str(magnet_id),
|
||||
{})
|
||||
files_array = magnet_files.get("files", [])
|
||||
if not files_array:
|
||||
log(f"[Worker {worker_id}] No files found", file=sys.stderr)
|
||||
@@ -231,11 +254,23 @@ class Download_Torrent(sh.Cmdlet):
|
||||
except Exception as e:
|
||||
log(f"File download failed: {e}", file=sys.stderr)
|
||||
|
||||
def _start_background_worker(self, magnet_url, output_dir, config, api_key, wait_timeout):
|
||||
def _start_background_worker(
|
||||
self,
|
||||
magnet_url,
|
||||
output_dir,
|
||||
config,
|
||||
api_key,
|
||||
wait_timeout
|
||||
):
|
||||
worker_id = f"torrent_{uuid.uuid4().hex[:6]}"
|
||||
thread = threading.Thread(
|
||||
target=self._download_torrent_worker,
|
||||
args=(worker_id, magnet_url, output_dir, config, api_key, wait_timeout),
|
||||
args=(worker_id,
|
||||
magnet_url,
|
||||
output_dir,
|
||||
config,
|
||||
api_key,
|
||||
wait_timeout),
|
||||
daemon=True,
|
||||
name=f"TorrentWorker_{worker_id}",
|
||||
)
|
||||
|
||||
@@ -19,7 +19,7 @@ import pipeline as ctx
|
||||
from . import _shared as sh
|
||||
from SYS.logger import log, debug
|
||||
from Store import Store
|
||||
from config import resolve_output_dir
|
||||
from SYS.config import resolve_output_dir
|
||||
|
||||
|
||||
class Get_File(sh.Cmdlet):
|
||||
@@ -35,7 +35,10 @@ class Get_File(sh.Cmdlet):
|
||||
sh.SharedArgs.QUERY,
|
||||
sh.SharedArgs.STORE,
|
||||
sh.SharedArgs.PATH,
|
||||
sh.CmdletArg("name", description="Output filename (default: from metadata title)"),
|
||||
sh.CmdletArg(
|
||||
"name",
|
||||
description="Output filename (default: from metadata title)"
|
||||
),
|
||||
],
|
||||
detail=[
|
||||
"- Exports file from storage backend to local path",
|
||||
@@ -66,7 +69,9 @@ class Get_File(sh.Cmdlet):
|
||||
debug(f"[get-file] file_hash={file_hash} store_name={store_name}")
|
||||
|
||||
if not file_hash:
|
||||
log('Error: No file hash provided (pipe an item or use -query "hash:<sha256>")')
|
||||
log(
|
||||
'Error: No file hash provided (pipe an item or use -query "hash:<sha256>")'
|
||||
)
|
||||
return 1
|
||||
|
||||
if not store_name:
|
||||
@@ -98,12 +103,18 @@ class Get_File(sh.Cmdlet):
|
||||
|
||||
def resolve_display_title() -> str:
|
||||
candidates = [
|
||||
sh.get_field(result, "title"),
|
||||
sh.get_field(result, "name"),
|
||||
sh.get_field(result, "filename"),
|
||||
(metadata.get("title") if isinstance(metadata, dict) else None),
|
||||
(metadata.get("name") if isinstance(metadata, dict) else None),
|
||||
(metadata.get("filename") if isinstance(metadata, dict) else None),
|
||||
sh.get_field(result,
|
||||
"title"),
|
||||
sh.get_field(result,
|
||||
"name"),
|
||||
sh.get_field(result,
|
||||
"filename"),
|
||||
(metadata.get("title") if isinstance(metadata,
|
||||
dict) else None),
|
||||
(metadata.get("name") if isinstance(metadata,
|
||||
dict) else None),
|
||||
(metadata.get("filename") if isinstance(metadata,
|
||||
dict) else None),
|
||||
]
|
||||
for candidate in candidates:
|
||||
if candidate is None:
|
||||
@@ -121,9 +132,9 @@ class Get_File(sh.Cmdlet):
|
||||
debug(f"[get-file] backend.get_file returned: {source_path}")
|
||||
|
||||
# Check if backend returned a URL (HydrusNetwork case)
|
||||
if isinstance(source_path, str) and (
|
||||
source_path.startswith("http://") or source_path.startswith("https://")
|
||||
):
|
||||
if isinstance(source_path,
|
||||
str) and (source_path.startswith("http://")
|
||||
or source_path.startswith("https://")):
|
||||
# Hydrus backend returns a URL; open it only for this explicit user action.
|
||||
try:
|
||||
webbrowser.open(source_path)
|
||||
@@ -186,9 +197,9 @@ class Get_File(sh.Cmdlet):
|
||||
filename = output_name
|
||||
else:
|
||||
title = (
|
||||
(metadata.get("title") if isinstance(metadata, dict) else None)
|
||||
or resolve_display_title()
|
||||
or "export"
|
||||
(metadata.get("title") if isinstance(metadata,
|
||||
dict) else None)
|
||||
or resolve_display_title() or "export"
|
||||
)
|
||||
filename = self._sanitize_filename(title)
|
||||
|
||||
@@ -231,15 +242,15 @@ class Get_File(sh.Cmdlet):
|
||||
return
|
||||
|
||||
if suffix in {
|
||||
".png",
|
||||
".jpg",
|
||||
".jpeg",
|
||||
".gif",
|
||||
".webp",
|
||||
".bmp",
|
||||
".tif",
|
||||
".tiff",
|
||||
".svg",
|
||||
".png",
|
||||
".jpg",
|
||||
".jpeg",
|
||||
".gif",
|
||||
".webp",
|
||||
".bmp",
|
||||
".tif",
|
||||
".tiff",
|
||||
".svg",
|
||||
}:
|
||||
# Use default web browser for images.
|
||||
if self._open_image_in_default_browser(path):
|
||||
@@ -250,11 +261,17 @@ class Get_File(sh.Cmdlet):
|
||||
return
|
||||
if sys.platform == "darwin":
|
||||
subprocess.Popen(
|
||||
["open", str(path)], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
|
||||
["open",
|
||||
str(path)],
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL
|
||||
)
|
||||
return
|
||||
subprocess.Popen(
|
||||
["xdg-open", str(path)], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
|
||||
["xdg-open",
|
||||
str(path)],
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL
|
||||
)
|
||||
except Exception as exc:
|
||||
log(f"Error opening file: {exc}", file=sys.stderr)
|
||||
@@ -274,15 +291,21 @@ class Get_File(sh.Cmdlet):
|
||||
return False
|
||||
|
||||
class OneFileHandler(http.server.SimpleHTTPRequestHandler):
|
||||
|
||||
def __init__(self, *handler_args, **handler_kwargs):
|
||||
super().__init__(*handler_args, directory=str(directory), **handler_kwargs)
|
||||
super().__init__(
|
||||
*handler_args,
|
||||
directory=str(directory),
|
||||
**handler_kwargs
|
||||
)
|
||||
|
||||
def log_message(self, format: str, *args) -> None: # noqa: A003
|
||||
# Keep normal output clean.
|
||||
return
|
||||
|
||||
def do_GET(self) -> None: # noqa: N802
|
||||
if self.path in {"/", ""}:
|
||||
if self.path in {"/",
|
||||
""}:
|
||||
self.path = "/" + filename
|
||||
return super().do_GET()
|
||||
|
||||
@@ -292,7 +315,8 @@ class Get_File(sh.Cmdlet):
|
||||
self.send_error(404)
|
||||
|
||||
def do_HEAD(self) -> None: # noqa: N802
|
||||
if self.path in {"/", ""}:
|
||||
if self.path in {"/",
|
||||
""}:
|
||||
self.path = "/" + filename
|
||||
return super().do_HEAD()
|
||||
|
||||
@@ -311,7 +335,11 @@ class Get_File(sh.Cmdlet):
|
||||
|
||||
# Run server in the background.
|
||||
server_thread = threading.Thread(
|
||||
target=httpd.serve_forever, kwargs={"poll_interval": 0.2}, daemon=True
|
||||
target=httpd.serve_forever,
|
||||
kwargs={
|
||||
"poll_interval": 0.2
|
||||
},
|
||||
daemon=True
|
||||
)
|
||||
server_thread.start()
|
||||
|
||||
@@ -350,7 +378,9 @@ class Get_File(sh.Cmdlet):
|
||||
return False
|
||||
|
||||
# Create a stable wrapper filename to reduce temp-file spam.
|
||||
wrapper_path = Path(tempfile.gettempdir()) / f"medeia-open-image-{resolved.stem}.html"
|
||||
wrapper_path = Path(
|
||||
tempfile.gettempdir()
|
||||
) / f"medeia-open-image-{resolved.stem}.html"
|
||||
try:
|
||||
wrapper_path.write_text(
|
||||
"\n".join(
|
||||
@@ -381,7 +411,10 @@ class Get_File(sh.Cmdlet):
|
||||
"""Sanitize filename by removing invalid characters."""
|
||||
allowed_chars = []
|
||||
for ch in str(name):
|
||||
if ch.isalnum() or ch in {"-", "_", " ", "."}:
|
||||
if ch.isalnum() or ch in {"-",
|
||||
"_",
|
||||
" ",
|
||||
"."}:
|
||||
allowed_chars.append(ch)
|
||||
else:
|
||||
allowed_chars.append(" ")
|
||||
|
||||
@@ -88,7 +88,8 @@ class Get_Metadata(Cmdlet):
|
||||
url: list[str],
|
||||
hash_value: Optional[str],
|
||||
pages: Optional[int] = None,
|
||||
) -> Dict[str, Any]:
|
||||
) -> Dict[str,
|
||||
Any]:
|
||||
"""Build a table row dict with metadata fields."""
|
||||
size_mb = None
|
||||
size_int: Optional[int] = None
|
||||
@@ -114,13 +115,20 @@ class Get_Metadata(Cmdlet):
|
||||
duration_value = str(pages_int) if pages_int is not None else ""
|
||||
|
||||
columns = [
|
||||
("Title", title or ""),
|
||||
("Hash", hash_value or ""),
|
||||
("MIME", mime or ""),
|
||||
("Size(MB)", str(size_mb) if size_mb is not None else ""),
|
||||
(duration_label, duration_value),
|
||||
("Imported", imported_label),
|
||||
("Store", store or ""),
|
||||
("Title",
|
||||
title or ""),
|
||||
("Hash",
|
||||
hash_value or ""),
|
||||
("MIME",
|
||||
mime or ""),
|
||||
("Size(MB)",
|
||||
str(size_mb) if size_mb is not None else ""),
|
||||
(duration_label,
|
||||
duration_value),
|
||||
("Imported",
|
||||
imported_label),
|
||||
("Store",
|
||||
store or ""),
|
||||
]
|
||||
|
||||
return {
|
||||
@@ -142,7 +150,8 @@ class Get_Metadata(Cmdlet):
|
||||
def _add_table_body_row(table: ResultTable, row: Dict[str, Any]) -> None:
|
||||
"""Add a single row to the ResultTable using the prepared columns."""
|
||||
columns = row.get("columns") if isinstance(row, dict) else None
|
||||
lookup: Dict[str, Any] = {}
|
||||
lookup: Dict[str,
|
||||
Any] = {}
|
||||
if isinstance(columns, list):
|
||||
for col in columns:
|
||||
if isinstance(col, tuple) and len(col) == 2:
|
||||
@@ -220,7 +229,9 @@ class Get_Metadata(Cmdlet):
|
||||
duration_seconds = metadata.get("duration_seconds")
|
||||
if duration_seconds is None:
|
||||
duration_seconds = metadata.get("length")
|
||||
if duration_seconds is None and isinstance(metadata.get("duration_ms"), (int, float)):
|
||||
if duration_seconds is None and isinstance(metadata.get("duration_ms"),
|
||||
(int,
|
||||
float)):
|
||||
try:
|
||||
duration_seconds = float(metadata["duration_ms"]) / 1000.0
|
||||
except Exception:
|
||||
@@ -234,7 +245,8 @@ class Get_Metadata(Cmdlet):
|
||||
except ValueError:
|
||||
if ":" in s:
|
||||
parts = [p.strip() for p in s.split(":") if p.strip()]
|
||||
if len(parts) in {2, 3} and all(p.isdigit() for p in parts):
|
||||
if len(parts) in {2,
|
||||
3} and all(p.isdigit() for p in parts):
|
||||
nums = [int(p) for p in parts]
|
||||
if len(nums) == 2:
|
||||
duration_seconds = float(nums[0] * 60 + nums[1])
|
||||
@@ -261,7 +273,8 @@ class Get_Metadata(Cmdlet):
|
||||
row = self._build_table_row(
|
||||
title=title,
|
||||
store=storage_source,
|
||||
path=metadata.get("path", ""),
|
||||
path=metadata.get("path",
|
||||
""),
|
||||
mime=mime_type,
|
||||
size_bytes=file_size,
|
||||
dur_seconds=duration_seconds,
|
||||
@@ -272,7 +285,10 @@ class Get_Metadata(Cmdlet):
|
||||
)
|
||||
|
||||
table_title = f"get-metadata: {title}" if title else "get-metadata"
|
||||
table = ResultTable(table_title).init_command(table_title, "get-metadata", list(args))
|
||||
table = ResultTable(table_title
|
||||
).init_command(table_title,
|
||||
"get-metadata",
|
||||
list(args))
|
||||
self._add_table_body_row(table, row)
|
||||
ctx.set_last_result_table_overlay(table, [row], row)
|
||||
ctx.emit(row)
|
||||
|
||||
@@ -21,12 +21,14 @@ from SYS.utils import sha256_file
|
||||
|
||||
|
||||
class Get_Note(Cmdlet):
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(
|
||||
name="get-note",
|
||||
summary="List notes on a file in a store.",
|
||||
usage='get-note -store <store> [-query "hash:<sha256>"]',
|
||||
alias=["get-notes", "get_note"],
|
||||
alias=["get-notes",
|
||||
"get_note"],
|
||||
arg=[
|
||||
SharedArgs.STORE,
|
||||
SharedArgs.QUERY,
|
||||
@@ -44,16 +46,21 @@ class Get_Note(Cmdlet):
|
||||
self.register()
|
||||
|
||||
def _resolve_hash(
|
||||
self, raw_hash: Optional[str], raw_path: Optional[str], override_hash: Optional[str]
|
||||
self,
|
||||
raw_hash: Optional[str],
|
||||
raw_path: Optional[str],
|
||||
override_hash: Optional[str]
|
||||
) -> Optional[str]:
|
||||
resolved = normalize_hash(override_hash) if override_hash else normalize_hash(raw_hash)
|
||||
resolved = normalize_hash(override_hash
|
||||
) if override_hash else normalize_hash(raw_hash)
|
||||
if resolved:
|
||||
return resolved
|
||||
if raw_path:
|
||||
try:
|
||||
p = Path(str(raw_path))
|
||||
stem = p.stem
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef"
|
||||
for c in stem.lower()):
|
||||
return stem.lower()
|
||||
if p.exists() and p.is_file():
|
||||
return sha256_file(p)
|
||||
@@ -70,13 +77,19 @@ class Get_Note(Cmdlet):
|
||||
store_override = parsed.get("store")
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log("[get_note] Error: -query must be of the form hash:<sha256>", file=sys.stderr)
|
||||
log(
|
||||
"[get_note] Error: -query must be of the form hash:<sha256>",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
results = normalize_result_input(result)
|
||||
if not results:
|
||||
if store_override and query_hash:
|
||||
results = [{"store": str(store_override), "hash": query_hash}]
|
||||
results = [{
|
||||
"store": str(store_override),
|
||||
"hash": query_hash
|
||||
}]
|
||||
else:
|
||||
log(
|
||||
'[get_note] Error: Requires piped item(s) or -store and -query "hash:<sha256>"',
|
||||
@@ -96,7 +109,10 @@ class Get_Note(Cmdlet):
|
||||
raw_path = res.get("path")
|
||||
|
||||
if not store_name:
|
||||
log("[get_note] Error: Missing -store and item has no store field", file=sys.stderr)
|
||||
log(
|
||||
"[get_note] Error: Missing -store and item has no store field",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
resolved_hash = self._resolve_hash(
|
||||
@@ -110,12 +126,18 @@ class Get_Note(Cmdlet):
|
||||
try:
|
||||
backend = store_registry[store_name]
|
||||
except Exception as exc:
|
||||
log(f"[get_note] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"[get_note] Error: Unknown store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
notes = {}
|
||||
try:
|
||||
notes = backend.get_note(resolved_hash, config=config) or {}
|
||||
notes = backend.get_note(
|
||||
resolved_hash,
|
||||
config=config
|
||||
) or {}
|
||||
except Exception:
|
||||
notes = {}
|
||||
|
||||
@@ -137,8 +159,10 @@ class Get_Note(Cmdlet):
|
||||
"note_name": str(k),
|
||||
"note_text": raw_text,
|
||||
"columns": [
|
||||
("Name", str(k)),
|
||||
("Text", preview.strip()),
|
||||
("Name",
|
||||
str(k)),
|
||||
("Text",
|
||||
preview.strip()),
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
@@ -22,7 +22,7 @@ fetch_hydrus_metadata = sh.fetch_hydrus_metadata
|
||||
should_show_help = sh.should_show_help
|
||||
get_field = sh.get_field
|
||||
from API.folder import API_folder_store
|
||||
from config import get_local_storage_path
|
||||
from SYS.config import get_local_storage_path
|
||||
from result_table import ResultTable
|
||||
from Store import Store
|
||||
|
||||
@@ -55,11 +55,15 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
while i < len(args_list):
|
||||
a = args_list[i]
|
||||
low = str(a).lower()
|
||||
if low in {"-query", "--query", "query"} and i + 1 < len(args_list):
|
||||
if low in {"-query",
|
||||
"--query",
|
||||
"query"} and i + 1 < len(args_list):
|
||||
override_query = str(args_list[i + 1]).strip()
|
||||
i += 2
|
||||
continue
|
||||
if low in {"-store", "--store", "store"} and i + 1 < len(args_list):
|
||||
if low in {"-store",
|
||||
"--store",
|
||||
"store"} and i + 1 < len(args_list):
|
||||
override_store = str(args_list[i + 1]).strip()
|
||||
i += 2
|
||||
continue
|
||||
@@ -93,15 +97,13 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
def _add_relationship(entry: Dict[str, Any]) -> None:
|
||||
"""Add relationship if not already present by hash or path."""
|
||||
for existing in found_relationships:
|
||||
if (
|
||||
entry.get("hash")
|
||||
and str(existing.get("hash", "")).lower() == str(entry["hash"]).lower()
|
||||
):
|
||||
if (entry.get("hash")
|
||||
and str(existing.get("hash",
|
||||
"")).lower() == str(entry["hash"]).lower()):
|
||||
return
|
||||
if (
|
||||
entry.get("path")
|
||||
and str(existing.get("path", "")).lower() == str(entry["path"]).lower()
|
||||
):
|
||||
if (entry.get("path")
|
||||
and str(existing.get("path",
|
||||
"")).lower() == str(entry["path"]).lower()):
|
||||
return
|
||||
found_relationships.append(entry)
|
||||
|
||||
@@ -112,14 +114,15 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
hash_hex = (
|
||||
normalize_hash(override_hash)
|
||||
if override_hash
|
||||
else normalize_hash(get_hash_for_operation(None, result))
|
||||
if override_hash else normalize_hash(get_hash_for_operation(None,
|
||||
result))
|
||||
)
|
||||
|
||||
if not source_title or source_title == "Unknown":
|
||||
source_title = (
|
||||
get_field(result, "title")
|
||||
or get_field(result, "name")
|
||||
get_field(result,
|
||||
"title") or get_field(result,
|
||||
"name")
|
||||
or (hash_hex[:16] + "..." if hash_hex else "Unknown")
|
||||
)
|
||||
|
||||
@@ -133,11 +136,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Folder store relationships
|
||||
# IMPORTANT: only treat the Folder backend as a local DB store.
|
||||
# Other backends may expose a location() method but are not SQLite folder stores.
|
||||
if (
|
||||
type(backend).__name__ == "Folder"
|
||||
and hasattr(backend, "location")
|
||||
and callable(getattr(backend, "location"))
|
||||
):
|
||||
if (type(backend).__name__ == "Folder" and hasattr(backend,
|
||||
"location")
|
||||
and callable(getattr(backend,
|
||||
"location"))):
|
||||
storage_path = Path(str(backend.location()))
|
||||
with API_folder_store(storage_path) as db:
|
||||
local_db_checked = True
|
||||
@@ -167,7 +169,8 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
continue
|
||||
|
||||
entry_type = (
|
||||
"king" if str(rel_type).lower() == "alt" else str(rel_type)
|
||||
"king" if str(rel_type).lower() == "alt" else
|
||||
str(rel_type)
|
||||
)
|
||||
if entry_type == "king":
|
||||
king_hashes.append(related_hash)
|
||||
@@ -176,7 +179,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
try:
|
||||
rel_tags = db.get_tags(related_hash)
|
||||
for t in rel_tags:
|
||||
if isinstance(t, str) and t.lower().startswith("title:"):
|
if isinstance(
t,
str) and t.lower().startswith("title:"):
related_title = t[6:].strip()
break
except Exception:
@@ -208,13 +213,16 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
try:
child_tags = db.get_tags(child_hash)
for t in child_tags:
if isinstance(t, str) and t.lower().startswith("title:"):
if isinstance(t,
str) and t.lower().startswith("title:"):
child_title = t[6:].strip()
break
except Exception:
pass

entry_type = "alt" if rel_type == "alt" else (rel_type or "related")
entry_type = "alt" if rel_type == "alt" else (
rel_type or "related"
)
_add_relationship(
{
"hash": child_hash,
@@ -234,20 +242,25 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
for sib in siblings or []:
sib_hash = normalize_hash(str(sib.get("hash") or ""))
sib_type = str(sib.get("type") or "").strip().lower()
if not sib_hash or sib_hash in {hash_hex, king_hash}:
if not sib_hash or sib_hash in {hash_hex,
king_hash}:
continue

sib_title = sib_hash[:16] + "..."
try:
sib_tags = db.get_tags(sib_hash)
for t in sib_tags:
if isinstance(t, str) and t.lower().startswith("title:"):
if isinstance(
t,
str) and t.lower().startswith("title:"):
sib_title = t[6:].strip()
break
except Exception:
pass

entry_type = "alt" if sib_type == "alt" else (sib_type or "related")
entry_type = "alt" if sib_type == "alt" else (
sib_type or "related"
)
_add_relationship(
{
"hash": sib_hash,
@@ -268,8 +281,8 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:

hash_hex = (
normalize_hash(override_hash)
if override_hash
else normalize_hash(get_hash_for_operation(None, result))
if override_hash else normalize_hash(get_hash_for_operation(None,
result))
)

if hash_hex and not local_db_checked:
@@ -284,12 +297,16 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
store = Store(config)
backend_obj = store[str(store_name)]
candidate = getattr(backend_obj, "_client", None)
if candidate is not None and hasattr(candidate, "get_file_relationships"):
if candidate is not None and hasattr(candidate,
"get_file_relationships"):
client = candidate
except Exception:
client = None
if client is None:
log(f"Hydrus client unavailable for store '{store_name}'", file=sys.stderr)
log(
f"Hydrus client unavailable for store '{store_name}'",
file=sys.stderr
)
return 1
else:
client = hydrus_wrapper.get_client(config)
@@ -312,12 +329,13 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
tag_result = backend_obj.get_tag(h)
tags = (
tag_result[0]
if isinstance(tag_result, tuple) and tag_result
else tag_result
if isinstance(tag_result,
tuple) and tag_result else tag_result
)
if isinstance(tags, list):
for t in tags:
if isinstance(t, str) and t.lower().startswith("title:"):
if isinstance(t,
str) and t.lower().startswith("title:"):
val = t.split(":", 1)[1].strip()
if val:
return val
@@ -349,7 +367,10 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
for group in storage.values():
if isinstance(group, list):
tag_candidates.extend(
[str(x) for x in group if isinstance(x, str)]
[
str(x) for x in group
if isinstance(x, str)
]
)
display = svc_data.get("display_tags")
if isinstance(display, list):
@@ -358,7 +379,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
)
flat = meta.get("tags_flat")
if isinstance(flat, list):
tag_candidates.extend([str(x) for x in flat if isinstance(x, str)])
tag_candidates.extend(
[str(x) for x in flat if isinstance(x, str)]
)

for t in tag_candidates:
if isinstance(t, str) and t.lower().startswith("title:"):
@@ -373,7 +396,8 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if client:
rel = client.get_file_relationships(hash_hex)
if rel:
file_rels = rel.get("file_relationships", {})
file_rels = rel.get("file_relationships",
{})
this_file_rels = file_rels.get(hash_hex)

if this_file_rels:
@@ -392,26 +416,28 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
key = str(rel_type_id)

# Handle metadata keys explicitly.
if key in {"is_king", "king_is_on_file_domain", "king_is_local"}:
if key in {"is_king",
"king_is_on_file_domain",
"king_is_local"}:
continue

# Some Hydrus responses provide a direct king hash under the 'king' key.
if key == "king":
king_hash = (
normalize_hash(rel_value)
if isinstance(rel_value, str)
else None
if isinstance(rel_value,
str) else None
)
if king_hash and king_hash != hash_hex:
if not any(
str(r.get("hash", "")).lower() == king_hash
for r in found_relationships
):
if not any(str(r.get("hash",
"")).lower() == king_hash
for r in found_relationships):
found_relationships.append(
{
"hash": king_hash,
"type": "king",
"title": _resolve_related_title(king_hash),
"title":
_resolve_related_title(king_hash),
"path": None,
"store": store_label,
}
@@ -425,39 +451,47 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
for rel_hash in rel_value:
rel_hash_norm = (
normalize_hash(rel_hash)
if isinstance(rel_hash, str)
else None
if isinstance(rel_hash,
str) else None
)
if not rel_hash_norm or rel_hash_norm == hash_hex:
continue
if not any(
str(r.get("hash", "")).lower() == rel_hash_norm
for r in found_relationships
):
if not any(str(r.get("hash",
"")).lower() == rel_hash_norm
for r in found_relationships):
found_relationships.append(
{
"hash": rel_hash_norm,
"type": rel_name,
"title": _resolve_related_title(rel_hash_norm),
"path": None,
"store": store_label,
"hash":
rel_hash_norm,
"type":
rel_name,
"title":
_resolve_related_title(rel_hash_norm),
"path":
None,
"store":
store_label,
}
)
# Defensive: sometimes the API may return a single hash string.
elif isinstance(rel_value, str):
rel_hash_norm = normalize_hash(rel_value)
if rel_hash_norm and rel_hash_norm != hash_hex:
if not any(
str(r.get("hash", "")).lower() == rel_hash_norm
for r in found_relationships
):
if not any(str(r.get("hash",
"")).lower() == rel_hash_norm
for r in found_relationships):
found_relationships.append(
{
"hash": rel_hash_norm,
"type": rel_name,
"title": _resolve_related_title(rel_hash_norm),
"path": None,
"store": store_label,
"hash":
rel_hash_norm,
"type":
rel_name,
"title":
_resolve_related_title(rel_hash_norm),
"path":
None,
"store":
store_label,
}
)
except Exception as exc:
@@ -471,13 +505,18 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
from rich_display import stdout_console

title = source_title or (hash_hex[:16] + "..." if hash_hex else "Item")
stdout_console().print(Panel(f"{title} has no relationships", title="Relationships"))
stdout_console().print(
Panel(f"{title} has no relationships",
title="Relationships")
)
except Exception:
log("No relationships found.")
return 0

# Display results
table = ResultTable(f"Relationships: {source_title}").init_command("get-relationship", [])
table = ResultTable(f"Relationships: {source_title}"
).init_command("get-relationship",
[])

# Sort by type then title
# Custom sort order: King first, then Derivative, then others
@@ -487,7 +526,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
return 0
elif t == "derivative":
return 1
elif t in {"alternative", "alternate", "alt"}:
elif t in {"alternative",
"alternate",
"alt"}:
return 2
elif t == "duplicate":
return 3
@@ -520,7 +561,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:

# Set selection args
table.set_row_selection_args(
i, ["-store", str(item["store"]), "-query", f"hash:{item['hash']}"]
i,
["-store",
str(item["store"]),
"-query",
f"hash:{item['hash']}"]
)

ctx.set_last_result_table(table, pipeline_results)

@@ -39,8 +39,7 @@ CmdletArg = sh.CmdletArg
SharedArgs = sh.SharedArgs
parse_cmdlet_args = sh.parse_cmdlet_args
get_field = sh.get_field
from config import get_local_storage_path

from SYS.config import get_local_storage_path

try:
from metadata import extract_title
@@ -148,7 +147,8 @@ def _resolve_candidate_urls_for_item(
result: Any,
backend: Any,
file_hash: str,
config: Dict[str, Any],
config: Dict[str,
Any],
) -> List[str]:
"""Get candidate URLs from backend and/or piped result."""
try:
@@ -165,7 +165,10 @@ def _resolve_candidate_urls_for_item(
urls.extend(normalize_urls(backend_urls))
else:
urls.extend(
[str(u).strip() for u in backend_urls if isinstance(u, str) and str(u).strip()]
[
str(u).strip() for u in backend_urls
if isinstance(u, str) and str(u).strip()
]
)
except Exception:
pass
@@ -180,7 +183,10 @@ def _resolve_candidate_urls_for_item(
raw = meta.get("url")
if isinstance(raw, list):
urls.extend(
[str(u).strip() for u in raw if isinstance(u, str) and str(u).strip()]
[
str(u).strip() for u in raw
if isinstance(u, str) and str(u).strip()
]
)
elif isinstance(raw, str) and raw.strip():
urls.append(raw.strip())
@@ -203,7 +209,9 @@ def _resolve_candidate_urls_for_item(
|
||||
if isinstance(val, str) and val.strip():
|
||||
urls.append(val.strip())
|
||||
elif isinstance(val, list):
|
||||
urls.extend([str(u).strip() for u in val if isinstance(u, str) and str(u).strip()])
|
||||
urls.extend(
|
||||
[str(u).strip() for u in val if isinstance(u, str) and str(u).strip()]
|
||||
)
|
||||
|
||||
meta_field = _get(result, "metadata", None)
|
||||
if isinstance(meta_field, dict) and meta_field.get("url"):
|
||||
@@ -211,7 +219,9 @@ def _resolve_candidate_urls_for_item(
|
||||
if normalize_urls:
|
||||
urls.extend(normalize_urls(val))
|
||||
elif isinstance(val, list):
|
||||
urls.extend([str(u).strip() for u in val if isinstance(u, str) and str(u).strip()])
|
||||
urls.extend(
|
||||
[str(u).strip() for u in val if isinstance(u, str) and str(u).strip()]
|
||||
)
|
||||
elif isinstance(val, str) and val.strip():
|
||||
urls.append(val.strip())
|
||||
|
||||
@@ -263,7 +273,6 @@ def _pick_supported_ytdlp_url(urls: List[str]) -> Optional[str]:
|
||||
_scrape_isbn_metadata = _ol_scrape_isbn_metadata # type: ignore[assignment]
|
||||
_scrape_openlibrary_metadata = _ol_scrape_openlibrary_metadata # type: ignore[assignment]
|
||||
|
||||
|
||||
# Tag item for ResultTable display and piping
|
||||
from dataclasses import dataclass
|
||||
|
||||
@@ -308,7 +317,8 @@ def _emit_tags_as_table(
|
||||
file_hash: Optional[str],
|
||||
store: str = "hydrus",
|
||||
service_name: Optional[str] = None,
|
||||
config: Optional[Dict[str, Any]] = None,
|
||||
config: Optional[Dict[str,
|
||||
Any]] = None,
|
||||
item_title: Optional[str] = None,
|
||||
path: Optional[str] = None,
|
||||
subject: Optional[Any] = None,
|
||||
@@ -357,7 +367,9 @@ def _emit_tags_as_table(
|
||||
|
||||
def _filter_scraped_tags(tags: List[str]) -> List[str]:
|
||||
"""Filter out tags we don't want to import from scraping."""
|
||||
blocked = {"title", "artist", "source"}
|
||||
blocked = {"title",
|
||||
"artist",
|
||||
"source"}
|
||||
out: List[str] = []
|
||||
seen: set[str] = set()
|
||||
for t in tags:
|
||||
@@ -529,12 +541,14 @@ def _handle_title_rename(old_path: Path, tags_list: List[str]) -> Optional[Path]
|
||||
new_tags_path = old_path.parent / (new_name + ".tag")
|
||||
if new_tags_path.exists():
|
||||
log(
|
||||
f"Warning: Target sidecar already exists: {new_tags_path.name}", file=sys.stderr
|
||||
f"Warning: Target sidecar already exists: {new_tags_path.name}",
|
||||
file=sys.stderr
|
||||
)
|
||||
else:
|
||||
old_tags_path.rename(new_tags_path)
|
||||
log(
|
||||
f"Renamed sidecar: {old_tags_path.name} → {new_tags_path.name}", file=sys.stderr
|
||||
f"Renamed sidecar: {old_tags_path.name} → {new_tags_path.name}",
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
return new_path
|
||||
@@ -564,7 +578,10 @@ def _read_sidecar_fallback(p: Path) -> tuple[Optional[str], List[str], List[str]
|
||||
h: Optional[str] = None
|
||||
|
||||
# Namespaces to exclude from tags
|
||||
excluded_namespaces = {"hash", "url", "url", "relationship"}
|
||||
excluded_namespaces = {"hash",
|
||||
"url",
|
||||
"url",
|
||||
"relationship"}
|
||||
|
||||
for line in raw.splitlines():
|
||||
s = line.strip()
|
||||
@@ -594,7 +611,11 @@ def _read_sidecar_fallback(p: Path) -> tuple[Optional[str], List[str], List[str]
|
||||
|
||||
|
||||
def _write_sidecar(
|
||||
p: Path, media: Path, tag_list: List[str], url: List[str], hash_in_sidecar: Optional[str]
|
||||
p: Path,
|
||||
media: Path,
|
||||
tag_list: List[str],
|
||||
url: List[str],
|
||||
hash_in_sidecar: Optional[str]
|
||||
) -> Path:
|
||||
"""Write tags to sidecar file and handle title-based renaming.
|
||||
|
||||
@@ -634,15 +655,17 @@ def _emit_tag_payload(
|
||||
tags_list: List[str],
|
||||
*,
|
||||
hash_value: Optional[str],
|
||||
extra: Optional[Dict[str, Any]] = None,
|
||||
extra: Optional[Dict[str,
|
||||
Any]] = None,
|
||||
store_label: Optional[str] = None,
|
||||
) -> int:
|
||||
"""Emit tag values as structured payload to pipeline."""
|
||||
payload: Dict[str, Any] = {
|
||||
"source": source,
|
||||
"tag": list(tags_list),
|
||||
"count": len(tags_list),
|
||||
}
|
||||
payload: Dict[str,
|
||||
Any] = {
|
||||
"source": source,
|
||||
"tag": list(tags_list),
|
||||
"count": len(tags_list),
|
||||
}
|
||||
if hash_value:
|
||||
payload["hash"] = hash_value
|
||||
if extra:
|
||||
@@ -662,7 +685,11 @@ def _emit_tag_payload(
|
||||
if ctx.get_stage_context() is not None:
|
||||
for idx, tag_name in enumerate(tags_list, start=1):
|
||||
tag_item = TagItem(
|
||||
tag_name=tag_name, tag_index=idx, hash=hash_value, store=source, service_name=None
|
||||
tag_name=tag_name,
|
||||
tag_index=idx,
|
||||
hash=hash_value,
|
||||
store=source,
|
||||
service_name=None
|
||||
)
|
||||
ctx.emit(tag_item)
|
||||
else:
|
||||
@@ -730,7 +757,12 @@ def _extract_tag_value(tags_list: List[str], namespace: str) -> Optional[str]:
|
||||
|
||||
def _scrape_url_metadata(
|
||||
url: str,
|
||||
) -> Tuple[Optional[str], List[str], List[Tuple[str, str]], List[Dict[str, Any]]]:
|
||||
) -> Tuple[Optional[str],
|
||||
List[str],
|
||||
List[Tuple[str,
|
||||
str]],
|
||||
List[Dict[str,
|
||||
Any]]]:
|
||||
"""Scrape metadata from a URL using yt-dlp.
|
||||
|
||||
Returns:
|
||||
@@ -810,10 +842,12 @@ def _scrape_url_metadata(
|
||||
playlist_items.append(
|
||||
{
|
||||
"index": idx,
|
||||
"id": entry.get("id", f"track_{idx}"),
|
||||
"id": entry.get("id",
|
||||
f"track_{idx}"),
|
||||
"title": item_title,
|
||||
"duration": item_duration,
|
||||
"url": entry.get("url") or entry.get("webpage_url", ""),
|
||||
"url": entry.get("url") or entry.get("webpage_url",
|
||||
""),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -837,14 +871,16 @@ def _scrape_url_metadata(
|
||||
|
||||
for tag in entry_tags:
|
||||
# Extract the namespace (part before the colon)
|
||||
tag_namespace = tag.split(":", 1)[0].lower() if ":" in tag else None
|
||||
tag_namespace = tag.split(":",
|
||||
1)[0].lower(
|
||||
) if ":" in tag else None
|
||||
|
||||
# Skip if this namespace already exists in tags (from album level)
|
||||
if tag_namespace and tag_namespace in single_value_namespaces:
|
||||
# Check if any tag with this namespace already exists in tags
|
||||
already_has_namespace = any(
|
||||
t.split(":", 1)[0].lower() == tag_namespace
|
||||
for t in tags
|
||||
t.split(":",
|
||||
1)[0].lower() == tag_namespace for t in tags
|
||||
if ":" in t
|
||||
)
|
||||
if already_has_namespace:
|
||||
@@ -858,8 +894,21 @@ def _scrape_url_metadata(
|
||||
elif (data.get("playlist_count") or 0) > 0 and "entries" not in data:
|
||||
try:
|
||||
# Make a second call with --flat-playlist to get the actual tracks
|
||||
flat_cmd = ["yt-dlp", "-j", "--no-warnings", "--flat-playlist", "-f", "best", url]
|
||||
flat_result = subprocess.run(flat_cmd, capture_output=True, text=True, timeout=30)
|
||||
flat_cmd = [
|
||||
"yt-dlp",
|
||||
"-j",
|
||||
"--no-warnings",
|
||||
"--flat-playlist",
|
||||
"-f",
|
||||
"best",
|
||||
url
|
||||
]
|
||||
flat_result = subprocess.run(
|
||||
flat_cmd,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=30
|
||||
)
|
||||
if flat_result.returncode == 0:
|
||||
flat_lines = flat_result.stdout.strip().split("\n")
|
||||
# With --flat-playlist, each line is a separate track JSON object
|
||||
@@ -868,15 +917,27 @@ def _scrape_url_metadata(
|
||||
if line.strip().startswith("{"):
|
||||
try:
|
||||
entry = json_module.loads(line)
|
||||
item_title = entry.get("title", entry.get("id", f"Track {idx}"))
|
||||
item_title = entry.get(
|
||||
"title",
|
||||
entry.get("id",
|
||||
f"Track {idx}")
|
||||
)
|
||||
item_duration = entry.get("duration", 0)
|
||||
playlist_items.append(
|
||||
{
|
||||
"index": idx,
|
||||
"id": entry.get("id", f"track_{idx}"),
|
||||
"title": item_title,
|
||||
"duration": item_duration,
|
||||
"url": entry.get("url") or entry.get("webpage_url", ""),
|
||||
"index":
|
||||
idx,
|
||||
"id":
|
||||
entry.get("id",
|
||||
f"track_{idx}"),
|
||||
"title":
|
||||
item_title,
|
||||
"duration":
|
||||
item_duration,
|
||||
"url":
|
||||
entry.get("url")
|
||||
or entry.get("webpage_url",
|
||||
""),
|
||||
}
|
||||
)
|
||||
except json_module.JSONDecodeError:
|
||||
@@ -935,7 +996,9 @@ def _extract_url_formats(formats: list) -> List[Tuple[str, str]]:
|
||||
if height < 480:
|
||||
continue
|
||||
res_key = f"{height}p"
|
||||
if res_key not in video_formats or tbr > video_formats[res_key].get("tbr", 0):
|
||||
if res_key not in video_formats or tbr > video_formats[res_key].get(
|
||||
"tbr",
|
||||
0):
|
||||
video_formats[res_key] = {
|
||||
"label": f"{height}p ({ext})",
|
||||
"format_id": format_id,
|
||||
@@ -945,7 +1008,9 @@ def _extract_url_formats(formats: list) -> List[Tuple[str, str]]:
|
||||
# Audio-only format
|
||||
elif acodec and acodec != "none" and (not vcodec or vcodec == "none"):
|
||||
audio_key = f"audio_{abr}"
|
||||
if audio_key not in audio_formats or abr > audio_formats[audio_key].get("abr", 0):
|
||||
if audio_key not in audio_formats or abr > audio_formats[audio_key].get(
|
||||
"abr",
|
||||
0):
|
||||
audio_formats[audio_key] = {
|
||||
"label": f"audio ({ext})",
|
||||
"format_id": format_id,
|
||||
@@ -955,9 +1020,9 @@ def _extract_url_formats(formats: list) -> List[Tuple[str, str]]:
|
||||
result = []
|
||||
|
||||
# Add video formats in descending resolution order
|
||||
for res in sorted(
|
||||
video_formats.keys(), key=lambda x: int(x.replace("p", "")), reverse=True
|
||||
):
|
||||
for res in sorted(video_formats.keys(),
|
||||
key=lambda x: int(x.replace("p", "")),
|
||||
reverse=True):
|
||||
fmt = video_formats[res]
|
||||
result.append((fmt["label"], fmt["format_id"]))
|
||||
|
||||
@@ -1019,12 +1084,15 @@ def _perform_scraping(tags_list: List[str]) -> List[str]:
|
||||
log(f"Scraping OpenLibrary: {olid}")
|
||||
new_tags.extend(_scrape_openlibrary_metadata(olid))
|
||||
elif "isbn_13" in identifiers or "isbn_10" in identifiers or "isbn" in identifiers:
|
||||
isbn = identifiers.get("isbn_13") or identifiers.get("isbn_10") or identifiers.get("isbn")
|
||||
isbn = identifiers.get("isbn_13") or identifiers.get(
|
||||
"isbn_10"
|
||||
) or identifiers.get("isbn")
|
||||
if isbn:
|
||||
log(f"Scraping ISBN: {isbn}")
|
||||
new_tags.extend(_scrape_isbn_metadata(isbn))
|
||||
|
||||
existing_tags_lower = {tag.lower() for tag in tags_list}
|
||||
existing_tags_lower = {tag.lower()
|
||||
for tag in tags_list}
|
||||
scraped_unique = []
|
||||
seen = set()
|
||||
for tag in new_tags:
|
||||
@@ -1074,7 +1142,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
f"[get_tag] Numeric selection arg {token} out of range (items={len(items_pool)})"
|
||||
)
|
||||
except Exception as exc:
|
||||
debug(f"[get_tag] Failed to resolve numeric selection arg {token}: {exc}")
|
||||
debug(
|
||||
f"[get_tag] Failed to resolve numeric selection arg {token}: {exc}"
|
||||
)
|
||||
|
||||
# Helper to get field from both dict and object
|
||||
def get_field(obj: Any, field: str, default: Any = None) -> Any:
|
||||
@@ -1087,7 +1157,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
parsed_args = parse_cmdlet_args(args_list, CMDLET)
|
||||
|
||||
# Detect if -scrape flag was provided without a value (parse_cmdlet_args skips missing values)
|
||||
scrape_flag_present = any(str(arg).lower() in {"-scrape", "--scrape"} for arg in args_list)
|
||||
scrape_flag_present = any(
|
||||
str(arg).lower() in {"-scrape",
|
||||
"--scrape"} for arg in args_list
|
||||
)
|
||||
|
||||
# Extract values
|
||||
query_raw = parsed_args.get("query")
|
||||
@@ -1122,39 +1195,54 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# NOTE: We intentionally do not reuse _scrape_url_metadata() here because it
|
||||
# performs namespace deduplication that would collapse multi-valued tags.
|
||||
file_hash = normalize_hash(hash_override) or normalize_hash(
|
||||
get_field(result, "hash", None)
|
||||
get_field(result,
|
||||
"hash",
|
||||
None)
|
||||
)
|
||||
store_name = get_field(result, "store", None)
|
||||
subject_path = (
|
||||
get_field(result, "path", None)
|
||||
or get_field(result, "target", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"path",
|
||||
None) or get_field(result,
|
||||
"target",
|
||||
None)
|
||||
or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
item_title = (
|
||||
get_field(result, "title", None)
|
||||
or get_field(result, "name", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"title",
|
||||
None) or get_field(result,
|
||||
"name",
|
||||
None)
|
||||
or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
|
||||
# Only run overwrite-apply when the item is store-backed.
|
||||
# If this is a URL-only PipeObject, fall through to provider mode below.
|
||||
if (
|
||||
file_hash
|
||||
and store_name
|
||||
and str(file_hash).strip().lower() != "unknown"
|
||||
and str(store_name).strip().upper() not in {"PATH", "URL"}
|
||||
):
|
||||
if (file_hash and store_name and str(file_hash).strip().lower() != "unknown"
|
||||
and str(store_name).strip().upper() not in {"PATH",
|
||||
"URL"}):
|
||||
try:
|
||||
from Store import Store
|
||||
|
||||
storage = Store(config)
|
||||
backend = storage[str(store_name)]
|
||||
except Exception as exc:
|
||||
log(f"Failed to resolve store backend '{store_name}': {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"Failed to resolve store backend '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
candidate_urls = _resolve_candidate_urls_for_item(
|
||||
result, backend, file_hash, config
|
||||
result,
|
||||
backend,
|
||||
file_hash,
|
||||
config
|
||||
)
|
||||
scrape_target = _pick_supported_ytdlp_url(candidate_urls)
|
||||
if not scrape_target:
|
||||
@@ -1201,7 +1289,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
try:
|
||||
tags.extend(
|
||||
_extract_subtitle_tags(
|
||||
info_for_subs if isinstance(info_for_subs, dict) else {}
|
||||
info_for_subs if isinstance(info_for_subs,
|
||||
dict) else {}
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
@@ -1220,7 +1309,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
existing_tags = []
|
||||
try:
|
||||
if existing_tags:
|
||||
backend.delete_tag(file_hash, list(existing_tags), config=config)
|
||||
backend.delete_tag(
|
||||
file_hash,
|
||||
list(existing_tags),
|
||||
config=config
|
||||
)
|
||||
except Exception as exc:
|
||||
debug(f"[get_tag] ytdlp overwrite: delete_tag failed: {exc}")
|
||||
try:
|
||||
@@ -1250,7 +1343,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"store": str(store_name),
|
||||
"path": str(subject_path) if subject_path else None,
|
||||
"title": item_title,
|
||||
"extra": {"applied_provider": "ytdlp", "scrape_url": scrape_target},
|
||||
"extra": {
|
||||
"applied_provider": "ytdlp",
|
||||
"scrape_url": scrape_target
|
||||
},
|
||||
},
|
||||
)
|
||||
return 0
|
||||
@@ -1264,7 +1360,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
output = {
|
||||
"title": title,
|
||||
"tag": tags,
|
||||
"formats": [(label, fmt_id) for label, fmt_id in formats],
|
||||
"formats": [(label,
|
||||
fmt_id) for label, fmt_id in formats],
|
||||
"playlist_items": playlist_items,
|
||||
}
|
||||
print(json_module.dumps(output, ensure_ascii=False))
|
||||
@@ -1281,7 +1378,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# the piped PipeObject). Always prefer the current store-backed tags when possible.
|
||||
identifier_tags: List[str] = []
|
||||
file_hash_for_scrape = normalize_hash(hash_override) or normalize_hash(
|
||||
get_field(result, "hash", None)
|
||||
get_field(result,
|
||||
"hash",
|
||||
None)
|
||||
)
|
||||
store_for_scrape = get_field(result, "store", None)
|
||||
if file_hash_for_scrape and store_for_scrape:
|
||||
@@ -1292,7 +1391,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
backend = storage[str(store_for_scrape)]
|
||||
current_tags, _src = backend.get_tag(file_hash_for_scrape, config=config)
|
||||
if isinstance(current_tags, (list, tuple, set)) and current_tags:
|
||||
identifier_tags = [str(t) for t in current_tags if isinstance(t, (str, bytes))]
|
||||
identifier_tags = [
|
||||
str(t) for t in current_tags if isinstance(t, (str, bytes))
|
||||
]
|
||||
except Exception:
|
||||
# Fall back to whatever is present on the piped result if store lookup fails.
|
||||
pass
|
||||
@@ -1301,27 +1402,34 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if not identifier_tags:
|
||||
result_tags = get_field(result, "tag", None)
|
||||
if isinstance(result_tags, list):
|
||||
identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))]
|
||||
identifier_tags = [
|
||||
str(t) for t in result_tags if isinstance(t, (str, bytes))
|
||||
]
|
||||
|
||||
# As a last resort, try local sidecar only when the item is not store-backed.
|
||||
if not identifier_tags and (not file_hash_for_scrape or not store_for_scrape):
|
||||
file_path = (
|
||||
get_field(result, "target", None)
|
||||
or get_field(result, "path", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"target",
|
||||
None) or get_field(result,
|
||||
"path",
|
||||
None)
|
||||
or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
if (
|
||||
isinstance(file_path, str)
|
||||
and file_path
|
||||
and not file_path.lower().startswith(("http://", "https://"))
|
||||
):
|
||||
if (isinstance(file_path,
|
||||
str) and file_path and not file_path.lower().startswith(
|
||||
("http://",
|
||||
"https://"))):
|
||||
try:
|
||||
media_path = Path(str(file_path))
|
||||
if media_path.exists():
|
||||
tags_from_sidecar = read_sidecar(media_path)
|
||||
if isinstance(tags_from_sidecar, list):
|
||||
identifier_tags = [
|
||||
str(t) for t in tags_from_sidecar if isinstance(t, (str, bytes))
|
||||
str(t) for t in tags_from_sidecar
|
||||
if isinstance(t, (str, bytes))
|
||||
]
|
||||
except Exception:
|
||||
pass
|
||||
@@ -1332,12 +1440,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
identifiers = _extract_scrapable_identifiers(identifier_tags)
|
||||
identifier_query: Optional[str] = None
|
||||
if identifiers:
|
||||
if provider.name in {"openlibrary", "googlebooks", "google"}:
|
||||
if provider.name in {"openlibrary",
|
||||
"googlebooks",
|
||||
"google"}:
|
||||
identifier_query = (
|
||||
identifiers.get("isbn_13")
|
||||
or identifiers.get("isbn_10")
|
||||
or identifiers.get("isbn")
|
||||
or identifiers.get("openlibrary")
|
||||
identifiers.get("isbn_13") or identifiers.get("isbn_10")
|
||||
or identifiers.get("isbn") or identifiers.get("openlibrary")
|
||||
)
|
||||
elif provider.name == "itunes":
|
||||
identifier_query = identifiers.get("musicbrainz") or identifiers.get(
|
||||
@@ -1346,16 +1454,26 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Determine query from identifier first, else title on the result or filename
|
||||
title_hint = (
|
||||
title_from_tags or get_field(result, "title", None) or get_field(result, "name", None)
|
||||
title_from_tags or get_field(result,
|
||||
"title",
|
||||
None) or get_field(result,
|
||||
"name",
|
||||
None)
|
||||
)
|
||||
if not title_hint:
|
||||
file_path = get_field(result, "path", None) or get_field(result, "filename", None)
|
||||
file_path = get_field(result,
|
||||
"path",
|
||||
None) or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
if file_path:
|
||||
title_hint = Path(str(file_path)).stem
|
||||
artist_hint = (
|
||||
artist_from_tags
|
||||
or get_field(result, "artist", None)
|
||||
or get_field(result, "uploader", None)
|
||||
artist_from_tags or get_field(result,
|
||||
"artist",
|
||||
None) or get_field(result,
|
||||
"uploader",
|
||||
None)
|
||||
)
|
||||
if not artist_hint:
|
||||
meta_field = get_field(result, "metadata", None)
|
||||
@@ -1365,12 +1483,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
artist_hint = str(meta_artist)
|
||||
|
||||
combined_query: Optional[str] = None
|
||||
if (
|
||||
not identifier_query
|
||||
and title_hint
|
||||
and artist_hint
|
||||
and provider.name in {"itunes", "musicbrainz"}
|
||||
):
|
||||
if (not identifier_query and title_hint and artist_hint
|
||||
and provider.name in {"itunes",
|
||||
"musicbrainz"}):
|
||||
if provider.name == "musicbrainz":
|
||||
combined_query = f'recording:"{title_hint}" AND artist:"{artist_hint}"'
|
||||
else:
|
||||
@@ -1380,18 +1495,27 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
url_hint: Optional[str] = None
|
||||
if provider.name == "ytdlp":
|
||||
raw_url = (
|
||||
get_field(result, "url", None)
|
||||
or get_field(result, "source_url", None)
|
||||
or get_field(result, "target", None)
|
||||
get_field(result,
|
||||
"url",
|
||||
None) or get_field(result,
|
||||
"source_url",
|
||||
None) or get_field(result,
|
||||
"target",
|
||||
None)
|
||||
)
|
||||
if isinstance(raw_url, list) and raw_url:
|
||||
raw_url = raw_url[0]
|
||||
if isinstance(raw_url, str) and raw_url.strip().startswith(("http://", "https://")):
|
||||
if isinstance(raw_url,
|
||||
str) and raw_url.strip().startswith(("http://",
|
||||
"https://")):
|
||||
url_hint = raw_url.strip()
|
||||
|
||||
query_hint = url_hint or identifier_query or combined_query or title_hint
|
||||
if not query_hint:
|
||||
log("No title or identifier available to search for metadata", file=sys.stderr)
|
||||
log(
|
||||
"No title or identifier available to search for metadata",
|
||||
file=sys.stderr
|
||||
)
|
||||
return 1
|
||||
|
||||
if identifier_query:
|
||||
@@ -1423,7 +1547,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
config=config,
|
||||
item_title=str(items[0].get("title") or "ytdlp"),
|
||||
path=None,
|
||||
subject={"provider": "ytdlp", "url": str(query_hint)},
|
||||
subject={
|
||||
"provider": "ytdlp",
|
||||
"url": str(query_hint)
|
||||
},
|
||||
)
|
||||
return 0
|
||||
|
||||
@@ -1433,15 +1560,21 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
table.set_source_command("get-tag", [])
|
||||
selection_payload = []
|
||||
hash_for_payload = normalize_hash(hash_override) or normalize_hash(
|
||||
get_field(result, "hash", None)
|
||||
get_field(result,
|
||||
"hash",
|
||||
None)
|
||||
)
|
||||
store_for_payload = get_field(result, "store", None)
|
||||
# Preserve a consistent path field when present so selecting a metadata row
|
||||
# keeps referring to the original file.
|
||||
path_for_payload = (
|
||||
get_field(result, "path", None)
|
||||
or get_field(result, "target", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"path",
|
||||
None) or get_field(result,
|
||||
"target",
|
||||
None) or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
for idx, item in enumerate(items):
|
||||
tags = _filter_scraped_tags(provider.to_tags(item))
|
||||
@@ -1488,22 +1621,35 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
result_provider = get_field(result, "provider", None)
|
||||
result_tags = get_field(result, "tag", None)
|
||||
if result_provider and isinstance(result_tags, list) and result_tags:
|
||||
file_hash = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None))
|
||||
file_hash = normalize_hash(hash_override) or normalize_hash(
|
||||
get_field(result,
|
||||
"hash",
|
||||
None)
|
||||
)
|
||||
store_name = get_field(result, "store", None)
|
||||
subject_path = (
|
||||
get_field(result, "path", None)
|
||||
or get_field(result, "target", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"path",
|
||||
None) or get_field(result,
|
||||
"target",
|
||||
None) or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
if not file_hash or not store_name:
|
||||
log("Selected metadata row is missing hash/store; cannot apply tags", file=sys.stderr)
|
||||
log(
|
||||
"Selected metadata row is missing hash/store; cannot apply tags",
|
||||
file=sys.stderr
|
||||
)
|
||||
_emit_tags_as_table(
|
||||
tags_list=[str(t) for t in result_tags if t is not None],
|
||||
file_hash=file_hash,
|
||||
store=str(store_name or "local"),
|
||||
service_name=None,
|
||||
config=config,
|
||||
item_title=str(get_field(result, "title", None) or result_provider),
|
||||
item_title=str(get_field(result,
|
||||
"title",
|
||||
None) or result_provider),
|
||||
path=str(subject_path) if subject_path else None,
|
||||
subject=result,
|
||||
)
|
||||
@@ -1513,7 +1659,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if str(result_provider).strip().lower() == "ytdlp":
|
||||
apply_tags = [str(t) for t in result_tags if t is not None]
|
||||
else:
|
||||
apply_tags = _filter_scraped_tags([str(t) for t in result_tags if t is not None])
|
||||
apply_tags = _filter_scraped_tags(
|
||||
[str(t) for t in result_tags if t is not None]
|
||||
)
|
||||
if not apply_tags:
|
||||
log(
|
||||
"No applicable scraped tags to apply (title:/artist:/source: are skipped)",
|
||||
@@ -1547,17 +1695,25 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
service_name=None,
|
||||
config=config,
|
||||
item_title=str(
|
||||
get_field(result, "title", None)
|
||||
or get_field(result, "name", None)
|
||||
or str(result_provider)
|
||||
get_field(result,
|
||||
"title",
|
||||
None) or get_field(result,
|
||||
"name",
|
||||
None) or str(result_provider)
|
||||
),
|
||||
path=str(subject_path) if subject_path else None,
|
||||
subject={
|
||||
"hash": file_hash,
|
||||
"store": str(store_name),
|
||||
"path": str(subject_path) if subject_path else None,
|
||||
"title": get_field(result, "title", None) or get_field(result, "name", None),
|
||||
"extra": {"applied_provider": str(result_provider)},
|
||||
"title": get_field(result,
|
||||
"title",
|
||||
None) or get_field(result,
|
||||
"name",
|
||||
None),
|
||||
"extra": {
|
||||
"applied_provider": str(result_provider)
|
||||
},
|
||||
},
|
||||
)
|
||||
return 0
|
||||
@@ -1603,28 +1759,37 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Always output to ResultTable (pipeline mode only)
|
||||
# Extract title for table header
|
||||
item_title = (
|
||||
get_field(result, "title", None)
|
||||
or get_field(result, "name", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"title",
|
||||
None) or get_field(result,
|
||||
"name",
|
||||
None) or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
|
||||
# Build a subject payload representing the file whose tags are being shown
|
||||
subject_store = get_field(result, "store", None) or store_name
|
||||
subject_path = (
|
||||
get_field(result, "path", None)
|
||||
or get_field(result, "target", None)
|
||||
or get_field(result, "filename", None)
|
||||
get_field(result,
|
||||
"path",
|
||||
None) or get_field(result,
|
||||
"target",
|
||||
None) or get_field(result,
|
||||
"filename",
|
||||
None)
|
||||
)
|
||||
subject_payload: Dict[str, Any] = {
|
||||
"tag": list(current),
|
||||
"title": item_title,
|
||||
"name": item_title,
|
||||
"store": subject_store,
|
||||
"service_name": service_name,
|
||||
"extra": {
|
||||
"tag": list(current),
|
||||
},
|
||||
}
|
||||
subject_payload: Dict[str,
|
||||
Any] = {
|
||||
"tag": list(current),
|
||||
"title": item_title,
|
||||
"name": item_title,
|
||||
"store": subject_store,
|
||||
"service_name": service_name,
|
||||
"extra": {
|
||||
"tag": list(current),
|
||||
},
|
||||
}
|
||||
if file_hash:
|
||||
subject_payload["hash"] = file_hash
|
||||
if subject_path:
|
||||
@@ -1646,7 +1811,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# If emit requested or store key provided, emit payload
|
||||
if emit_mode:
|
||||
_emit_tag_payload(source, current, hash_value=file_hash, store_label=store_label)
|
||||
_emit_tag_payload(
|
||||
source,
|
||||
current,
|
||||
hash_value=file_hash,
|
||||
store_label=store_label
|
||||
)
|
||||
|
||||
return 0
|
||||
|
||||
@@ -1671,7 +1841,8 @@ class Get_Tag(Cmdlet):
|
||||
super().__init__(
|
||||
name="get-tag",
|
||||
summary="Get tag values from Hydrus or local sidecar metadata",
|
||||
usage='get-tag [-query "hash:<sha256>"] [--store <key>] [--emit] [-scrape <url|provider>]',
|
||||
usage=
|
||||
'get-tag [-query "hash:<sha256>"] [--store <key>] [--emit] [-scrape <url|provider>]',
|
||||
alias=[],
|
||||
arg=[
|
||||
SharedArgs.QUERY,
|
||||
@@ -1690,7 +1861,8 @@ class Get_Tag(Cmdlet):
|
||||
CmdletArg(
|
||||
name="-scrape",
|
||||
type="string",
|
||||
description="Scrape metadata from URL/provider, or use 'ytdlp' to scrape from the item's URL and overwrite tags",
|
||||
description=
|
||||
"Scrape metadata from URL/provider, or use 'ytdlp' to scrape from the item's URL and overwrite tags",
|
||||
required=False,
|
||||
choices=_SCRAPE_CHOICES,
|
||||
),
|
||||
|
||||
@@ -36,7 +36,9 @@ class Get_Url(Cmdlet):
name="get-url",
summary="List url associated with a file, or search urls by pattern",
usage='@1 | get-url OR get-url -url "https://www.youtube.com/watch?v=xx"',
arg=[SharedArgs.QUERY, SharedArgs.STORE, SharedArgs.URL],
arg=[SharedArgs.QUERY,
SharedArgs.STORE,
SharedArgs.URL],
detail=[
"- Get url for file: @1 | get-url (requires hash+store from result)",
'- Search url across stores: get-url -url "www.google.com" (strips protocol & www prefix)',
@@ -79,9 +81,11 @@ class Get_Url(Cmdlet):
|
||||
# Use fnmatch for wildcard matching (* and ?)
|
||||
return fnmatch(normalized_url, normalized_pattern)
|
||||
|
||||
def _search_urls_across_stores(
|
||||
self, pattern: str, config: Dict[str, Any]
|
||||
) -> Tuple[List[UrlItem], List[str]]:
|
||||
def _search_urls_across_stores(self,
|
||||
pattern: str,
|
||||
config: Dict[str,
|
||||
Any]) -> Tuple[List[UrlItem],
|
||||
List[str]]:
|
||||
"""Search for URLs matching pattern across all stores.
|
||||
|
||||
Returns:
|
||||
@@ -92,7 +96,8 @@ class Get_Url(Cmdlet):
|
||||
|
||||
try:
|
||||
storage = Store(config)
|
||||
store_names = storage.list_backends() if hasattr(storage, "list_backends") else []
|
||||
store_names = storage.list_backends() if hasattr(storage,
|
||||
"list_backends") else []
|
||||
|
||||
if not store_names:
|
||||
log("Error: No stores configured", file=sys.stderr)
|
||||
@@ -111,7 +116,8 @@ class Get_Url(Cmdlet):
|
||||
search_results = backend.search("*", limit=1000)
|
||||
if search_results:
|
||||
for result in search_results:
|
||||
file_hash = result.get("hash") or result.get("file_hash")
|
||||
file_hash = result.get("hash"
|
||||
) or result.get("file_hash")
|
||||
if not file_hash:
|
||||
continue
|
||||
|
||||
@@ -119,7 +125,8 @@ class Get_Url(Cmdlet):
|
||||
urls = backend.get_url(file_hash)
|
||||
if urls:
|
||||
for url in urls:
|
||||
if self._match_url_pattern(str(url), pattern):
|
||||
if self._match_url_pattern(str(url),
|
||||
pattern):
|
||||
items.append(
|
||||
UrlItem(
|
||||
url=str(url),
|
||||
@@ -137,7 +144,10 @@ class Get_Url(Cmdlet):
|
||||
except KeyError:
|
||||
continue
|
||||
except Exception as exc:
|
||||
debug(f"Error searching store '{store_name}': {exc}", file=sys.stderr)
|
||||
debug(
|
||||
f"Error searching store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
|
||||
return items, list(found_stores)
|
||||
@@ -165,15 +175,16 @@ class Get_Url(Cmdlet):
|
||||
from result_table import ResultTable
|
||||
|
||||
table = (
|
||||
ResultTable("URL Search Results", max_columns=3)
|
||||
.set_preserve_order(True)
|
||||
.set_table("urls")
|
||||
.set_value_case("preserve")
|
||||
ResultTable(
|
||||
"URL Search Results",
|
||||
max_columns=3
|
||||
).set_preserve_order(True).set_table("urls").set_value_case("preserve")
|
||||
)
|
||||
table.set_source_command("get-url", ["-url", search_pattern])
|
||||
|
||||
# Group by store for display
|
||||
by_store: Dict[str, List[UrlItem]] = {}
|
||||
by_store: Dict[str,
|
||||
List[UrlItem]] = {}
|
||||
for item in items:
|
||||
if item.store not in by_store:
|
||||
by_store[item.store] = []
|
||||
@@ -195,7 +206,9 @@ class Get_Url(Cmdlet):
|
||||
ctx.emit(item)
|
||||
|
||||
ctx.set_last_result_table(table if items else None, items, subject=result)
|
||||
log(f"Found {len(items)} matching url(s) in {len(stores_searched)} store(s)")
|
||||
log(
|
||||
f"Found {len(items)} matching url(s) in {len(stores_searched)} store(s)"
|
||||
)
|
||||
return 0
|
||||
|
||||
# Original mode: Get URLs for a specific file by hash+store
|
||||
@@ -209,7 +222,9 @@ class Get_Url(Cmdlet):
|
||||
store_name = parsed.get("store") or get_field(result, "store")
|
||||
|
||||
if not file_hash:
|
||||
log('Error: No file hash provided (pipe an item or use -query "hash:<sha256>")')
|
||||
log(
|
||||
'Error: No file hash provided (pipe an item or use -query "hash:<sha256>")'
|
||||
)
|
||||
return 1
|
||||
|
||||
if not store_name:
|
||||
@@ -237,10 +252,10 @@ class Get_Url(Cmdlet):
|
||||
table_title = f"Title: {title}"
|
||||
|
||||
table = (
|
||||
ResultTable(table_title, max_columns=1)
|
||||
.set_preserve_order(True)
|
||||
.set_table("url")
|
||||
.set_value_case("preserve")
|
||||
ResultTable(
|
||||
table_title,
|
||||
max_columns=1
|
||||
).set_preserve_order(True).set_table("url").set_value_case("preserve")
|
||||
)
|
||||
table.set_source_command("get-url", [])
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import subprocess as _subprocess
import shutil as _shutil
import re as _re

from config import resolve_output_dir
from SYS.config import resolve_output_dir

from . import _shared as sh

@@ -62,7 +62,8 @@ except ImportError:
|
||||
def dedup_tags_by_namespace(tags: List[str]) -> List[str]:
|
||||
return tags
|
||||
|
||||
def merge_multiple_tag_lists(sources: List[List[str]], strategy: str = "first") -> List[str]:
|
||||
def merge_multiple_tag_lists(sources: List[List[str]],
|
||||
strategy: str = "first") -> List[str]:
|
||||
out: List[str] = []
|
||||
seen: set[str] = set()
|
||||
for src in sources:
|
||||
@@ -167,7 +168,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
cookiefile = None
|
||||
|
||||
fmts = list_formats(
|
||||
sample_url, no_playlist=False, playlist_items=None, cookiefile=cookiefile
|
||||
sample_url,
|
||||
no_playlist=False,
|
||||
playlist_items=None,
|
||||
cookiefile=cookiefile
|
||||
)
|
||||
if isinstance(fmts, list) and fmts:
|
||||
has_video = False
|
||||
@@ -263,7 +267,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
elif isinstance(url, list):
|
||||
source_url.extend(url)
|
||||
else:
|
||||
title = get_field(item, "title", "unknown") or get_field(item, "id", "unknown")
|
||||
title = get_field(item,
|
||||
"title",
|
||||
"unknown") or get_field(item,
|
||||
"id",
|
||||
"unknown")
|
||||
log(f"Warning: Could not locate file for item: {title}", file=sys.stderr)
|
||||
|
||||
if len(source_files) < 2:
|
||||
@@ -274,31 +282,45 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
file_types = set()
|
||||
for f in source_files:
|
||||
suffix = f.suffix.lower()
|
||||
if suffix in {".mp3", ".flac", ".wav", ".m4a", ".aac", ".ogg", ".opus", ".mka"}:
|
||||
if suffix in {".mp3",
|
||||
".flac",
|
||||
".wav",
|
||||
".m4a",
|
||||
".aac",
|
||||
".ogg",
|
||||
".opus",
|
||||
".mka"}:
|
||||
file_types.add("audio")
|
||||
elif suffix in {
|
||||
".mp4",
|
||||
".mkv",
|
||||
".webm",
|
||||
".mov",
|
||||
".avi",
|
||||
".flv",
|
||||
".mpg",
|
||||
".mpeg",
|
||||
".ts",
|
||||
".m4v",
|
||||
".wmv",
|
||||
".mp4",
|
||||
".mkv",
|
||||
".webm",
|
||||
".mov",
|
||||
".avi",
|
||||
".flv",
|
||||
".mpg",
|
||||
".mpeg",
|
||||
".ts",
|
||||
".m4v",
|
||||
".wmv",
|
||||
}:
|
||||
file_types.add("video")
|
||||
elif suffix in {".pdf"}:
|
||||
file_types.add("pdf")
|
||||
elif suffix in {".txt", ".srt", ".vtt", ".md", ".log"}:
|
||||
elif suffix in {".txt",
|
||||
".srt",
|
||||
".vtt",
|
||||
".md",
|
||||
".log"}:
|
||||
file_types.add("text")
|
||||
else:
|
||||
file_types.add("other")
|
||||
|
||||
if len(file_types) > 1 and "other" not in file_types:
|
||||
log(f"Mixed file types detected: {', '.join(sorted(file_types))}", file=sys.stderr)
|
||||
log(
|
||||
f"Mixed file types detected: {', '.join(sorted(file_types))}",
|
||||
file=sys.stderr
|
||||
)
|
||||
log(f"Can only merge files of the same type", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
@@ -331,7 +353,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
except Exception:
|
||||
base_dir = first_file.parent
|
||||
output_path = (
|
||||
Path(base_dir) / f"{first_file.stem} (merged).{_ext_for_format(output_format)}"
|
||||
Path(base_dir) /
|
||||
f"{first_file.stem} (merged).{_ext_for_format(output_format)}"
|
||||
)
|
||||
|
||||
# Ensure output directory exists
|
||||
@@ -393,7 +416,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# - .tag sidecars (if present)
|
||||
# Keep all unique plain tags, and keep the first value for namespaced tags.
|
||||
merged_tags = merge_multiple_tag_lists(
|
||||
source_item_tag_lists + ([source_tags] if source_tags else []), strategy="combine"
|
||||
source_item_tag_lists + ([source_tags] if source_tags else []),
|
||||
strategy="combine"
|
||||
)
|
||||
|
||||
# Ensure we always have a title tag (and make sure it's the chosen title)
|
||||
@@ -437,7 +461,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
tag_file.unlink()
|
||||
log(f"Deleted: {tag_file.name}", file=sys.stderr)
|
||||
except Exception as e:
|
||||
log(f"Warning: Could not delete {tag_file.name}: {e}", file=sys.stderr)
|
||||
log(
|
||||
f"Warning: Could not delete {tag_file.name}: {e}",
|
||||
file=sys.stderr
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -455,7 +482,10 @@ def _sanitize_name(text: str) -> str:
|
||||
"""Sanitize filename."""
|
||||
allowed = []
|
||||
for ch in text:
|
||||
allowed.append(ch if (ch.isalnum() or ch in {"-", "_", " ", "."}) else " ")
|
||||
allowed.append(ch if (ch.isalnum() or ch in {"-",
|
||||
"_",
|
||||
" ",
|
||||
"."}) else " ")
|
||||
return (" ".join("".join(allowed).split()) or "merged").strip()
|
||||
|
||||
|
||||
@@ -512,7 +542,10 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
]
|
||||
|
||||
probe_result = _subprocess.run(
|
||||
ffprobe_cmd, capture_output=True, text=True, timeout=10
|
||||
ffprobe_cmd,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=10
|
||||
)
|
||||
if probe_result.returncode == 0 and probe_result.stdout.strip():
|
||||
try:
|
||||
@@ -528,7 +561,9 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
)
|
||||
duration_sec = 0
|
||||
except Exception as e:
|
||||
logger.warning(f"[merge-file] Could not get duration for {file_path.name}: {e}")
|
||||
logger.warning(
|
||||
f"[merge-file] Could not get duration for {file_path.name}: {e}"
|
||||
)
|
||||
duration_sec = 0
|
||||
|
||||
# Create chapter entry - use title: tag from metadata if available
|
||||
@@ -542,12 +577,15 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
if tags:
|
||||
# Look for title: tag
|
||||
for tag in tags:
|
||||
if isinstance(tag, str) and tag.lower().startswith("title:"):
|
||||
if isinstance(tag,
|
||||
str) and tag.lower().startswith("title:"):
|
||||
# Extract the title value after the colon
|
||||
title = tag.split(":", 1)[1].strip()
|
||||
break
|
||||
except Exception as e:
|
||||
logger.debug(f"[merge-file] Could not read metadata for {file_path.name}: {e}")
|
||||
logger.debug(
|
||||
f"[merge-file] Could not read metadata for {file_path.name}: {e}"
|
||||
)
|
||||
pass # Fall back to filename
|
||||
|
||||
# Convert seconds to HH:MM:SS.mmm format
|
||||
@@ -626,7 +664,10 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
metadata_lines.append(f'title={chapter["title"]}')
|
||||
|
||||
metadata_file.write_text("\n".join(metadata_lines), encoding="utf-8")
|
||||
log(f"Created chapters metadata file with {len(chapters)} chapters", file=sys.stderr)
|
||||
log(
|
||||
f"Created chapters metadata file with {len(chapters)} chapters",
|
||||
file=sys.stderr
|
||||
)
|
||||
logger.info(f"[merge-file] Created {len(chapters)} chapters")
|
||||
|
||||
# Step 4: Build FFmpeg command to merge and embed chapters
|
||||
@@ -639,7 +680,8 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
# Audio codec selection for first input
|
||||
if output_format == "mp3":
|
||||
cmd.extend(["-c:a", "libmp3lame", "-q:a", "2"])
|
||||
elif output_format in {"m4a", "m4b"}:
|
||||
elif output_format in {"m4a",
|
||||
"m4b"}:
|
||||
# Use copy if possible (much faster), otherwise re-encode
|
||||
# Check if inputs are already AAC/M4A to avoid re-encoding
|
||||
# For now, default to copy if format matches, otherwise re-encode
|
||||
@@ -714,7 +756,10 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
if process.returncode != 0:
|
||||
log(f"FFmpeg error: {stderr}", file=sys.stderr)
|
||||
raise _subprocess.CalledProcessError(
|
||||
process.returncode, cmd, output=stdout, stderr=stderr
|
||||
process.returncode,
|
||||
cmd,
|
||||
output=stdout,
|
||||
stderr=stderr
|
||||
)
|
||||
|
||||
print_final_progress(output.name, int(total_duration_sec * 1000), 0)
|
||||
@@ -807,17 +852,24 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
logger.warning(f"[merge-file] Chapter embedding did not create output")
|
||||
logger.warning(
|
||||
f"[merge-file] Chapter embedding did not create output"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(f"[merge-file] Chapter embedding failed: {e}")
|
||||
log(
|
||||
f"Warning: Chapter embedding failed, using merge without chapters",
|
||||
file=sys.stderr,
|
||||
)
|
||||
elif output_format in {"m4a", "m4b"} or output.suffix.lower() in [".m4a", ".m4b", ".mp4"]:
|
||||
elif output_format in {"m4a",
|
||||
"m4b"} or output.suffix.lower() in [".m4a",
|
||||
".m4b",
|
||||
".mp4"]:
|
||||
# MP4/M4A format has native chapter support via iTunes metadata atoms
|
||||
log(f"Embedding chapters into MP4 container...", file=sys.stderr)
|
||||
logger.info(f"[merge-file] Adding chapters to M4A/MP4 file via iTunes metadata")
|
||||
logger.info(
|
||||
f"[merge-file] Adding chapters to M4A/MP4 file via iTunes metadata"
|
||||
)
|
||||
|
||||
temp_output = output.parent / f".temp_{output.stem}{output.suffix}"
|
||||
|
||||
@@ -864,7 +916,10 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
if output.exists():
|
||||
output.unlink()
|
||||
shutil.move(str(temp_output), str(output))
|
||||
log(f"✓ Chapters successfully embedded in MP4!", file=sys.stderr)
|
||||
log(
|
||||
f"✓ Chapters successfully embedded in MP4!",
|
||||
file=sys.stderr
|
||||
)
|
||||
logger.info(f"[merge-file] MP4 chapters embedded successfully")
|
||||
except Exception as e:
|
||||
logger.warning(f"[merge-file] Could not replace file: {e}")
|
||||
@@ -877,7 +932,9 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
logger.warning(f"[merge-file] MP4 chapter embedding did not create output")
|
||||
logger.warning(
|
||||
f"[merge-file] MP4 chapter embedding did not create output"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(f"[merge-file] MP4 chapter embedding failed: {e}")
|
||||
log(
|
||||
@@ -886,7 +943,9 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
)
|
||||
else:
|
||||
# For other formats, chapters would require external tools
|
||||
logger.info(f"[merge-file] Format {output_format} does not have native chapter support")
|
||||
logger.info(
|
||||
f"[merge-file] Format {output_format} does not have native chapter support"
|
||||
)
|
||||
log(f"Note: For chapter support, use MKA or M4A format", file=sys.stderr)
|
||||
|
||||
# Clean up temp files
|
||||
@@ -944,7 +1003,16 @@ def _merge_video(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
]
|
||||
)
|
||||
elif output_format == "mkv":
|
||||
cmd.extend(["-c:v", "libx265", "-preset", "fast", "-c:a", "aac", "-b:a", "192k"])
|
||||
cmd.extend(
|
||||
["-c:v",
|
||||
"libx265",
|
||||
"-preset",
|
||||
"fast",
|
||||
"-c:a",
|
||||
"aac",
|
||||
"-b:a",
|
||||
"192k"]
|
||||
)
|
||||
else:
|
||||
cmd.extend(["-c", "copy"]) # Copy without re-encoding
|
||||
|
||||
@@ -994,7 +1062,10 @@ def _merge_text(files: List[Path], output: Path) -> bool:
|
||||
def _merge_pdf(files: List[Path], output: Path) -> bool:
|
||||
"""Merge PDF files."""
|
||||
if (not HAS_PYPDF) or (PdfWriter is None) or (PdfReader is None):
|
||||
log("pypdf is required for PDF merging. Install with: pip install pypdf", file=sys.stderr)
|
||||
log(
|
||||
"pypdf is required for PDF merging. Install with: pip install pypdf",
|
||||
file=sys.stderr
|
||||
)
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -1022,16 +1093,21 @@ def _merge_pdf(files: List[Path], output: Path) -> bool:
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="merge-file",
|
||||
summary="Merge multiple files into a single output file. Supports audio, video, PDF, and text merging with optional cleanup.",
|
||||
usage="merge-file [-delete] [-path <path>] [-format <auto|mka|m4a|m4b|mp3|aac|opus|mp4|mkv|pdf|txt>]",
|
||||
summary=
|
||||
"Merge multiple files into a single output file. Supports audio, video, PDF, and text merging with optional cleanup.",
|
||||
usage=
|
||||
"merge-file [-delete] [-path <path>] [-format <auto|mka|m4a|m4b|mp3|aac|opus|mp4|mkv|pdf|txt>]",
|
||||
arg=[
|
||||
CmdletArg(
|
||||
"-delete", type="flag", description="Delete source files after successful merge."
|
||||
"-delete",
|
||||
type="flag",
|
||||
description="Delete source files after successful merge."
|
||||
),
|
||||
SharedArgs.PATH,
|
||||
CmdletArg(
|
||||
"-format",
|
||||
description="Output format (auto/mka/m4a/m4b/mp3/aac/opus/mp4/mkv/pdf/txt). Default: auto-detect from first file.",
|
||||
description=
|
||||
"Output format (auto/mka/m4a/m4b/mp3/aac/opus/mp4/mkv/pdf/txt). Default: auto-detect from first file.",
|
||||
),
|
||||
],
|
||||
detail=[
|
||||
|
||||
@@ -36,7 +36,6 @@ import pipeline as pipeline_context
# CMDLET Metadata Declaration
# ============================================================================


# ============================================================================
# Playwright & Screenshot Dependencies
# ============================================================================
@@ -44,13 +43,13 @@ import pipeline as pipeline_context
from tool.playwright import HAS_PLAYWRIGHT, PlaywrightTimeoutError, PlaywrightTool

try:
from config import resolve_output_dir
from SYS.config import resolve_output_dir
except ImportError:
try:
_parent_dir = str(Path(__file__).parent.parent)
if _parent_dir not in sys.path:
sys.path.insert(0, _parent_dir)
from config import resolve_output_dir
from SYS.config import resolve_output_dir
except ImportError:
resolve_output_dir = None

@@ -64,7 +63,11 @@ USER_AGENT = (
|
||||
"Chrome/120.0.0.0 Safari/537.36"
|
||||
)
|
||||
|
||||
DEFAULT_VIEWPORT: dict[str, int] = {"width": 1920, "height": 1080}
|
||||
DEFAULT_VIEWPORT: dict[str,
|
||||
int] = {
|
||||
"width": 1920,
|
||||
"height": 1080
|
||||
}
|
||||
ARCHIVE_TIMEOUT = 30.0
|
||||
|
||||
# WebP has a hard maximum dimension per side.
|
||||
@@ -72,35 +75,36 @@ ARCHIVE_TIMEOUT = 30.0
|
||||
WEBP_MAX_DIM = 16_383
|
||||
|
||||
# Configurable selectors for specific websites
|
||||
SITE_SELECTORS: Dict[str, List[str]] = {
|
||||
"twitter.com": [
|
||||
"article[role='article']",
|
||||
"div[data-testid='tweet']",
|
||||
"div[data-testid='cellInnerDiv'] article",
|
||||
],
|
||||
"x.com": [
|
||||
"article[role='article']",
|
||||
"div[data-testid='tweet']",
|
||||
"div[data-testid='cellInnerDiv'] article",
|
||||
],
|
||||
"instagram.com": [
|
||||
"article[role='presentation']",
|
||||
"article[role='article']",
|
||||
"div[role='dialog'] article",
|
||||
"section main article",
|
||||
],
|
||||
"reddit.com": [
|
||||
"shreddit-post",
|
||||
"div[data-testid='post-container']",
|
||||
"div[data-click-id='background']",
|
||||
"article",
|
||||
],
|
||||
"rumble.com": [
|
||||
"rumble-player, iframe.rumble",
|
||||
"div.video-item--main",
|
||||
"main article",
|
||||
],
|
||||
}
|
||||
SITE_SELECTORS: Dict[str,
|
||||
List[str]] = {
|
||||
"twitter.com": [
|
||||
"article[role='article']",
|
||||
"div[data-testid='tweet']",
|
||||
"div[data-testid='cellInnerDiv'] article",
|
||||
],
|
||||
"x.com": [
|
||||
"article[role='article']",
|
||||
"div[data-testid='tweet']",
|
||||
"div[data-testid='cellInnerDiv'] article",
|
||||
],
|
||||
"instagram.com": [
|
||||
"article[role='presentation']",
|
||||
"article[role='article']",
|
||||
"div[role='dialog'] article",
|
||||
"section main article",
|
||||
],
|
||||
"reddit.com": [
|
||||
"shreddit-post",
|
||||
"div[data-testid='post-container']",
|
||||
"div[data-click-id='background']",
|
||||
"article",
|
||||
],
|
||||
"rumble.com": [
|
||||
"rumble-player, iframe.rumble",
|
||||
"div.video-item--main",
|
||||
"main article",
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class ScreenshotError(RuntimeError):
|
||||
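In the hunk above even short literals such as DEFAULT_VIEWPORT are broken into one entry per line, and the Dict[str, List[str]] annotation on SITE_SELECTORS is split inside the subscript. yapf can also report this kind of before/after view as a unified diff without rewriting anything, which is useful for previewing a style change; a small sketch, again with an assumed style string rather than the project's real settings:

    from yapf.yapflib.yapf_api import FormatCode

    snippet = (
        'SITE_SELECTORS: Dict[str, List[str]] = '
        '{"reddit.com": ["shreddit-post", "article"], "x.com": ["article"]}\n'
    )
    # print_diff=True returns a unified diff rather than the rewritten source.
    diff_text, changed = FormatCode(
        snippet,
        print_diff=True,
        style_config="{based_on_style: pep8, split_all_comma_separated_values: true}",
    )
    if changed:
        print(diff_text)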
@@ -172,9 +176,13 @@ def _tags_from_url(url: str) -> List[str]:
|
||||
try:
|
||||
parsed = urlsplit(u)
|
||||
host = (
|
||||
str(getattr(parsed, "hostname", None) or getattr(parsed, "netloc", "") or "")
|
||||
.strip()
|
||||
.lower()
|
||||
str(
|
||||
getattr(parsed,
|
||||
"hostname",
|
||||
None) or getattr(parsed,
|
||||
"netloc",
|
||||
"") or ""
|
||||
).strip().lower()
|
||||
)
|
||||
except Exception:
|
||||
parsed = None
|
||||
@@ -187,7 +195,7 @@ def _tags_from_url(url: str) -> List[str]:
|
||||
if ":" in host:
|
||||
host = host.split(":", 1)[0]
|
||||
if host.startswith("www."):
|
||||
host = host[len("www.") :]
|
||||
host = host[len("www."):]
|
||||
|
||||
path = ""
|
||||
if parsed is not None:
|
||||
@@ -230,7 +238,7 @@ def _title_from_url(url: str) -> str:
|
||||
"""Return the normalized title derived from a URL's last path segment."""
|
||||
for t in _tags_from_url(url):
|
||||
if str(t).lower().startswith("title:"):
|
||||
return str(t)[len("title:") :].strip()
|
||||
return str(t)[len("title:"):].strip()
|
||||
return ""
|
||||
|
||||
|
||||
@@ -239,9 +247,12 @@ def _normalise_format(fmt: Optional[str]) -> str:
|
||||
if not fmt:
|
||||
return "webp"
|
||||
value = fmt.strip().lower()
|
||||
if value in {"jpg", "jpeg"}:
|
||||
if value in {"jpg",
|
||||
"jpeg"}:
|
||||
return "jpeg"
|
||||
if value in {"png", "pdf", "webp"}:
|
||||
if value in {"png",
|
||||
"pdf",
|
||||
"webp"}:
|
||||
return value
|
||||
return "webp"
|
||||
|
||||
@@ -285,11 +296,12 @@ def _convert_to_webp(
|
||||
try:
|
||||
with Image.open(src_png) as im:
|
||||
did_downscale = False
|
||||
save_kwargs: Dict[str, Any] = {
|
||||
"format": "WEBP",
|
||||
"quality": int(quality),
|
||||
"method": int(method),
|
||||
}
|
||||
save_kwargs: Dict[str,
|
||||
Any] = {
|
||||
"format": "WEBP",
|
||||
"quality": int(quality),
|
||||
"method": int(method),
|
||||
}
|
||||
|
||||
# Preserve alpha when present; Pillow handles it for WEBP.
|
||||
# Normalize palette images to RGBA to avoid odd palette artifacts.
|
||||
@@ -303,12 +315,9 @@ def _convert_to_webp(
|
||||
except Exception:
|
||||
w, h = 0, 0
|
||||
|
||||
if (
|
||||
downscale_if_oversize
|
||||
and isinstance(max_dim, int)
|
||||
and max_dim > 0
|
||||
and (w > max_dim or h > max_dim)
|
||||
):
|
||||
if (downscale_if_oversize and isinstance(max_dim,
|
||||
int) and max_dim > 0
|
||||
and (w > max_dim or h > max_dim)):
|
||||
scale = 1.0
|
||||
try:
|
||||
scale = min(float(max_dim) / float(w), float(max_dim) / float(h))
|
||||
@@ -322,7 +331,13 @@ def _convert_to_webp(
|
||||
f"[_convert_to_webp] Image exceeds WebP limit ({w}x{h}); downscaling -> {new_w}x{new_h}"
|
||||
)
|
||||
try:
|
||||
resample = getattr(getattr(Image, "Resampling", Image), "LANCZOS", None)
|
||||
resample = getattr(
|
||||
getattr(Image,
|
||||
"Resampling",
|
||||
Image),
|
||||
"LANCZOS",
|
||||
None
|
||||
)
|
||||
if resample is None:
|
||||
resample = getattr(Image, "LANCZOS", 1)
|
||||
im = im.resize((new_w, new_h), resample=resample)
|
||||
@@ -367,13 +382,20 @@ def _selectors_for_url(url: str) -> List[str]:
|
||||
|
||||
|
||||
def _platform_preprocess(
|
||||
url: str, page: Any, warnings: List[str], timeout_ms: int = 10_000
|
||||
url: str,
|
||||
page: Any,
|
||||
warnings: List[str],
|
||||
timeout_ms: int = 10_000
|
||||
) -> None:
|
||||
"""Best-effort page tweaks for popular platforms before capture."""
|
||||
try:
|
||||
u = str(url or "").lower()
|
||||
|
||||
def _try_click_buttons(names: List[str], passes: int = 2, per_timeout: int = 700) -> int:
|
||||
def _try_click_buttons(
|
||||
names: List[str],
|
||||
passes: int = 2,
|
||||
per_timeout: int = 700
|
||||
) -> int:
|
||||
clicks = 0
|
||||
for _ in range(max(1, int(passes))):
|
||||
for name in names:
|
||||
@@ -411,7 +433,9 @@ def _platform_preprocess(
|
||||
|
||||
def _submit_wayback(url: str, timeout: float) -> Optional[str]:
|
||||
encoded = quote(url, safe="/:?=&")
|
||||
with HTTPClient(headers={"User-Agent": USER_AGENT}) as client:
|
||||
with HTTPClient(headers={
|
||||
"User-Agent": USER_AGENT
|
||||
}) as client:
|
||||
response = client.get(f"https://web.archive.org/save/{encoded}")
|
||||
content_location = response.headers.get("Content-Location")
|
||||
if content_location:
|
||||
@@ -422,7 +446,9 @@ def _submit_wayback(url: str, timeout: float) -> Optional[str]:
|
||||
def _submit_archive_today(url: str, timeout: float) -> Optional[str]:
|
||||
"""Submit URL to Archive.today."""
|
||||
encoded = quote(url, safe=":/?#[]@!$&'()*+,;=")
|
||||
with HTTPClient(headers={"User-Agent": USER_AGENT}) as client:
|
||||
with HTTPClient(headers={
|
||||
"User-Agent": USER_AGENT
|
||||
}) as client:
|
||||
response = client.get(f"https://archive.today/submit/?url={encoded}")
|
||||
response.raise_for_status()
|
||||
final = str(response.url)
|
||||
@@ -434,7 +460,9 @@ def _submit_archive_today(url: str, timeout: float) -> Optional[str]:
|
||||
def _submit_archive_ph(url: str, timeout: float) -> Optional[str]:
|
||||
"""Submit URL to Archive.ph."""
|
||||
encoded = quote(url, safe=":/?#[]@!$&'()*+,;=")
|
||||
with HTTPClient(headers={"User-Agent": USER_AGENT}) as client:
|
||||
with HTTPClient(headers={
|
||||
"User-Agent": USER_AGENT
|
||||
}) as client:
|
||||
response = client.get(f"https://archive.ph/submit/?url={encoded}")
|
||||
response.raise_for_status()
|
||||
final = str(response.url)
|
||||
@@ -460,7 +488,9 @@ def _archive_url(url: str, timeout: float) -> Tuple[List[str], List[str]]:
|
||||
warnings.append(f"archive {label} rate limited (HTTP 429)")
|
||||
debug(f"{label}: Rate limited (HTTP 429)")
|
||||
else:
|
||||
warnings.append(f"archive {label} failed: HTTP {exc.response.status_code}")
|
||||
warnings.append(
|
||||
f"archive {label} failed: HTTP {exc.response.status_code}"
|
||||
)
|
||||
debug(f"{label}: HTTP {exc.response.status_code}")
|
||||
except httpx.RequestError as exc:
|
||||
warnings.append(f"archive {label} failed: {exc}")
|
||||
@@ -480,7 +510,9 @@ def _archive_url(url: str, timeout: float) -> Tuple[List[str], List[str]]:
|
||||
def _prepare_output_path(options: ScreenshotOptions) -> Path:
|
||||
"""Prepare and validate output path for screenshot."""
|
||||
ensure_directory(options.output_dir)
|
||||
explicit_format = _normalise_format(options.output_format) if options.output_format else None
|
||||
explicit_format = _normalise_format(
|
||||
options.output_format
|
||||
) if options.output_format else None
|
||||
inferred_format: Optional[str] = None
|
||||
if options.output_path is not None:
|
||||
path = options.output_path
|
||||
@@ -506,7 +538,10 @@ def _prepare_output_path(options: ScreenshotOptions) -> Path:
|
||||
|
||||
|
||||
def _capture(
|
||||
options: ScreenshotOptions, destination: Path, warnings: List[str], progress: PipelineProgress
|
||||
options: ScreenshotOptions,
|
||||
destination: Path,
|
||||
warnings: List[str],
|
||||
progress: PipelineProgress
|
||||
) -> None:
|
||||
"""Capture screenshot using Playwright."""
|
||||
debug(f"[_capture] Starting capture for {options.url} -> {destination}")
|
||||
@@ -517,9 +552,11 @@ def _capture(
|
||||
# Ensure Chromium engine is used for the screen-shot cmdlet (force for consistency)
|
||||
try:
|
||||
current_browser = (
|
||||
getattr(tool.defaults, "browser", "").lower()
|
||||
if getattr(tool, "defaults", None) is not None
|
||||
else ""
|
||||
getattr(tool.defaults,
|
||||
"browser",
|
||||
"").lower() if getattr(tool,
|
||||
"defaults",
|
||||
None) is not None else ""
|
||||
)
|
||||
if current_browser != "chromium":
|
||||
debug(
|
||||
@@ -527,12 +564,18 @@ def _capture(
|
||||
)
|
||||
base_cfg = {}
|
||||
try:
|
||||
base_cfg = dict(getattr(tool, "_config", {}) or {})
|
||||
base_cfg = dict(getattr(tool,
|
||||
"_config",
|
||||
{}) or {})
|
||||
except Exception:
|
||||
base_cfg = {}
|
||||
tool_block = dict(base_cfg.get("tool") or {}) if isinstance(base_cfg, dict) else {}
|
||||
tool_block = dict(base_cfg.get("tool") or {}
|
||||
) if isinstance(base_cfg,
|
||||
dict) else {}
|
||||
pw_block = (
|
||||
dict(tool_block.get("playwright") or {}) if isinstance(tool_block, dict) else {}
|
||||
dict(tool_block.get("playwright") or {})
|
||||
if isinstance(tool_block,
|
||||
dict) else {}
|
||||
)
|
||||
pw_block["browser"] = "chromium"
|
||||
tool_block["playwright"] = pw_block
|
||||
@@ -540,7 +583,13 @@ def _capture(
|
||||
base_cfg["tool"] = tool_block
|
||||
tool = PlaywrightTool(base_cfg)
|
||||
except Exception:
|
||||
tool = PlaywrightTool({"tool": {"playwright": {"browser": "chromium"}}})
|
||||
tool = PlaywrightTool({
|
||||
"tool": {
|
||||
"playwright": {
|
||||
"browser": "chromium"
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
tool.debug_dump()
|
||||
|
||||
@@ -550,7 +599,9 @@ def _capture(
|
||||
debug(f"[_capture] Format: {format_name}, Headless: {headless}")
|
||||
|
||||
if format_name == "pdf" and not options.headless:
|
||||
warnings.append("pdf output requires headless Chromium; overriding headless mode")
|
||||
warnings.append(
|
||||
"pdf output requires headless Chromium; overriding headless mode"
|
||||
)
|
||||
|
||||
try:
|
||||
with tool.open_page(headless=headless) as page:
|
||||
@@ -572,11 +623,15 @@ def _capture(
|
||||
page.wait_for_selector("article", timeout=10_000)
|
||||
debug("Article element found")
|
||||
except PlaywrightTimeoutError:
|
||||
warnings.append("<article> selector not found; capturing fallback")
|
||||
warnings.append(
|
||||
"<article> selector not found; capturing fallback"
|
||||
)
|
||||
debug("Article element not found; using fallback")
|
||||
|
||||
if options.wait_after_load > 0:
|
||||
debug(f"Waiting {options.wait_after_load}s for page stabilization...")
|
||||
debug(
|
||||
f"Waiting {options.wait_after_load}s for page stabilization..."
|
||||
)
|
||||
time.sleep(min(10.0, max(0.0, options.wait_after_load)))
|
||||
|
||||
progress.step("loading stabilized")
|
||||
@@ -617,7 +672,9 @@ def _capture(
|
||||
try:
|
||||
debug(f"Trying selector: {sel}")
|
||||
el = page.wait_for_selector(
|
||||
sel, timeout=max(0, int(options.selector_timeout_ms))
|
||||
sel,
|
||||
timeout=max(0,
|
||||
int(options.selector_timeout_ms))
|
||||
)
|
||||
except PlaywrightTimeoutError:
|
||||
debug(f"Selector not found: {sel}")
|
||||
@@ -639,7 +696,9 @@ def _capture(
|
||||
debug("Element captured successfully")
|
||||
break
|
||||
except Exception as exc:
|
||||
warnings.append(f"element capture failed for '{sel}': {exc}")
|
||||
warnings.append(
|
||||
f"element capture failed for '{sel}': {exc}"
|
||||
)
|
||||
debug(f"Failed to capture element: {exc}")
|
||||
# Fallback to default capture paths
|
||||
if element_captured:
|
||||
@@ -653,7 +712,10 @@ def _capture(
|
||||
progress.step("capturing saved")
|
||||
else:
|
||||
debug(f"Capturing full page to {destination}...")
|
||||
screenshot_kwargs: Dict[str, Any] = {"path": str(destination)}
|
||||
screenshot_kwargs: Dict[str,
|
||||
Any] = {
|
||||
"path": str(destination)
|
||||
}
|
||||
if format_name == "jpeg":
|
||||
screenshot_kwargs["type"] = "jpeg"
|
||||
screenshot_kwargs["quality"] = 90
|
||||
@@ -675,10 +737,8 @@ def _capture(
|
||||
except Exception as exc:
|
||||
debug(f"[_capture] Exception launching browser/page: {exc}")
|
||||
msg = str(exc).lower()
|
||||
if any(
|
||||
k in msg
|
||||
for k in ["executable", "not found", "no such file", "cannot find", "install"]
|
||||
):
|
||||
if any(k in msg for k in ["executable", "not found", "no such file",
|
||||
"cannot find", "install"]):
|
||||
raise ScreenshotError(
|
||||
"Chromium Playwright browser binaries not found. Install them: python ./scripts/bootstrap.py --playwright-only --browsers chromium"
|
||||
) from exc
|
||||
@@ -691,7 +751,10 @@ def _capture(
|
||||
raise ScreenshotError(f"Failed to capture screenshot: {exc}") from exc
|
||||
|
||||
|
||||
def _capture_screenshot(options: ScreenshotOptions, progress: PipelineProgress) -> ScreenshotResult:
|
||||
def _capture_screenshot(
|
||||
options: ScreenshotOptions,
|
||||
progress: PipelineProgress
|
||||
) -> ScreenshotResult:
|
||||
"""Capture a screenshot for the given options."""
|
||||
debug(f"[_capture_screenshot] Preparing capture for {options.url}")
|
||||
requested_format = _normalise_format(options.output_format)
|
||||
@@ -702,7 +765,8 @@ def _capture_screenshot(options: ScreenshotOptions, progress: PipelineProgress)
|
||||
will_convert = requested_format == "webp"
|
||||
will_archive = bool(options.archive and options.url)
|
||||
total_steps = (
|
||||
9 + (1 if will_target else 0) + (1 if will_convert else 0) + (1 if will_archive else 0)
|
||||
9 + (1 if will_target else 0) + (1 if will_convert else 0) +
|
||||
(1 if will_archive else 0)
|
||||
)
|
||||
progress.begin_steps(total_steps)
|
||||
progress.step("loading starting")
|
||||
@@ -712,7 +776,9 @@ def _capture_screenshot(options: ScreenshotOptions, progress: PipelineProgress)
|
||||
capture_path = destination
|
||||
if requested_format == "webp":
|
||||
capture_path = unique_path(destination.with_suffix(".png"))
|
||||
debug(f"[_capture_screenshot] Requested webp; capturing intermediate png -> {capture_path}")
|
||||
debug(
|
||||
f"[_capture_screenshot] Requested webp; capturing intermediate png -> {capture_path}"
|
||||
)
|
||||
options.output_format = "png"
|
||||
_capture(options, capture_path, warnings, progress)
|
||||
|
||||
@@ -808,7 +874,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# [tool=playwright]
|
||||
# format="pdf"
|
||||
try:
|
||||
tool_cfg = config.get("tool", {}) if isinstance(config, dict) else {}
|
||||
tool_cfg = config.get("tool",
|
||||
{}) if isinstance(config,
|
||||
dict) else {}
|
||||
pw_cfg = tool_cfg.get("playwright") if isinstance(tool_cfg, dict) else None
|
||||
if isinstance(pw_cfg, dict):
|
||||
format_value = pw_cfg.get("format")
|
||||
@@ -839,7 +907,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Extract url from piped results
|
||||
if piped_results:
|
||||
for item in piped_results:
|
||||
url = get_field(item, "path") or get_field(item, "url") or get_field(item, "target")
|
||||
url = get_field(item,
|
||||
"path") or get_field(item,
|
||||
"url"
|
||||
) or get_field(item,
|
||||
"target")
|
||||
|
||||
if url:
|
||||
url_to_process.append((str(url), item))
|
||||
@@ -910,6 +982,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
all_emitted = []
|
||||
exit_code = 0
|
||||
|
||||
# ========================================================================
|
||||
# PROCESS url AND CAPTURE SCREENSHOTS
|
||||
# ========================================================================
|
||||
@@ -970,8 +1043,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"playwright": {
|
||||
"browser": "chromium",
|
||||
"user_agent": "native",
|
||||
"viewport_width": int(DEFAULT_VIEWPORT.get("width", 1920)),
|
||||
"viewport_height": int(DEFAULT_VIEWPORT.get("height", 1080)),
|
||||
"viewport_width": int(DEFAULT_VIEWPORT.get("width",
|
||||
1920)),
|
||||
"viewport_height":
|
||||
int(DEFAULT_VIEWPORT.get("height",
|
||||
1080)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -995,7 +1071,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if manual_target_selectors:
|
||||
options.prefer_platform_target = True
|
||||
options.target_selectors = manual_target_selectors
|
||||
debug(f"[screen_shot] Using explicit selector(s): {manual_target_selectors}")
|
||||
debug(
|
||||
f"[screen_shot] Using explicit selector(s): {manual_target_selectors}"
|
||||
)
|
||||
elif auto_selectors:
|
||||
options.prefer_platform_target = True
|
||||
options.target_selectors = auto_selectors
|
||||
@@ -1022,9 +1100,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
capture_date = ""
|
||||
try:
|
||||
capture_date = (
|
||||
datetime.fromtimestamp(screenshot_result.path.stat().st_mtime)
|
||||
.date()
|
||||
.isoformat()
|
||||
datetime.fromtimestamp(screenshot_result.path.stat().st_mtime
|
||||
).date().isoformat()
|
||||
)
|
||||
except Exception:
|
||||
capture_date = datetime.now().date().isoformat()
|
||||
@@ -1035,14 +1112,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
upstream_tags = _extract_item_tags(origin_item)
|
||||
filtered_upstream_tags = [
|
||||
t
|
||||
for t in upstream_tags
|
||||
t for t in upstream_tags
|
||||
if not str(t).strip().lower().startswith(("type:", "date:"))
|
||||
]
|
||||
|
||||
url_tags = _tags_from_url(url)
|
||||
merged_tags = unique_preserve_order(
|
||||
["type:screenshot", f"date:{capture_date}"] + filtered_upstream_tags + url_tags
|
||||
["type:screenshot",
|
||||
f"date:{capture_date}"] + filtered_upstream_tags + url_tags
|
||||
)
|
||||
|
||||
pipe_obj = create_pipe_object_result(
|
||||
@@ -1097,13 +1174,20 @@ CMDLET = Cmdlet(
|
||||
name="screen-shot",
|
||||
summary="Capture a website screenshot",
|
||||
usage="screen-shot <url> [options]",
|
||||
alias=["screenshot", "ss"],
|
||||
alias=["screenshot",
|
||||
"ss"],
|
||||
arg=[
|
||||
SharedArgs.URL,
|
||||
CmdletArg(
|
||||
name="format", type="string", description="Output format: webp, png, jpeg, or pdf"
|
||||
name="format",
|
||||
type="string",
|
||||
description="Output format: webp, png, jpeg, or pdf"
|
||||
),
|
||||
CmdletArg(
|
||||
name="selector",
|
||||
type="string",
|
||||
description="CSS selector for element capture"
|
||||
),
|
||||
CmdletArg(name="selector", type="string", description="CSS selector for element capture"),
|
||||
SharedArgs.PATH,
|
||||
],
|
||||
detail=[
|
||||
|
||||
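The remaining hunks repeat the same mechanical reformatting across the other cmdlet modules (search-provider, search-store, trim-file). A hedged sketch of applying that step to every tracked Python file in one pass with yapf's file-level API; discovering files via git ls-files and reading a .style.yapf at the repository root are assumptions about the workflow, not details taken from this diff:

    import subprocess
    from yapf.yapflib.yapf_api import FormatFile

    # Python files known to git (assumed discovery step).
    tracked = subprocess.run(
        ["git", "ls-files", "*.py"],
        capture_output=True, text=True, check=True,
    ).stdout.splitlines()

    for path in tracked:
        # in_place=True rewrites the file; `changed` reports whether anything moved.
        _, _, changed = FormatFile(path, style_config=".style.yapf", in_place=True)
        if changed:
            print(f"reformatted {path}")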
@@ -22,7 +22,7 @@ import pipeline as ctx
|
||||
|
||||
# Optional dependencies
|
||||
try:
|
||||
from config import get_local_storage_path
|
||||
from SYS.config import get_local_storage_path
|
||||
except Exception: # pragma: no cover
|
||||
get_local_storage_path = None # type: ignore
|
||||
|
||||
@@ -33,14 +33,16 @@ class Search_Provider(Cmdlet):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
name="search-provider",
|
||||
summary="Search external providers (bandcamp, libgen, soulseek, youtube, alldebrid, loc, internetarchive)",
|
||||
summary=
|
||||
"Search external providers (bandcamp, libgen, soulseek, youtube, alldebrid, loc, internetarchive)",
|
||||
usage="search-provider -provider <provider> <query> [-limit N] [-open ID]",
|
||||
arg=[
|
||||
CmdletArg(
|
||||
"provider",
|
||||
type="string",
|
||||
required=True,
|
||||
description="Provider name: bandcamp, libgen, soulseek, youtube, alldebrid, loc, internetarchive",
|
||||
description=
|
||||
"Provider name: bandcamp, libgen, soulseek, youtube, alldebrid, loc, internetarchive",
|
||||
),
|
||||
CmdletArg(
|
||||
"query",
|
||||
@@ -49,12 +51,15 @@ class Search_Provider(Cmdlet):
|
||||
description="Search query (supports provider-specific syntax)",
|
||||
),
|
||||
CmdletArg(
|
||||
"limit", type="int", description="Maximum results to return (default: 50)"
|
||||
"limit",
|
||||
type="int",
|
||||
description="Maximum results to return (default: 50)"
|
||||
),
|
||||
CmdletArg(
|
||||
"open",
|
||||
type="int",
|
||||
description="(alldebrid) Open folder/magnet by ID and list its files",
|
||||
description=
|
||||
"(alldebrid) Open folder/magnet by ID and list its files",
|
||||
),
|
||||
],
|
||||
detail=[
|
||||
@@ -100,11 +105,21 @@ class Search_Provider(Cmdlet):
|
||||
# Dynamic flag variants from cmdlet arg definitions.
|
||||
flag_registry = self.build_flag_registry()
|
||||
provider_flags = {
|
||||
f.lower() for f in (flag_registry.get("provider") or {"-provider", "--provider"})
|
||||
f.lower()
|
||||
for f in (flag_registry.get("provider") or {"-provider", "--provider"})
|
||||
}
|
||||
query_flags = {
|
||||
f.lower()
|
||||
for f in (flag_registry.get("query") or {"-query", "--query"})
|
||||
}
|
||||
limit_flags = {
|
||||
f.lower()
|
||||
for f in (flag_registry.get("limit") or {"-limit", "--limit"})
|
||||
}
|
||||
open_flags = {
|
||||
f.lower()
|
||||
for f in (flag_registry.get("open") or {"-open", "--open"})
|
||||
}
|
||||
query_flags = {f.lower() for f in (flag_registry.get("query") or {"-query", "--query"})}
|
||||
limit_flags = {f.lower() for f in (flag_registry.get("limit") or {"-limit", "--limit"})}
|
||||
open_flags = {f.lower() for f in (flag_registry.get("open") or {"-open", "--open"})}
|
||||
|
||||
provider_name: Optional[str] = None
|
||||
query: Optional[str] = None
|
||||
@@ -166,7 +181,9 @@ class Search_Provider(Cmdlet):
|
||||
log(f" {status} {name}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
debug(f"[search-provider] provider={provider_name}, query={query}, limit={limit}")
|
||||
debug(
|
||||
f"[search-provider] provider={provider_name}, query={query}, limit={limit}"
|
||||
)
|
||||
|
||||
# Get provider
|
||||
provider = get_search_provider(provider_name, config)
|
||||
@@ -180,7 +197,9 @@ class Search_Provider(Cmdlet):
|
||||
return 1
|
||||
|
||||
worker_id = str(uuid.uuid4())
|
||||
library_root = get_local_storage_path(config or {}) if get_local_storage_path else None
|
||||
library_root = get_local_storage_path(
|
||||
config or {}
|
||||
) if get_local_storage_path else None
|
||||
|
||||
db = None
|
||||
if library_root:
|
||||
@@ -219,7 +238,8 @@ class Search_Provider(Cmdlet):
|
||||
provider_label = "LoC"
|
||||
else:
|
||||
provider_label = (
|
||||
provider_text[:1].upper() + provider_text[1:] if provider_text else "Provider"
|
||||
provider_text[:1].upper() +
|
||||
provider_text[1:] if provider_text else "Provider"
|
||||
)
|
||||
|
||||
if provider_lower == "alldebrid" and open_id is not None:
|
||||
@@ -236,11 +256,22 @@ class Search_Provider(Cmdlet):
|
||||
if open_id is not None:
|
||||
# Second-stage: show files for selected folder/magnet.
|
||||
results = provider.search(
|
||||
query, limit=limit, filters={"view": "files", "magnet_id": open_id}
|
||||
query,
|
||||
limit=limit,
|
||||
filters={
|
||||
"view": "files",
|
||||
"magnet_id": open_id
|
||||
}
|
||||
)
|
||||
else:
|
||||
# Default: show folders (magnets) so user can select @N.
|
||||
results = provider.search(query, limit=limit, filters={"view": "folders"})
|
||||
results = provider.search(
|
||||
query,
|
||||
limit=limit,
|
||||
filters={
|
||||
"view": "folders"
|
||||
}
|
||||
)
|
||||
else:
|
||||
results = provider.search(query, limit=limit)
|
||||
debug(f"[search-provider] Got {len(results)} results")
|
||||
@@ -256,8 +287,8 @@ class Search_Provider(Cmdlet):
|
||||
for search_result in results:
|
||||
item_dict = (
|
||||
search_result.to_dict()
|
||||
if hasattr(search_result, "to_dict")
|
||||
else dict(search_result)
|
||||
if hasattr(search_result,
|
||||
"to_dict") else dict(search_result)
|
||||
)
|
||||
|
||||
# Ensure table field is set (should be by provider, but just in case)
|
||||
@@ -265,21 +296,26 @@ class Search_Provider(Cmdlet):
|
||||
item_dict["table"] = provider_name
|
||||
|
||||
row_index = len(table.rows)
|
||||
table.add_result(search_result) # ResultTable handles SearchResult objects
|
||||
table.add_result(
|
||||
search_result
|
||||
) # ResultTable handles SearchResult objects
|
||||
|
||||
# For AllDebrid folder rows, allow @N to open and show files.
|
||||
try:
|
||||
if (
|
||||
provider_lower == "alldebrid"
|
||||
and getattr(search_result, "media_kind", "") == "folder"
|
||||
):
|
||||
if (provider_lower == "alldebrid" and getattr(search_result,
|
||||
"media_kind",
|
||||
"") == "folder"):
|
||||
magnet_id = None
|
||||
meta = getattr(search_result, "full_metadata", None)
|
||||
if isinstance(meta, dict):
|
||||
magnet_id = meta.get("magnet_id")
|
||||
if magnet_id is not None:
|
||||
table.set_row_selection_args(
|
||||
row_index, ["-open", str(magnet_id), "-query", "*"]
|
||||
row_index,
|
||||
["-open",
|
||||
str(magnet_id),
|
||||
"-query",
|
||||
"*"]
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -34,8 +34,9 @@ from . import _shared as sh
|
||||
)
|
||||
import pipeline as ctx
|
||||
|
||||
|
||||
STORAGE_ORIGINS = {"local", "hydrus", "folder"}
|
||||
STORAGE_ORIGINS = {"local",
|
||||
"hydrus",
|
||||
"folder"}
|
||||
|
||||
|
||||
class Search_Store(Cmdlet):
|
||||
@@ -47,7 +48,11 @@ class Search_Store(Cmdlet):
|
||||
summary="Search storage backends (Folder, Hydrus) for files.",
|
||||
usage="search-store [-query <query>] [-store BACKEND] [-limit N]",
|
||||
arg=[
|
||||
CmdletArg("limit", type="integer", description="Limit results (default: 100)"),
|
||||
CmdletArg(
|
||||
"limit",
|
||||
type="integer",
|
||||
description="Limit results (default: 100)"
|
||||
),
|
||||
SharedArgs.STORE,
|
||||
SharedArgs.QUERY,
|
||||
],
|
||||
@@ -95,7 +100,8 @@ class Search_Store(Cmdlet):
|
||||
# Ensure we have title field
|
||||
if "title" not in payload:
|
||||
payload["title"] = (
|
||||
payload.get("name") or payload.get("target") or payload.get("path") or "Result"
|
||||
payload.get("name") or payload.get("target") or payload.get("path")
|
||||
or "Result"
|
||||
)
|
||||
|
||||
# Ensure we have ext field
|
||||
@@ -123,9 +129,13 @@ class Search_Store(Cmdlet):
|
||||
|
||||
args_list = [str(arg) for arg in (args or [])]
|
||||
|
||||
refresh_mode = any(str(a).strip().lower() in {"--refresh", "-refresh"} for a in args_list)
|
||||
refresh_mode = any(
|
||||
str(a).strip().lower() in {"--refresh",
|
||||
"-refresh"} for a in args_list
|
||||
)
|
||||
|
||||
def _format_command_title(command: str, raw_args: List[str]) -> str:
|
||||
|
||||
def _quote(value: str) -> str:
|
||||
text = str(value)
|
||||
if not text:
|
||||
@@ -136,8 +146,7 @@ class Search_Store(Cmdlet):
|
||||
return '"' + text.replace('"', '\\"') + '"'
|
||||
|
||||
cleaned = [
|
||||
str(a)
|
||||
for a in (raw_args or [])
|
||||
str(a) for a in (raw_args or [])
|
||||
if str(a).strip().lower() not in {"--refresh", "-refresh"}
|
||||
]
|
||||
if not cleaned:
|
||||
@@ -147,21 +156,32 @@ class Search_Store(Cmdlet):
|
||||
raw_title = None
|
||||
try:
|
||||
raw_title = (
|
||||
ctx.get_current_stage_text("") if hasattr(ctx, "get_current_stage_text") else None
|
||||
ctx.get_current_stage_text("")
|
||||
if hasattr(ctx,
|
||||
"get_current_stage_text") else None
|
||||
)
|
||||
except Exception:
|
||||
raw_title = None
|
||||
|
||||
command_title = (str(raw_title).strip() if raw_title else "") or _format_command_title(
|
||||
"search-store", list(args_list)
|
||||
)
|
||||
command_title = (str(raw_title).strip() if raw_title else
|
||||
"") or _format_command_title("search-store",
|
||||
list(args_list))
|
||||
|
||||
# Build dynamic flag variants from cmdlet arg definitions.
|
||||
# This avoids hardcoding flag spellings in parsing loops.
|
||||
flag_registry = self.build_flag_registry()
|
||||
query_flags = {f.lower() for f in (flag_registry.get("query") or {"-query", "--query"})}
|
||||
store_flags = {f.lower() for f in (flag_registry.get("store") or {"-store", "--store"})}
|
||||
limit_flags = {f.lower() for f in (flag_registry.get("limit") or {"-limit", "--limit"})}
|
||||
query_flags = {
|
||||
f.lower()
|
||||
for f in (flag_registry.get("query") or {"-query", "--query"})
|
||||
}
|
||||
store_flags = {
|
||||
f.lower()
|
||||
for f in (flag_registry.get("store") or {"-store", "--store"})
|
||||
}
|
||||
limit_flags = {
|
||||
f.lower()
|
||||
for f in (flag_registry.get("limit") or {"-limit", "--limit"})
|
||||
}
|
||||
|
||||
# Parse arguments
|
||||
query = ""
|
||||
@@ -212,7 +232,7 @@ class Search_Store(Cmdlet):
|
||||
return 1
|
||||
|
||||
from API.folder import API_folder_store
|
||||
from config import get_local_storage_path
|
||||
from SYS.config import get_local_storage_path
|
||||
import uuid
|
||||
|
||||
worker_id = str(uuid.uuid4())
|
||||
@@ -306,7 +326,8 @@ class Search_Store(Cmdlet):
|
||||
except Exception:
|
||||
path_str = None
|
||||
|
||||
meta_obj: Dict[str, Any] = {}
|
||||
meta_obj: Dict[str,
|
||||
Any] = {}
|
||||
try:
|
||||
meta_obj = resolved_backend.get_metadata(h) or {}
|
||||
except Exception:
|
||||
@@ -321,8 +342,7 @@ class Search_Store(Cmdlet):
|
||||
maybe_tags = tag_result
|
||||
if isinstance(maybe_tags, list):
|
||||
tags_list = [
|
||||
str(t).strip()
|
||||
for t in maybe_tags
|
||||
str(t).strip() for t in maybe_tags
|
||||
if isinstance(t, str) and str(t).strip()
|
||||
]
|
||||
except Exception:
|
||||
@@ -336,7 +356,9 @@ class Search_Store(Cmdlet):
|
||||
except Exception:
|
||||
title_from_tag = None
|
||||
|
||||
title = title_from_tag or meta_obj.get("title") or meta_obj.get("name")
|
||||
title = title_from_tag or meta_obj.get("title") or meta_obj.get(
|
||||
"name"
|
||||
)
|
||||
if not title and path_str:
|
||||
try:
|
||||
title = Path(path_str).stem
|
||||
@@ -365,15 +387,16 @@ class Search_Store(Cmdlet):
|
||||
except Exception:
|
||||
size_bytes_int = None
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"title": str(title or h),
|
||||
"hash": h,
|
||||
"store": resolved_backend_name,
|
||||
"path": path_str,
|
||||
"ext": self._normalize_extension(ext_val),
|
||||
"size_bytes": size_bytes_int,
|
||||
"tag": tags_list,
|
||||
}
|
||||
payload: Dict[str,
|
||||
Any] = {
|
||||
"title": str(title or h),
|
||||
"hash": h,
|
||||
"store": resolved_backend_name,
|
||||
"path": path_str,
|
||||
"ext": self._normalize_extension(ext_val),
|
||||
"size_bytes": size_bytes_int,
|
||||
"tag": tags_list,
|
||||
}
|
||||
|
||||
table.add_result(payload)
|
||||
results_list.append(payload)
|
||||
@@ -383,10 +406,17 @@ class Search_Store(Cmdlet):
|
||||
table.title = command_title
|
||||
|
||||
if refresh_mode:
|
||||
ctx.set_last_result_table_preserve_history(table, results_list)
|
||||
ctx.set_last_result_table_preserve_history(
|
||||
table,
|
||||
results_list
|
||||
)
|
||||
else:
|
||||
ctx.set_last_result_table(table, results_list)
|
||||
db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
|
||||
db.append_worker_stdout(
|
||||
worker_id,
|
||||
json.dumps(results_list,
|
||||
indent=2)
|
||||
)
|
||||
db.update_worker_status(worker_id, "completed")
|
||||
return 0
|
||||
|
||||
@@ -413,7 +443,9 @@ class Search_Store(Cmdlet):
|
||||
return 1
|
||||
debug(f"[search-store] Searching '{backend_to_search}'")
|
||||
results = target_backend.search(query, limit=limit)
|
||||
debug(f"[search-store] '{backend_to_search}' -> {len(results or [])} result(s)")
|
||||
debug(
|
||||
f"[search-store] '{backend_to_search}' -> {len(results or [])} result(s)"
|
||||
)
|
||||
else:
|
||||
all_results = []
|
||||
for backend_name in storage.list_searchable_backends():
|
||||
@@ -422,7 +454,10 @@ class Search_Store(Cmdlet):
|
||||
searched_backends.append(backend_name)
|
||||
|
||||
debug(f"[search-store] Searching '{backend_name}'")
|
||||
backend_results = backend.search(query, limit=limit - len(all_results))
|
||||
backend_results = backend.search(
|
||||
query,
|
||||
limit=limit - len(all_results)
|
||||
)
|
||||
debug(
|
||||
f"[search-store] '{backend_name}' -> {len(backend_results or [])} result(s)"
|
||||
)
|
||||
@@ -431,7 +466,10 @@ class Search_Store(Cmdlet):
|
||||
if len(all_results) >= limit:
|
||||
break
|
||||
except Exception as exc:
|
||||
log(f"Backend {backend_name} search failed: {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"Backend {backend_name} search failed: {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
results = all_results[:limit]
|
||||
|
||||
if results:
|
||||
@@ -440,9 +478,13 @@ class Search_Store(Cmdlet):
|
||||
def _as_dict(obj: Any) -> Dict[str, Any]:
|
||||
if isinstance(obj, dict):
|
||||
return dict(obj)
|
||||
if hasattr(obj, "to_dict") and callable(getattr(obj, "to_dict")):
|
||||
if hasattr(obj,
|
||||
"to_dict") and callable(getattr(obj,
|
||||
"to_dict")):
|
||||
return obj.to_dict() # type: ignore[arg-type]
|
||||
return {"title": str(obj)}
|
||||
return {
|
||||
"title": str(obj)
|
||||
}
|
||||
|
||||
item_dict = _as_dict(item)
|
||||
if store_filter:
|
||||
@@ -470,7 +512,11 @@ class Search_Store(Cmdlet):
|
||||
ctx.set_last_result_table_preserve_history(table, results_list)
|
||||
else:
|
||||
ctx.set_last_result_table(table, results_list)
|
||||
db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
|
||||
db.append_worker_stdout(
|
||||
worker_id,
|
||||
json.dumps(results_list,
|
||||
indent=2)
|
||||
)
|
||||
else:
|
||||
log("No results found", file=sys.stderr)
|
||||
if refresh_mode:
|
||||
|
||||
@@ -28,23 +28,32 @@ import pipeline as ctx
|
||||
CMDLET = Cmdlet(
|
||||
name="trim-file",
|
||||
summary="Trim a media file using ffmpeg.",
|
||||
usage="trim-file [-path <path>] [-input <path-or-url>] -range <start-end> [-outdir <dir>] [-delete]",
|
||||
usage=
|
||||
"trim-file [-path <path>] [-input <path-or-url>] -range <start-end> [-outdir <dir>] [-delete]",
|
||||
arg=[
|
||||
CmdletArg("-path", description="Path to the file (optional if piped)."),
|
||||
CmdletArg("-path",
|
||||
description="Path to the file (optional if piped)."),
|
||||
CmdletArg(
|
||||
"-input",
|
||||
description="Override input media source (path or URL). Useful when piping store metadata but trimming from an mpv stream URL.",
|
||||
description=
|
||||
"Override input media source (path or URL). Useful when piping store metadata but trimming from an mpv stream URL.",
|
||||
),
|
||||
CmdletArg(
|
||||
"-range",
|
||||
required=True,
|
||||
description="Time range to trim (e.g. '3:45-3:55', '00:03:45-00:03:55', or '1h3m-1h10m30s').",
|
||||
description=
|
||||
"Time range to trim (e.g. '3:45-3:55', '00:03:45-00:03:55', or '1h3m-1h10m30s').",
|
||||
),
|
||||
CmdletArg(
|
||||
"-outdir",
|
||||
description="Output directory for the clip (defaults to source folder for local files; otherwise uses config temp/videos).",
|
||||
description=
|
||||
"Output directory for the clip (defaults to source folder for local files; otherwise uses config temp/videos).",
|
||||
),
|
||||
CmdletArg(
|
||||
"-delete",
|
||||
type="flag",
|
||||
description="Delete the original file after trimming."
|
||||
),
|
||||
CmdletArg("-delete", type="flag", description="Delete the original file after trimming."),
|
||||
],
|
||||
detail=[
|
||||
"Creates a new file with 'clip_' prefix in the filename.",
|
||||
@@ -153,7 +162,12 @@ def _extract_store_name(item: Any) -> Optional[str]:
|
||||
|
||||
|
||||
def _persist_alt_relationship(
|
||||
*, config: Dict[str, Any], store_name: str, alt_hash: str, king_hash: str
|
||||
*,
|
||||
config: Dict[str,
|
||||
Any],
|
||||
store_name: str,
|
||||
alt_hash: str,
|
||||
king_hash: str
|
||||
) -> None:
|
||||
"""Persist directional alt -> king relationship in the given backend."""
|
||||
try:
|
||||
@@ -169,17 +183,21 @@ def _persist_alt_relationship(
|
||||
|
||||
# Folder-backed local DB
|
||||
try:
|
||||
if (
|
||||
type(backend).__name__ == "Folder"
|
||||
and hasattr(backend, "location")
|
||||
and callable(getattr(backend, "location"))
|
||||
):
|
||||
if (type(backend).__name__ == "Folder" and hasattr(backend,
|
||||
"location")
|
||||
and callable(getattr(backend,
|
||||
"location"))):
|
||||
from API.folder import API_folder_store
|
||||
from pathlib import Path
|
||||
|
||||
root = Path(str(backend.location())).expanduser()
|
||||
with API_folder_store(root) as db:
|
||||
db.set_relationship_by_hash(alt_norm, king_norm, "alt", bidirectional=False)
|
||||
db.set_relationship_by_hash(
|
||||
alt_norm,
|
||||
king_norm,
|
||||
"alt",
|
||||
bidirectional=False
|
||||
)
|
||||
return
|
||||
except Exception:
|
||||
pass
|
||||
@@ -194,7 +212,10 @@ def _persist_alt_relationship(
|
||||
|
||||
|
||||
def _trim_media(
|
||||
input_source: str, output_path: Path, start_seconds: float, duration_seconds: float
|
||||
input_source: str,
|
||||
output_path: Path,
|
||||
start_seconds: float,
|
||||
duration_seconds: float
|
||||
) -> bool:
|
||||
"""Trim media using ffmpeg.
|
||||
|
||||
@@ -276,7 +297,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# If path arg provided, add it to inputs
|
||||
if path_arg:
|
||||
inputs.append({"path": path_arg})
|
||||
inputs.append({
|
||||
"path": path_arg
|
||||
})
|
||||
|
||||
if not inputs:
|
||||
log("No input files provided.", file=sys.stderr)
|
||||
@@ -317,13 +340,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if outdir_arg:
|
||||
output_dir = Path(str(outdir_arg)).expanduser()
|
||||
elif store_name:
|
||||
from config import resolve_output_dir
|
||||
from SYS.config import resolve_output_dir
|
||||
|
||||
output_dir = resolve_output_dir(config or {})
|
||||
elif path_obj is not None:
|
||||
output_dir = path_obj.parent
|
||||
else:
|
||||
from config import resolve_output_dir
|
||||
from SYS.config import resolve_output_dir
|
||||
|
||||
output_dir = resolve_output_dir(config or {})
|
||||
|
||||
@@ -450,7 +473,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Best-effort resolve stored path for folder backends.
|
||||
try:
|
||||
if type(backend).__name__ == "Folder" and hasattr(backend, "get_file"):
|
||||
if type(backend).__name__ == "Folder" and hasattr(
|
||||
backend,
|
||||
"get_file"):
|
||||
p = backend.get_file(str(stored_hash))
|
||||
if isinstance(p, Path):
|
||||
stored_path = str(p)
|
||||
@@ -459,7 +484,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
except Exception:
|
||||
stored_path = None
|
||||
except Exception as exc:
|
||||
log(f"Failed to add clip to store '{store_name}': {exc}", file=sys.stderr)
|
||||
log(
|
||||
f"Failed to add clip to store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
# If we stored it, persist relationship alt -> king in that store.
|
||||
if stored_store and stored_hash and source_hash: