@@ -14,7 +14,7 @@ def register(names: Iterable[str]):
    """Decorator to register a function under one or more command names.

    Usage:
        @register(["add-tag", "add-tags"])
        @register(["add-tags"])
        def _run(result, args, config) -> int: ...
    """
    def _wrap(fn: Cmdlet) -> Cmdlet:

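For context, a minimal sketch of how a cmdlet module might use this decorator; the name list and return convention come from the docstring above, while the body is illustrative only:

    @register(["add-tags"])
    def _run(result, args, config) -> int:
        # A cmdlet receives the piped result, its CLI args, and the config,
        # and returns 0 on success (non-zero signals failure to the pipeline).
        return 0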
@@ -1,7 +1,4 @@
"""Shared utilities for cmdlets and funacts.

This module provides common utility functions for working with hashes, tags,
relationship data, and other frequently-needed operations.
"""
"""

from __future__ import annotations
@@ -192,7 +189,7 @@ class SharedArgs:
    DELETE_FLAG = CmdletArg(
        "delete",
        type="flag",
        description="Delete the file and its .tags after successful operation."
        description="Delete the file and its .tag after successful operation."
    )

    # Metadata arguments
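For reference, a hedged sketch of how a flag like this is consumed after parsing; the parse_cmdlet_args call mirrors its use elsewhere in this commit, and the deletion step is only indicated:

    parsed = parse_cmdlet_args(args, self)   # flag arguments parse to booleans
    if parsed.get("delete", False):
        ...  # delete the file and its sidecar once the operation succeeds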
@@ -1092,7 +1089,7 @@ def create_pipe_object_result(
    hash_value: Optional[str] = None,
    is_temp: bool = False,
    parent_hash: Optional[str] = None,
    tags: Optional[List[str]] = None,
    tag: Optional[List[str]] = None,
    **extra: Any
) -> Dict[str, Any]:
    """Create a PipeObject-compatible result dict for pipeline chaining.
@@ -1109,7 +1106,7 @@ def create_pipe_object_result(
        hash_value: SHA-256 hash of file (for integrity)
        is_temp: If True, this is a temporary/intermediate artifact
        parent_hash: Hash of the parent file in the chain (for provenance)
        tags: List of tags to apply
        tag: List of tag values to apply
        **extra: Additional fields

    Returns:
@@ -1130,8 +1127,8 @@ def create_pipe_object_result(
        result['is_temp'] = True
    if parent_hash:
        result['parent_hash'] = parent_hash
    if tags:
        result['tags'] = tags
    if tag:
        result['tag'] = tag

    # Canonical store field: use source for compatibility
    try:
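A short usage sketch; only the keyword parameters visible in this hunk are assumed (parameters outside the hunk are presumed to have defaults), and the hash is a placeholder:

    result = create_pipe_object_result(
        hash_value="0" * 64,                      # placeholder SHA-256
        is_temp=False,
        tag=["title:Example", "artist:Unknown"],
    )
    # result is a plain dict suitable for pipeline chaining, e.g. result['tag']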
@@ -1350,33 +1347,46 @@ def collapse_namespace_tags(tags: Optional[Iterable[Any]], namespace: str, prefe
    return result


def extract_tags_from_result(result: Any) -> list[str]:
    tags: list[str] = []
    if isinstance(result, models.PipeObject):
        tags.extend(result.tags or [])
        tags.extend(result.extra.get('tags', []))
    elif hasattr(result, 'tags'):
        # Handle objects with tags attribute (e.g. SearchResult)
        val = getattr(result, 'tags')
        if isinstance(val, (list, set, tuple)):
            tags.extend(val)
        elif isinstance(val, str):
            tags.append(val)

    if isinstance(result, dict):
        raw_tags = result.get('tags')
        if isinstance(raw_tags, list):
            tags.extend(raw_tags)
        elif isinstance(raw_tags, str):
            tags.append(raw_tags)
        extra = result.get('extra')
        if isinstance(extra, dict):
            extra_tags = extra.get('tags')
            if isinstance(extra_tags, list):
                tags.extend(extra_tags)
            elif isinstance(extra_tags, str):
                tags.append(extra_tags)
    return merge_sequences(tags, case_sensitive=True)
def collapse_namespace_tag(tags: Optional[Iterable[Any]], namespace: str, prefer: str = "last") -> list[str]:
    """Singular alias for collapse_namespace_tags.

    Some cmdlets prefer the singular name; keep behavior centralized.
    """
    return collapse_namespace_tags(tags, namespace, prefer=prefer)


def extract_tag_from_result(result: Any) -> list[str]:
    tag: list[str] = []
    if isinstance(result, models.PipeObject):
        tag.extend(result.tag or [])
        if isinstance(result.extra, dict):
            extra_tag = result.extra.get('tag')
            if isinstance(extra_tag, list):
                tag.extend(extra_tag)
            elif isinstance(extra_tag, str):
                tag.append(extra_tag)
    elif hasattr(result, 'tag'):
        # Handle objects with tag attribute (e.g. SearchResult)
        val = getattr(result, 'tag')
        if isinstance(val, (list, set, tuple)):
            tag.extend(val)
        elif isinstance(val, str):
            tag.append(val)

    if isinstance(result, dict):
        raw_tag = result.get('tag')
        if isinstance(raw_tag, list):
            tag.extend(raw_tag)
        elif isinstance(raw_tag, str):
            tag.append(raw_tag)
        extra = result.get('extra')
        if isinstance(extra, dict):
            extra_tag = extra.get('tag')
            if isinstance(extra_tag, list):
                tag.extend(extra_tag)
            elif isinstance(extra_tag, str):
                tag.append(extra_tag)
    return merge_sequences(tag, case_sensitive=True)


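A quick behavioral sketch of extract_tag_from_result on a plain dict payload; the values are illustrative, and merge_sequences is assumed to de-duplicate while preserving order:

    payload = {"tag": ["artist:Foo"], "extra": {"tag": "genre:jazz"}}
    extract_tag_from_result(payload)
    # -> ["artist:Foo", "genre:jazz"]
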
def extract_title_from_result(result: Any) -> Optional[str]:
@@ -1469,7 +1479,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
    debug(f" target={getattr(value, 'target', None)}")
    debug(f" hash={getattr(value, 'hash', None)}")
    debug(f" media_kind={getattr(value, 'media_kind', None)}")
    debug(f" tags={getattr(value, 'tags', None)}")
    debug(f" tag={getattr(value, 'tag', None)}")
    debug(f" tag_summary={getattr(value, 'tag_summary', None)}")
    debug(f" size_bytes={getattr(value, 'size_bytes', None)}")
    debug(f" duration_seconds={getattr(value, 'duration_seconds', None)}")
@@ -1483,7 +1493,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
    return value

    known_keys = {
        "hash", "store", "tags", "title", "url", "source_url", "duration", "metadata",
        "hash", "store", "tag", "title", "url", "source_url", "duration", "metadata",
        "warnings", "path", "relationships", "is_temp", "action", "parent_hash",
    }

@@ -1542,18 +1552,14 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
    # Extract relationships
    rels = value.get("relationships") or {}

    # Consolidate tags: prefer tags_set over tags, tag_summary
    tags_val = []
    if "tags_set" in value and value["tags_set"]:
        tags_val = list(value["tags_set"])
    elif "tags" in value and isinstance(value["tags"], (list, set)):
        tags_val = list(value["tags"])
    elif "tag" in value:
        # Single tag string or list
        if isinstance(value["tag"], list):
            tags_val = value["tag"]  # Already a list
        else:
            tags_val = [value["tag"]]  # Wrap single string in list
    # Canonical tag: accept list or single string
    tag_val: list[str] = []
    if "tag" in value:
        raw_tag = value["tag"]
        if isinstance(raw_tag, list):
            tag_val = [str(t) for t in raw_tag if t is not None]
        elif isinstance(raw_tag, str):
            tag_val = [raw_tag]

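A hedged illustration of the canonical-tag normalization above, assuming this dict branch feeds models.PipeObject(tag=tag_val, ...) as the next hunk shows; the input dicts are hypothetical:

    coerce_to_pipe_object({"tag": "title:Solo", "path": "/x.mp3"}).tag
    # -> ["title:Solo"]
    coerce_to_pipe_object({"tag": ["a:1", None, "b:2"], "path": "/x.mp3"}).tag
    # -> ["a:1", "b:2"]  (None entries dropped, values coerced to str)
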
    # Consolidate path: prefer explicit path key, but NOT target if it's a URL
    path_val = value.get("path")
@@ -1580,7 +1586,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
    pipe_obj = models.PipeObject(
        hash=hash_val,
        store=store_val,
        tags=tags_val,
        tag=tag_val,
        title=title_val,
        url=url_val,
        source_url=value.get("source_url"),
@@ -1624,7 +1630,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
        store=store_val,
        path=str(path_val) if path_val and path_val != "unknown" else None,
        title=title_val,
        tags=[],
        tag=[],
        extra={},
    )


@@ -12,10 +12,10 @@ from SYS.logger import log, debug
from Store import Store
from ._shared import (
    Cmdlet, CmdletArg, parse_cmdlet_args, SharedArgs,
    extract_tags_from_result, extract_title_from_result, extract_url_from_result,
    extract_tag_from_result, extract_title_from_result, extract_url_from_result,
    merge_sequences, extract_relationships, extract_duration, coerce_to_pipe_object
)
from ._shared import collapse_namespace_tags
from ._shared import collapse_namespace_tag
from API.folder import read_sidecar, find_sidecar, write_sidecar, API_folder_store
from SYS.utils import sha256_file, unique_path
from metadata import write_metadata
@@ -419,14 +419,14 @@ class Add_File(Cmdlet):
        hash_value: str,
        store: str,
        path: Optional[str],
        tags: List[str],
        tag: List[str],
        title: Optional[str],
        extra_updates: Optional[Dict[str, Any]] = None,
    ) -> None:
        pipe_obj.hash = hash_value
        pipe_obj.store = store
        pipe_obj.path = path
        pipe_obj.tags = tags
        pipe_obj.tag = tag
        if title:
            pipe_obj.title = title
        if isinstance(pipe_obj.extra, dict):
@@ -452,10 +452,10 @@ class Add_File(Cmdlet):
        Prepare tags, url, and title for the file.
        Returns (tags, url, preferred_title, file_hash)
        """
        tags_from_result = list(pipe_obj.tags or [])
        tags_from_result = list(pipe_obj.tag or [])
        if not tags_from_result:
            try:
                tags_from_result = list(extract_tags_from_result(result) or [])
                tags_from_result = list(extract_tag_from_result(result) or [])
            except Exception:
                tags_from_result = []

@@ -488,7 +488,7 @@ class Add_File(Cmdlet):
            return tag

        tags_from_result_no_title = [t for t in tags_from_result if not str(t).strip().lower().startswith("title:")]
        sidecar_tags = collapse_namespace_tags([normalize_title_tag(t) for t in sidecar_tags], "title", prefer="last")
        sidecar_tags = collapse_namespace_tag([normalize_title_tag(t) for t in sidecar_tags], "title", prefer="last")
        sidecar_tags_filtered = [t for t in sidecar_tags if not str(t).strip().lower().startswith("title:")]

        merged_tags = merge_sequences(tags_from_result_no_title, sidecar_tags_filtered, case_sensitive=True)
@@ -501,7 +501,7 @@ class Add_File(Cmdlet):
        file_hash = Add_File._resolve_file_hash(result, media_path, pipe_obj, sidecar_hash)

        # Persist back to PipeObject
        pipe_obj.tags = merged_tags
        pipe_obj.tag = merged_tags
        if preferred_title and not pipe_obj.title:
            pipe_obj.title = preferred_title
        if file_hash and not pipe_obj.hash:
@@ -591,7 +591,7 @@ class Add_File(Cmdlet):
            hash_value=f_hash or "unknown",
            store="local",
            path=str(target_path),
            tags=tags,
            tag=tags,
            title=chosen_title,
            extra_updates=extra_updates,
        )
@@ -729,7 +729,7 @@ class Add_File(Cmdlet):
            hash_value=f_hash or "unknown",
            store=provider_name or "provider",
            path=file_path,
            tags=pipe_obj.tags,
            tag=pipe_obj.tag,
            title=pipe_obj.title or (media_path.name if media_path else None),
            extra_updates=extra_updates,
        )
@@ -782,7 +782,7 @@ class Add_File(Cmdlet):
            hash_value=file_identifier if len(file_identifier) == 64 else f_hash or "unknown",
            store=backend_name,
            path=stored_path,
            tags=tags,
            tag=tags,
            title=title or pipe_obj.title or media_path.name,
            extra_updates={
                "url": url,
@@ -907,8 +907,6 @@ class Add_File(Cmdlet):
        possible_sidecars = [
            source_path.with_suffix(source_path.suffix + ".json"),
            source_path.with_name(source_path.name + ".tag"),
            source_path.with_name(source_path.name + ".tags"),
            source_path.with_name(source_path.name + ".tags.txt"),
            source_path.with_name(source_path.name + ".metadata"),
            source_path.with_name(source_path.name + ".notes"),
        ]
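For orientation, a small sketch of the sidecar naming convention used above (the file path is hypothetical); note that Path.with_name appends to the full file name, keeping the media extension:

    from pathlib import Path

    source_path = Path("/media/song.mp3")
    source_path.with_name(source_path.name + ".tag")
    # -> /media/song.mp3.tag
    source_path.with_suffix(source_path.suffix + ".json")
    # -> /media/song.mp3.json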
@@ -944,8 +942,6 @@ class Add_File(Cmdlet):
            media_path.parent / (media_path.name + '.metadata'),
            media_path.parent / (media_path.name + '.notes'),
            media_path.parent / (media_path.name + '.tag'),
            media_path.parent / (media_path.name + '.tags'),
            media_path.parent / (media_path.name + '.tags.txt'),
        ]
        for target in targets:
            try:

@@ -9,10 +9,172 @@ from SYS.logger import log
import models
import pipeline as ctx
from ._shared import normalize_result_input, filter_results_by_temp
from API import HydrusNetwork as hydrus_wrapper
from API.folder import write_sidecar, API_folder_store
from ._shared import Cmdlet, CmdletArg, SharedArgs, normalize_hash, parse_tag_arguments, expand_tag_groups, parse_cmdlet_args, collapse_namespace_tags, should_show_help, get_field
from config import get_local_storage_path
from ._shared import (
    Cmdlet,
    CmdletArg,
    SharedArgs,
    normalize_hash,
    parse_tag_arguments,
    expand_tag_groups,
    parse_cmdlet_args,
    collapse_namespace_tag,
    should_show_help,
    get_field,
)
from Store import Store
from SYS.utils import sha256_file


def _extract_title_tag(tags: List[str]) -> Optional[str]:
    """Return the value of the first title: tag if present."""
    for t in tags:
        if t.lower().startswith("title:"):
            value = t.split(":", 1)[1].strip()
            return value or None
    return None
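Behavioral sketch of _extract_title_tag (inputs illustrative); note it returns on the first title: tag even when the value is empty:

    _extract_title_tag(["artist:Foo", "title:My Song"])  # -> "My Song"
    _extract_title_tag(["title:   ", "title:Real"])      # -> None (first match wins)
    _extract_title_tag(["artist:Foo"])                   # -> None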


def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
    """Update result object/dict title fields and columns in-place."""
    if not title_value:
        return
    if isinstance(res, models.PipeObject):
        res.title = title_value
        # Update columns if present (Title column assumed index 0)
        columns = getattr(res, "columns", None)
        if isinstance(columns, list) and columns:
            label, *_ = columns[0]
            if str(label).lower() == "title":
                columns[0] = (label, title_value)
    elif isinstance(res, dict):
        res["title"] = title_value
        cols = res.get("columns")
        if isinstance(cols, list):
            updated = []
            changed = False
            for col in cols:
                if isinstance(col, tuple) and len(col) == 2:
                    label, _val = col
                    if str(label).lower() == "title":
                        updated.append((label, title_value))
                        changed = True
                    else:
                        updated.append(col)
                else:
                    updated.append(col)
            if changed:
                res["columns"] = updated


def _matches_target(item: Any, target_hash: Optional[str], target_path: Optional[str]) -> bool:
    """Determine whether a result item refers to the given hash/path target (canonical fields only)."""

    def norm(val: Any) -> Optional[str]:
        return str(val).lower() if val is not None else None

    target_hash_l = target_hash.lower() if target_hash else None
    target_path_l = target_path.lower() if target_path else None

    if isinstance(item, dict):
        hashes = [norm(item.get("hash"))]
        paths = [norm(item.get("path"))]
    else:
        hashes = [norm(get_field(item, "hash"))]
        paths = [norm(get_field(item, "path"))]

    if target_hash_l and target_hash_l in hashes:
        return True
    if target_path_l and target_path_l in paths:
        return True
    return False
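A small sketch of _matches_target semantics; the hash and path values are hypothetical, and comparison is case-insensitive on both sides:

    item = {"hash": "ABC123", "path": "/x/y.mp3"}
    _matches_target(item, "abc123", None)    # -> True
    _matches_target(item, None, "/X/Y.MP3")  # -> True
    _matches_target(item, "def", "/other")   # -> False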


def _update_item_title_fields(item: Any, new_title: str) -> None:
    """Mutate an item to reflect a new title in plain fields and columns."""
    if isinstance(item, models.PipeObject):
        item.title = new_title
        columns = getattr(item, "columns", None)
        if isinstance(columns, list) and columns:
            label, *_ = columns[0]
            if str(label).lower() == "title":
                columns[0] = (label, new_title)
    elif isinstance(item, dict):
        item["title"] = new_title
        cols = item.get("columns")
        if isinstance(cols, list):
            updated_cols = []
            changed = False
            for col in cols:
                if isinstance(col, tuple) and len(col) == 2:
                    label, _val = col
                    if str(label).lower() == "title":
                        updated_cols.append((label, new_title))
                        changed = True
                    else:
                        updated_cols.append(col)
                else:
                    updated_cols.append(col)
            if changed:
                item["columns"] = updated_cols


def _refresh_result_table_title(new_title: str, target_hash: Optional[str], target_path: Optional[str]) -> None:
    """Refresh the cached result table with an updated title and redisplay it."""
    try:
        last_table = ctx.get_last_result_table()
        items = ctx.get_last_result_items()
        if not last_table or not items:
            return

        updated_items = []
        match_found = False
        for item in items:
            try:
                if _matches_target(item, target_hash, target_path):
                    _update_item_title_fields(item, new_title)
                    match_found = True
            except Exception:
                pass
            updated_items.append(item)
        if not match_found:
            return

        new_table = last_table.copy_with_title(getattr(last_table, "title", ""))

        for item in updated_items:
            new_table.add_result(item)

        # Keep the underlying history intact; update only the overlay so @.. can
        # clear the overlay then continue back to prior tables (e.g., the search list).
        ctx.set_last_result_table_overlay(new_table, updated_items)
    except Exception:
        pass


def _refresh_tag_view(res: Any, target_hash: Optional[str], store_name: Optional[str], target_path: Optional[str], config: Dict[str, Any]) -> None:
    """Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
    try:
        from cmdlets import get_tag as get_tag_cmd  # type: ignore
    except Exception:
        return

    if not target_hash or not store_name:
        return

    refresh_args: List[str] = ["-hash", target_hash, "-store", store_name]

    try:
        subject = ctx.get_last_result_subject()
        if subject and _matches_target(subject, target_hash, target_path):
            get_tag_cmd._run(subject, refresh_args, config)
            return
    except Exception:
        pass

    try:
        get_tag_cmd._run(res, refresh_args, config)
    except Exception:
        pass



@@ -22,23 +184,23 @@ class Add_Tag(Cmdlet):
    def __init__(self) -> None:
        super().__init__(
            name="add-tag",
            summary="Add a tag to a Hydrus file or write it to a local .tags sidecar.",
            usage="add-tag [-hash <sha256>] [-store <backend>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
            summary="Add a tag to a file in a store.",
            usage="add-tag -store <store> [-hash <sha256>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
            arg=[
                SharedArgs.HASH,
                SharedArgs.STORE,
                CmdletArg("-duplicate", type="string", description="Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)"),
                CmdletArg("-list", type="string", description="Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult)."),
                CmdletArg("--all", type="flag", description="Include temporary files in tagging (by default, only tags non-temporary files)."),
                CmdletArg("tags", type="string", required=False, description="One or more tags to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tags from pipeline payload.", variadic=True),
                CmdletArg("--all", type="flag", description="Include temporary files in tagging (by default, only tags non-temporary files)."),
                CmdletArg("tag", type="string", required=False, description="One or more tags to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tags from the pipeline payload.", variadic=True),
            ],
            detail=[
                "- By default, only tags non-temporary files (from pipelines). Use --all to tag everything.",
                "- Without -hash and when the selection is a local file, tags are written to <file>.tags.",
                "- With a Hydrus hash, tags are sent to the 'my tags' service.",
                "- Multiple tags can be comma-separated or space-separated.",
                "- By default, only tags non-temporary files (from pipelines). Use --all to tag everything.",
                "- Requires a store backend: use -store or pipe items that include store.",
                "- If -hash is not provided, uses the piped item's hash (or derives from its path when possible).",
                "- Multiple tags can be comma-separated or space-separated.",
                "- Use -list to include predefined tag lists from adjective.json: -list philosophy,occult",
                "- Tags can also reference lists with curly braces: add-tag {philosophy} \"other:tag\"",
                "- Tags can also reference lists with curly braces: add-tag {philosophy} \"other:tag\"",
                "- Use -duplicate to copy EXISTING tag values to new namespaces:",
                "  Explicit format: -duplicate title:album,artist (copies title: to album: and artist:)",
                "  Inferred format: -duplicate title,album,artist (first is source, rest are targets)",
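An illustrative invocation of the two -duplicate formats described above; the store name and hash placeholder are hypothetical:

    # Explicit: copy the existing title: value to album: and artist:
    add-tag -store local -hash <sha256> -duplicate title:album,artist

    # Inferred: the first namespace is the source, the rest are targets
    add-tag -store local -hash <sha256> -duplicate title,album,artist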
@@ -50,180 +212,20 @@ class Add_Tag(Cmdlet):
        )
        self.register()

    @staticmethod
    def _extract_title_tag(tags: List[str]) -> Optional[str]:
        """Return the value of the first title: tag if present."""
        for tag in tags:
            if isinstance(tag, str) and tag.lower().startswith("title:"):
                value = tag.split(":", 1)[1].strip()
                if value:
                    return value
        return None

    @staticmethod
    def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
        """Update result object/dict title fields and columns in-place."""
        if not title_value:
            return
        if isinstance(res, models.PipeObject):
            res.title = title_value
            if hasattr(res, "columns") and isinstance(res.columns, list) and res.columns:
                label, *_ = res.columns[0]
                if str(label).lower() == "title":
                    res.columns[0] = (res.columns[0][0], title_value)
        elif isinstance(res, dict):
            res["title"] = title_value
            cols = res.get("columns")
            if isinstance(cols, list):
                updated = []
                changed = False
                for col in cols:
                    if isinstance(col, tuple) and len(col) == 2:
                        label, val = col
                        if str(label).lower() == "title":
                            updated.append((label, title_value))
                            changed = True
                        else:
                            updated.append(col)
                    else:
                        updated.append(col)
                if changed:
                    res["columns"] = updated

    @staticmethod
    def _matches_target(item: Any, file_hash: Optional[str], path: Optional[str]) -> bool:
        """Determine whether a result item refers to the given hash/path target."""
        file_hash_l = file_hash.lower() if file_hash else None
        path_l = path.lower() if path else None

        def norm(val: Any) -> Optional[str]:
            return str(val).lower() if val is not None else None

        hash_fields = ["hash"]
        path_fields = ["path", "target"]

        if isinstance(item, dict):
            hashes = [norm(item.get(field)) for field in hash_fields]
            paths = [norm(item.get(field)) for field in path_fields]
        else:
            hashes = [norm(get_field(item, field)) for field in hash_fields]
            paths = [norm(get_field(item, field)) for field in path_fields]

        if file_hash_l and file_hash_l in hashes:
            return True
        if path_l and path_l in paths:
            return True
        return False

    @staticmethod
    def _update_item_title_fields(item: Any, new_title: str) -> None:
        """Mutate an item to reflect a new title in plain fields and columns."""
        if isinstance(item, models.PipeObject):
            item.title = new_title
            if hasattr(item, "columns") and isinstance(item.columns, list) and item.columns:
                label, *_ = item.columns[0]
                if str(label).lower() == "title":
                    item.columns[0] = (label, new_title)
        elif isinstance(item, dict):
            item["title"] = new_title
            cols = item.get("columns")
            if isinstance(cols, list):
                updated_cols = []
                changed = False
                for col in cols:
                    if isinstance(col, tuple) and len(col) == 2:
                        label, val = col
                        if str(label).lower() == "title":
                            updated_cols.append((label, new_title))
                            changed = True
                        else:
                            updated_cols.append(col)
                    else:
                        updated_cols.append(col)
                if changed:
                    item["columns"] = updated_cols

    def _refresh_result_table_title(self, new_title: str, file_hash: Optional[str], path: Optional[str]) -> None:
        """Refresh the cached result table with an updated title and redisplay it."""
        try:
            last_table = ctx.get_last_result_table()
            items = ctx.get_last_result_items()
            if not last_table or not items:
                return

            updated_items = []
            match_found = False
            for item in items:
                try:
                    if self._matches_target(item, file_hash, path):
                        self._update_item_title_fields(item, new_title)
                        match_found = True
                except Exception:
                    pass
                updated_items.append(item)
            if not match_found:
                return

            from result_table import ResultTable  # Local import to avoid circular dependency

            new_table = last_table.copy_with_title(getattr(last_table, "title", ""))

            for item in updated_items:
                new_table.add_result(item)

            ctx.set_last_result_table_overlay(new_table, updated_items)
        except Exception:
            pass

    def _refresh_tags_view(self, res: Any, file_hash: Optional[str], path: Optional[str], config: Dict[str, Any]) -> None:
        """Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
        try:
            from cmdlets import get_tag as get_tag_cmd  # type: ignore
        except Exception:
            return

        target_hash = file_hash
        refresh_args: List[str] = []
        if target_hash:
            refresh_args = ["-hash", target_hash]

        try:
            subject = ctx.get_last_result_subject()
            if subject and self._matches_target(subject, file_hash, path):
                get_tag_cmd._run(subject, refresh_args, config)
                return
        except Exception:
            pass

        if target_hash:
            try:
                get_tag_cmd._run(res, refresh_args, config)
            except Exception:
                pass

    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        """Add a tag to a file with smart filtering for pipeline results."""
        """Add tag to a file with smart filtering for pipeline results."""
        if should_show_help(args):
            log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
            return 0

        # Parse arguments
        parsed = parse_cmdlet_args(args, self)

        # Check for --all flag
        include_temp = parsed.get("all", False)

        # Get explicit -hash and -store overrides from CLI
        hash_override = normalize_hash(parsed.get("hash"))
        store_override = parsed.get("store")

        # Normalize input to list
        results = normalize_result_input(result)

        # If no piped results but we have -hash flag, create a minimal synthetic result
        if not results and hash_override:
            results = [{"hash": hash_override, "is_temp": False}]
            if store_override:
                results[0]["store"] = store_override

        # Filter by temp status (unless --all is set)
        if not include_temp:
@@ -233,34 +235,35 @@ class Add_Tag(Cmdlet):
            log("No valid files to tag (all results were temporary; use --all to include temporary files)", file=sys.stderr)
            return 1

        # Get tags from arguments (or fallback to pipeline payload)
        raw_tags = parsed.get("tags", [])
        if isinstance(raw_tags, str):
            raw_tags = [raw_tags]
        # Get tag from arguments (or fallback to pipeline payload)
        raw_tag = parsed.get("tag", [])
        if isinstance(raw_tag, str):
            raw_tag = [raw_tag]

        # Fallback: if no tags provided explicitly, try to pull from first result payload
        if not raw_tags and results:
        # Fallback: if no tag provided explicitly, try to pull from first result payload
        if not raw_tag and results:
            first = results[0]
            payload_tags = None
            payload_tag = None

            # Try multiple tag lookup strategies in order
            tag_lookups = [
                lambda x: x.extra.get("tags") if isinstance(x, models.PipeObject) and isinstance(x.extra, dict) else None,
                lambda x: x.get("tags") if isinstance(x, dict) else None,
                lambda x: x.get("extra", {}).get("tags") if isinstance(x, dict) and isinstance(x.get("extra"), dict) else None,
                lambda x: getattr(x, "tags", None),
                lambda x: getattr(x, "tag", None),
                lambda x: x.get("tag") if isinstance(x, dict) else None,
            ]

            for lookup in tag_lookups:
                try:
                    payload_tags = lookup(first)
                    if payload_tags:
                    payload_tag = lookup(first)
                    if payload_tag:
                        break
                except (AttributeError, TypeError, KeyError):
                    continue
            if payload_tags:
                if isinstance(payload_tags, str):
                    raw_tags = [payload_tags]
                elif isinstance(payload_tags, list):
                    raw_tags = payload_tags

            if payload_tag:
                if isinstance(payload_tag, str):
                    raw_tag = [payload_tag]
                elif isinstance(payload_tag, list):
                    raw_tag = payload_tag

        # Handle -list argument (convert to {list} syntax)
        list_arg = parsed.get("list")
@@ -268,222 +271,184 @@ class Add_Tag(Cmdlet):
            for l in list_arg.split(','):
                l = l.strip()
                if l:
                    raw_tags.append(f"{{{l}}}")
                    raw_tag.append(f"{{{l}}}")

        # Parse and expand tags
        tags_to_add = parse_tag_arguments(raw_tags)
        tags_to_add = expand_tag_groups(tags_to_add)
        # Parse and expand tag
        tag_to_add = parse_tag_arguments(raw_tag)
        tag_to_add = expand_tag_groups(tag_to_add)

        # Allow hash override via namespaced token (e.g., "hash:abcdef...")
        extracted_hash = None
        filtered_tags: List[str] = []
        for tag in tags_to_add:
        filtered_tag: List[str] = []
        for tag in tag_to_add:
            if isinstance(tag, str) and tag.lower().startswith("hash:"):
                _, _, hash_val = tag.partition(":")
                if hash_val:
                    extracted_hash = normalize_hash(hash_val.strip())
                continue
            filtered_tags.append(tag)
        tags_to_add = filtered_tags
            filtered_tag.append(tag)
        tag_to_add = filtered_tag

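A brief sketch of the hash: token behavior above; the store name and hash are placeholders, and the token is consumed rather than added as a tag:

    add-tag -store local hash:<sha256> artist:Foo
    # -> tags applied: ["artist:Foo"]; the target hash comes from the hash: token
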
        if not tags_to_add:
            log("No tags provided to add", file=sys.stderr)
        if not tag_to_add:
            log("No tag provided to add", file=sys.stderr)
            return 1

        def _find_library_root(path_obj: Path) -> Optional[Path]:
            candidates = []
            cfg_root = get_local_storage_path(config) if config else None
            if cfg_root:
                try:
                    candidates.append(Path(cfg_root).expanduser())
                except Exception:
                    pass
            try:
                for candidate in candidates:
                    if (candidate / "medios-macina.db").exists():
                        return candidate
                for parent in [path_obj] + list(path_obj.parents):
                    if (parent / "medios-macina.db").exists():
                        return parent
            except Exception:
                pass
            return None

        # Get other flags
        # Get other flags (hash override can come from -hash or hash: token)
        hash_override = normalize_hash(parsed.get("hash")) or extracted_hash
        duplicate_arg = parsed.get("duplicate")

        if not tags_to_add and not duplicate_arg:
            # Write sidecar files with the tags that are already in the result dicts
            sidecar_count = 0
            for res in results:
                # Handle both dict and PipeObject formats
                file_path = None
                tags = []
                file_hash = ""
                # Use canonical field access with get_field for both dict and objects
                file_path = get_field(res, "path")
                # Try tags from top-level 'tags' or from 'extra.tags'
                tags = get_field(res, "tags") or (get_field(res, "extra") or {}).get("tags", [])
                file_hash = get_field(res, "hash") or ""
                if not file_path:
                    log(f"[add_tag] Warning: Result has no path, skipping", file=sys.stderr)
                    ctx.emit(res)
                    continue
                if tags:
                    # Write sidecar file for this file with its tags
                    try:
                        sidecar_path = write_sidecar(Path(file_path), tags, [], file_hash)
                        log(f"[add_tag] Wrote {len(tags)} tag(s) to sidecar: {sidecar_path}", file=sys.stderr)
                        sidecar_count += 1
                    except Exception as e:
                        log(f"[add_tag] Warning: Failed to write sidecar for {file_path}: {e}", file=sys.stderr)
                ctx.emit(res)
            if sidecar_count > 0:
                log(f"[add_tag] Wrote {sidecar_count} sidecar file(s) with embedded tags", file=sys.stderr)
            else:
                log(f"[add_tag] No tags to write - passed {len(results)} result(s) through unchanged", file=sys.stderr)
            return 0

        # Main loop: process results with tags to add
        total_new_tags = 0
        # Tags are provided - apply them to each store-backed result
        total_added = 0
        total_modified = 0

        store_override = parsed.get("store")

        for res in results:
            # Extract file info from result
            file_path = None
            existing_tags = []
            file_hash = ""
            storage_source = None

            # Use canonical getters for fields from both dicts and PipeObject
            file_path = get_field(res, "path")
            existing_tags = get_field(res, "tags") or []
            if not existing_tags:
                existing_tags = (get_field(res, "extra", {}) or {}).get("tags") or []
            file_hash = get_field(res, "hash") or ""
            store_name = store_override or get_field(res, "store")

            original_tags_lower = {str(t).lower() for t in existing_tags if isinstance(t, str)}
            original_title = self._extract_title_tag(list(existing_tags))

            # Apply CLI overrides if provided
            if hash_override and not file_hash:
                file_hash = hash_override
            store_name: Optional[str]
            raw_hash: Optional[str]
            raw_path: Optional[str]

            if isinstance(res, models.PipeObject):
                store_name = store_override or res.store
                raw_hash = res.hash
                raw_path = res.path
            elif isinstance(res, dict):
                store_name = store_override or res.get("store")
                raw_hash = res.get("hash")
                raw_path = res.get("path")
            else:
                ctx.emit(res)
                continue

            if not store_name:
                log("[add_tag] Missing store (use -store or pipe a result with store)", file=sys.stderr)
                log("[add_tag] Error: Missing -store and item has no store field", file=sys.stderr)
                return 1

            resolved_hash = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
            if not resolved_hash and raw_path:
                try:
                    p = Path(str(raw_path))
                    stem = p.stem
                    if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
                        resolved_hash = stem.lower()
                    elif p.exists() and p.is_file():
                        resolved_hash = sha256_file(p)
                except Exception:
                    resolved_hash = None

            if not resolved_hash:
                log("[add_tag] Warning: Item missing usable hash (and could not derive from path); skipping", file=sys.stderr)
                ctx.emit(res)
                continue

            # Check if we have sufficient identifier (file_path OR file_hash)
            if not file_path and not file_hash:
                log(f"[add_tag] Warning: Result has neither path nor hash available, skipping", file=sys.stderr)
                ctx.emit(res)
                continue
            # Handle -duplicate logic (copy existing tags to new namespaces)

            try:
                backend = Store(config)[str(store_name)]
            except Exception as exc:
                log(f"[add_tag] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
                return 1

            try:
                existing_tag, _src = backend.get_tag(resolved_hash, config=config)
            except Exception:
                existing_tag = []

            existing_tag_list = [t for t in (existing_tag or []) if isinstance(t, str)]
            existing_lower = {t.lower() for t in existing_tag_list}
            original_title = _extract_title_tag(existing_tag_list)

            # Per-item tag list (do not mutate shared list)
            item_tag_to_add = list(tag_to_add)
            item_tag_to_add = collapse_namespace_tag(item_tag_to_add, "title", prefer="last")

            # Handle -duplicate logic (copy existing tag to new namespaces)
            if duplicate_arg:
                # Parse duplicate format: source:target1,target2 or source,target1,target2
                parts = duplicate_arg.split(':')
                parts = str(duplicate_arg).split(':')
                source_ns = ""
                targets = []
                targets: list[str] = []

                if len(parts) > 1:
                    # Explicit format: source:target1,target2
                    source_ns = parts[0]
                    targets = parts[1].split(',')
                    targets = [t.strip() for t in parts[1].split(',') if t.strip()]
                else:
                    # Inferred format: source,target1,target2
                    parts = duplicate_arg.split(',')
                    if len(parts) > 1:
                        source_ns = parts[0]
                        targets = parts[1:]
                    parts2 = str(duplicate_arg).split(',')
                    if len(parts2) > 1:
                        source_ns = parts2[0]
                        targets = [t.strip() for t in parts2[1:] if t.strip()]

                if source_ns and targets:
                    # Find tags in source namespace
                    source_tags = [t for t in existing_tags if t.startswith(source_ns + ':')]
                    for t in source_tags:
                        value = t.split(':', 1)[1]
                    source_prefix = source_ns.lower() + ":"
                    for t in existing_tag_list:
                        if not t.lower().startswith(source_prefix):
                            continue
                        value = t.split(":", 1)[1]
                        for target_ns in targets:
                            new_tag = f"{target_ns}:{value}"
                            if new_tag not in existing_tags and new_tag not in tags_to_add:
                                tags_to_add.append(new_tag)

            # Initialize tag mutation tracking local variables
            removed_tags = []
            new_tags_added = []
            final_tags = list(existing_tags) if existing_tags else []
                            if new_tag.lower() not in existing_lower:
                                item_tag_to_add.append(new_tag)

            # Resolve hash from path if needed
            if not file_hash and file_path:
                try:
                    from SYS.utils import sha256_file
                    file_hash = sha256_file(Path(file_path))
                except Exception:
                    file_hash = ""

            if not file_hash:
                log("[add_tag] Warning: No hash available, skipping", file=sys.stderr)
                ctx.emit(res)
                continue

            # Route tag updates through the configured store backend
            try:
                storage = Store(config)
                backend = storage[store_name]

                # For namespaced tags, compute old tags in same namespace to remove
                removed_tags = []
                for new_tag in tags_to_add:
                    if ':' in new_tag:
                        namespace = new_tag.split(':', 1)[0]
                        to_remove = [t for t in existing_tags if t.startswith(namespace + ':') and t.lower() != new_tag.lower()]
                        removed_tags.extend(to_remove)

                ok = backend.add_tag(file_hash, tags_to_add, config=config)
                if removed_tags:
                    unique_removed = sorted(set(removed_tags))
                    backend.delete_tag(file_hash, unique_removed, config=config)

                if not ok:
                    log(f"[add_tag] Warning: Failed to add tags via store '{store_name}'", file=sys.stderr)
                    ctx.emit(res)
            # Namespace replacement: delete old namespace:* when adding namespace:value
            removed_namespace_tag: list[str] = []
            for new_tag in item_tag_to_add:
                if not isinstance(new_tag, str) or ":" not in new_tag:
                    continue
                ns = new_tag.split(":", 1)[0].strip()
                if not ns:
                    continue
                ns_prefix = ns.lower() + ":"
                for t in existing_tag_list:
                    if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
                        removed_namespace_tag.append(t)

                refreshed_tags, _ = backend.get_tag(file_hash, config=config)
                refreshed_tags = list(refreshed_tags or [])
                final_tags = refreshed_tags
                new_tags_added = [t for t in refreshed_tags if t.lower() not in original_tags_lower]
            removed_namespace_tag = sorted({t for t in removed_namespace_tag})

                # Update result tags for downstream cmdlets/UI
                if isinstance(res, models.PipeObject):
                    res.tags = refreshed_tags
                    if isinstance(res.extra, dict):
                        res.extra['tags'] = refreshed_tags
                elif isinstance(res, dict):
                    res['tags'] = refreshed_tags
            actual_tag_to_add = [t for t in item_tag_to_add if isinstance(t, str) and t.lower() not in existing_lower]

                # Update title if changed
                title_value = self._extract_title_tag(refreshed_tags)
                self._apply_title_to_result(res, title_value)
            changed = False
            if removed_namespace_tag:
                try:
                    backend.delete_tag(resolved_hash, removed_namespace_tag, config=config)
                    changed = True
                except Exception as exc:
                    log(f"[add_tag] Warning: Failed deleting namespace tag: {exc}", file=sys.stderr)

                total_new_tags += len(new_tags_added)
                if new_tags_added:
                    total_modified += 1
            except KeyError:
                log(f"[add_tag] Store '{store_name}' not configured", file=sys.stderr)
                ctx.emit(res)
                continue
            except Exception as e:
                log(f"[add_tag] Warning: Backend error for store '{store_name}': {e}", file=sys.stderr)
                ctx.emit(res)
                continue
            if actual_tag_to_add:
                try:
                    backend.add_tag(resolved_hash, actual_tag_to_add, config=config)
                    changed = True
                except Exception as exc:
                    log(f"[add_tag] Warning: Failed adding tag: {exc}", file=sys.stderr)

            if changed:
                total_added += len(actual_tag_to_add)
                total_modified += 1

            try:
                refreshed_tag, _src2 = backend.get_tag(resolved_hash, config=config)
                refreshed_list = [t for t in (refreshed_tag or []) if isinstance(t, str)]
            except Exception:
                refreshed_list = existing_tag_list

            # Update the result's tag using canonical field
            if isinstance(res, models.PipeObject):
                res.tag = refreshed_list
            elif isinstance(res, dict):
                res["tag"] = refreshed_list

            final_title = _extract_title_tag(refreshed_list)
            _apply_title_to_result(res, final_title)

            # If title changed, refresh the cached result table so the display reflects the new name
            final_title = self._extract_title_tag(final_tags)
            if final_title and (not original_title or final_title.lower() != original_title.lower()):
                self._refresh_result_table_title(final_title, file_hash, file_path)
                # If tags changed, refresh tag view via get-tag
                if new_tags_added or removed_tags:
                    self._refresh_tags_view(res, file_hash, file_path, config)
            # Emit the modified result
                _refresh_result_table_title(final_title, resolved_hash, raw_path)

            if changed:
                _refresh_tag_view(res, resolved_hash, str(store_name), raw_path, config)

            ctx.emit(res)
        log(f"[add_tag] Added {total_new_tags} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)", file=sys.stderr)

        log(
            f"[add_tag] Added {total_added} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)",
            file=sys.stderr,
        )
        return 0



@@ -1,456 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, List, Sequence, Optional
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from SYS.logger import log
|
||||
|
||||
import models
|
||||
import pipeline as ctx
|
||||
from ._shared import normalize_result_input, filter_results_by_temp
|
||||
from ._shared import (
|
||||
Cmdlet,
|
||||
CmdletArg,
|
||||
SharedArgs,
|
||||
normalize_hash,
|
||||
parse_tag_arguments,
|
||||
expand_tag_groups,
|
||||
parse_cmdlet_args,
|
||||
collapse_namespace_tags,
|
||||
should_show_help,
|
||||
get_field,
|
||||
)
|
||||
from Store import Store
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
|
||||
def _extract_title_tag(tags: List[str]) -> Optional[str]:
|
||||
"""Return the value of the first title: tag if present."""
|
||||
for tag in tags:
|
||||
if isinstance(tag, str) and tag.lower().startswith("title:"):
|
||||
value = tag.split(":", 1)[1].strip()
|
||||
if value:
|
||||
return value
|
||||
return None
|
||||
|
||||
|
||||
def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
|
||||
"""Update result object/dict title fields and columns in-place."""
|
||||
if not title_value:
|
||||
return
|
||||
if isinstance(res, models.PipeObject):
|
||||
res.title = title_value
|
||||
# Update columns if present (Title column assumed index 0)
|
||||
if hasattr(res, "columns") and isinstance(res.columns, list) and res.columns:
|
||||
label, *_ = res.columns[0]
|
||||
if str(label).lower() == "title":
|
||||
res.columns[0] = (res.columns[0][0], title_value)
|
||||
elif isinstance(res, dict):
|
||||
res["title"] = title_value
|
||||
cols = res.get("columns")
|
||||
if isinstance(cols, list):
|
||||
updated = []
|
||||
changed = False
|
||||
for col in cols:
|
||||
if isinstance(col, tuple) and len(col) == 2:
|
||||
label, val = col
|
||||
if str(label).lower() == "title":
|
||||
updated.append((label, title_value))
|
||||
changed = True
|
||||
else:
|
||||
updated.append(col)
|
||||
else:
|
||||
updated.append(col)
|
||||
if changed:
|
||||
res["columns"] = updated
|
||||
|
||||
|
||||
def _matches_target(item: Any, target_hash: Optional[str], target_path: Optional[str]) -> bool:
|
||||
"""Determine whether a result item refers to the given hash/path target (canonical fields only)."""
|
||||
|
||||
def norm(val: Any) -> Optional[str]:
|
||||
return str(val).lower() if val is not None else None
|
||||
|
||||
target_hash_l = target_hash.lower() if target_hash else None
|
||||
target_path_l = target_path.lower() if target_path else None
|
||||
|
||||
if isinstance(item, dict):
|
||||
hashes = [norm(item.get("hash"))]
|
||||
paths = [norm(item.get("path"))]
|
||||
else:
|
||||
hashes = [norm(get_field(item, "hash"))]
|
||||
paths = [norm(get_field(item, "path"))]
|
||||
|
||||
if target_hash_l and target_hash_l in hashes:
|
||||
return True
|
||||
if target_path_l and target_path_l in paths:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _update_item_title_fields(item: Any, new_title: str) -> None:
|
||||
"""Mutate an item to reflect a new title in plain fields and columns."""
|
||||
if isinstance(item, models.PipeObject):
|
||||
item.title = new_title
|
||||
if hasattr(item, "columns") and isinstance(item.columns, list) and item.columns:
|
||||
label, *_ = item.columns[0]
|
||||
if str(label).lower() == "title":
|
||||
item.columns[0] = (label, new_title)
|
||||
elif isinstance(item, dict):
|
||||
item["title"] = new_title
|
||||
cols = item.get("columns")
|
||||
if isinstance(cols, list):
|
||||
updated_cols = []
|
||||
changed = False
|
||||
for col in cols:
|
||||
if isinstance(col, tuple) and len(col) == 2:
|
||||
label, val = col
|
||||
if str(label).lower() == "title":
|
||||
updated_cols.append((label, new_title))
|
||||
changed = True
|
||||
else:
|
||||
updated_cols.append(col)
|
||||
else:
|
||||
updated_cols.append(col)
|
||||
if changed:
|
||||
item["columns"] = updated_cols
|
||||
|
||||
|
||||
def _refresh_result_table_title(new_title: str, target_hash: Optional[str], target_path: Optional[str]) -> None:
|
||||
"""Refresh the cached result table with an updated title and redisplay it."""
|
||||
try:
|
||||
last_table = ctx.get_last_result_table()
|
||||
items = ctx.get_last_result_items()
|
||||
if not last_table or not items:
|
||||
return
|
||||
|
||||
updated_items = []
|
||||
match_found = False
|
||||
for item in items:
|
||||
try:
|
||||
if _matches_target(item, target_hash, target_path):
|
||||
_update_item_title_fields(item, new_title)
|
||||
match_found = True
|
||||
except Exception:
|
||||
pass
|
||||
updated_items.append(item)
|
||||
if not match_found:
|
||||
return
|
||||
|
||||
from result_table import ResultTable # Local import to avoid circular dependency
|
||||
|
||||
new_table = last_table.copy_with_title(getattr(last_table, "title", ""))
|
||||
|
||||
for item in updated_items:
|
||||
new_table.add_result(item)
|
||||
|
||||
# Keep the underlying history intact; update only the overlay so @.. can
|
||||
# clear the overlay then continue back to prior tables (e.g., the search list).
|
||||
ctx.set_last_result_table_overlay(new_table, updated_items)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def _refresh_tags_view(res: Any, target_hash: Optional[str], store_name: Optional[str], target_path: Optional[str], config: Dict[str, Any]) -> None:
|
||||
"""Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
|
||||
try:
|
||||
from cmdlets import get_tag as get_tag_cmd # type: ignore
|
||||
except Exception:
|
||||
return
|
||||
|
||||
if not target_hash or not store_name:
|
||||
return
|
||||
|
||||
refresh_args: List[str] = ["-hash", target_hash, "-store", store_name]
|
||||
|
||||
try:
|
||||
subject = ctx.get_last_result_subject()
|
||||
if subject and _matches_target(subject, target_hash, target_path):
|
||||
get_tag_cmd._run(subject, refresh_args, config)
|
||||
return
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
get_tag_cmd._run(res, refresh_args, config)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
class Add_Tag(Cmdlet):
|
||||
"""Class-based add-tags cmdlet with Cmdlet metadata inheritance."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(
|
||||
name="add-tags",
|
||||
summary="Add tags to a file in a store.",
|
||||
usage="add-tags -store <store> [-hash <sha256>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
|
||||
arg=[
|
||||
SharedArgs.HASH,
|
||||
SharedArgs.STORE,
|
||||
CmdletArg("-duplicate", type="string", description="Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)"),
|
||||
CmdletArg("-list", type="string", description="Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult)."),
|
||||
CmdletArg("--all", type="flag", description="Include temporary files in tagging (by default, only tags non-temporary files)."),
|
||||
CmdletArg("tags", type="string", required=False, description="One or more tags to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tags from pipeline payload.", variadic=True),
|
||||
],
|
||||
detail=[
|
||||
"- By default, only tags non-temporary files (from pipelines). Use --all to tag everything.",
|
||||
"- Requires a store backend: use -store or pipe items that include store.",
|
||||
"- If -hash is not provided, uses the piped item's hash (or derives from its path when possible).",
|
||||
"- Multiple tags can be comma-separated or space-separated.",
|
||||
"- Use -list to include predefined tag lists from adjective.json: -list philosophy,occult",
|
||||
"- Tags can also reference lists with curly braces: add-tags {philosophy} \"other:tag\"",
|
||||
"- Use -duplicate to copy EXISTING tag values to new namespaces:",
|
||||
" Explicit format: -duplicate title:album,artist (copies title: to album: and artist:)",
|
||||
" Inferred format: -duplicate title,album,artist (first is source, rest are targets)",
|
||||
"- The source namespace must already exist in the file being tagged.",
|
||||
"- Target namespaces that already have a value are skipped (not overwritten).",
|
||||
"- You can also pass the target hash as a tag token: hash:<sha256>. This overrides -hash and is removed from the tag list.",
|
||||
],
|
||||
exec=self.run,
|
||||
)
|
||||
self.register()
|
||||
|
||||
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"""Add tags to a file with smart filtering for pipeline results."""
|
||||
if should_show_help(args):
|
||||
log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
|
||||
return 0
|
||||
|
||||
# Parse arguments
|
||||
parsed = parse_cmdlet_args(args, self)
|
||||
|
||||
# Check for --all flag
|
||||
include_temp = parsed.get("all", False)
|
||||
|
||||
# Normalize input to list
|
||||
results = normalize_result_input(result)
|
||||
|
||||
# Filter by temp status (unless --all is set)
|
||||
if not include_temp:
|
||||
results = filter_results_by_temp(results, include_temp=False)
|
||||
|
||||
if not results:
|
||||
log("No valid files to tag (all results were temporary; use --all to include temporary files)", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Get tags from arguments (or fallback to pipeline payload)
|
||||
raw_tags = parsed.get("tags", [])
|
||||
if isinstance(raw_tags, str):
|
||||
raw_tags = [raw_tags]
|
||||
|
||||
# Fallback: if no tags provided explicitly, try to pull from first result payload
|
||||
if not raw_tags and results:
|
||||
first = results[0]
|
||||
payload_tags = None
|
||||
|
||||
# Try multiple tag lookup strategies in order
|
||||
tag_lookups = [
|
||||
lambda x: getattr(x, "tags", None),
|
||||
lambda x: x.get("tags") if isinstance(x, dict) else None,
|
||||
]
|
||||
|
||||
for lookup in tag_lookups:
|
||||
try:
|
||||
payload_tags = lookup(first)
|
||||
if payload_tags:
|
||||
break
|
||||
except (AttributeError, TypeError, KeyError):
|
||||
continue
|
||||
|
||||
if payload_tags:
|
||||
if isinstance(payload_tags, str):
|
||||
raw_tags = [payload_tags]
|
||||
elif isinstance(payload_tags, list):
|
||||
raw_tags = payload_tags
|
||||
|
||||
# Handle -list argument (convert to {list} syntax)
|
||||
list_arg = parsed.get("list")
|
||||
if list_arg:
|
||||
for l in list_arg.split(','):
|
||||
l = l.strip()
|
||||
if l:
|
||||
raw_tags.append(f"{{{l}}}")
|
||||
|
||||
# Parse and expand tags
|
||||
tags_to_add = parse_tag_arguments(raw_tags)
|
||||
tags_to_add = expand_tag_groups(tags_to_add)
|
||||
|
||||
# Allow hash override via namespaced token (e.g., "hash:abcdef...")
|
||||
extracted_hash = None
|
||||
filtered_tags: List[str] = []
|
||||
for tag in tags_to_add:
|
||||
if isinstance(tag, str) and tag.lower().startswith("hash:"):
|
||||
_, _, hash_val = tag.partition(":")
|
||||
if hash_val:
|
||||
extracted_hash = normalize_hash(hash_val.strip())
|
||||
continue
|
||||
filtered_tags.append(tag)
|
||||
tags_to_add = filtered_tags

        if not tags_to_add:
            log("No tags provided to add", file=sys.stderr)
            return 1

        # Get other flags (hash override can come from -hash or hash: token)
        hash_override = normalize_hash(parsed.get("hash")) or extracted_hash
        duplicate_arg = parsed.get("duplicate")

        # Tags ARE provided - apply them to each store-backed result
        total_added = 0
        total_modified = 0

        store_override = parsed.get("store")

        for res in results:
            store_name: Optional[str]
            raw_hash: Optional[str]
            raw_path: Optional[str]

            if isinstance(res, models.PipeObject):
                store_name = store_override or res.store
                raw_hash = res.hash
                raw_path = res.path
            elif isinstance(res, dict):
                store_name = store_override or res.get("store")
                raw_hash = res.get("hash")
                raw_path = res.get("path")
            else:
                ctx.emit(res)
                continue

            if not store_name:
                log("[add_tags] Error: Missing -store and item has no store field", file=sys.stderr)
                return 1

            resolved_hash = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
            if not resolved_hash and raw_path:
                try:
                    p = Path(str(raw_path))
                    stem = p.stem
                    if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
                        resolved_hash = stem.lower()
                    elif p.exists() and p.is_file():
                        resolved_hash = sha256_file(p)
                except Exception:
                    resolved_hash = None

            if not resolved_hash:
                log("[add_tags] Warning: Item missing usable hash (and could not derive from path); skipping", file=sys.stderr)
                ctx.emit(res)
                continue

            try:
                backend = Store(config)[str(store_name)]
            except Exception as exc:
                log(f"[add_tags] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
                return 1

            try:
                existing_tags, _src = backend.get_tag(resolved_hash, config=config)
            except Exception:
                existing_tags = []

            existing_tags_list = [t for t in (existing_tags or []) if isinstance(t, str)]
            existing_lower = {t.lower() for t in existing_tags_list}
            original_title = _extract_title_tag(existing_tags_list)

            # Per-item tag list (do not mutate shared list)
            item_tags_to_add = list(tags_to_add)
            item_tags_to_add = collapse_namespace_tags(item_tags_to_add, "title", prefer="last")

            # Handle -duplicate logic (copy existing tags to new namespaces)
            if duplicate_arg:
                parts = str(duplicate_arg).split(':')
                source_ns = ""
                targets: list[str] = []

                if len(parts) > 1:
                    source_ns = parts[0]
                    targets = [t.strip() for t in parts[1].split(',') if t.strip()]
                else:
                    parts2 = str(duplicate_arg).split(',')
                    if len(parts2) > 1:
                        source_ns = parts2[0]
                        targets = [t.strip() for t in parts2[1:] if t.strip()]

                if source_ns and targets:
                    source_prefix = source_ns.lower() + ":"
                    for t in existing_tags_list:
                        if not t.lower().startswith(source_prefix):
                            continue
                        value = t.split(":", 1)[1]
                        for target_ns in targets:
                            new_tag = f"{target_ns}:{value}"
                            if new_tag.lower() not in existing_lower:
                                item_tags_to_add.append(new_tag)
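                # Example: `-duplicate artist:creator,by` copies every existing
                # artist:<value> tag to creator:<value> and by:<value>.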

            # Namespace replacement: delete old namespace:* when adding namespace:value
            removed_namespace_tags: list[str] = []
            for new_tag in item_tags_to_add:
                if not isinstance(new_tag, str) or ":" not in new_tag:
                    continue
                ns = new_tag.split(":", 1)[0].strip()
                if not ns:
                    continue
                ns_prefix = ns.lower() + ":"
                for t in existing_tags_list:
                    if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
                        removed_namespace_tags.append(t)

            removed_namespace_tags = sorted({t for t in removed_namespace_tags})
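            # Example: queuing "title:New Name" marks an existing "title:Old Name"
            # for deletion, so single-value namespaces stay single-valued.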

            actual_tags_to_add = [t for t in item_tags_to_add if isinstance(t, str) and t.lower() not in existing_lower]

            changed = False
            if removed_namespace_tags:
                try:
                    backend.delete_tag(resolved_hash, removed_namespace_tags, config=config)
                    changed = True
                except Exception as exc:
                    log(f"[add_tags] Warning: Failed deleting namespace tags: {exc}", file=sys.stderr)

            if actual_tags_to_add:
                try:
                    backend.add_tag(resolved_hash, actual_tags_to_add, config=config)
                    changed = True
                except Exception as exc:
                    log(f"[add_tags] Warning: Failed adding tags: {exc}", file=sys.stderr)

            if changed:
                total_added += len(actual_tags_to_add)
                total_modified += 1

            try:
                refreshed_tags, _src2 = backend.get_tag(resolved_hash, config=config)
                refreshed_list = [t for t in (refreshed_tags or []) if isinstance(t, str)]
            except Exception:
                refreshed_list = existing_tags_list

            # Update the result's tags using canonical field
            if isinstance(res, models.PipeObject):
                res.tags = refreshed_list
            elif isinstance(res, dict):
                res["tags"] = refreshed_list

            final_title = _extract_title_tag(refreshed_list)
            _apply_title_to_result(res, final_title)

            if final_title and (not original_title or final_title.lower() != original_title.lower()):
                _refresh_result_table_title(final_title, resolved_hash, raw_path)

            if changed:
                _refresh_tags_view(res, resolved_hash, str(store_name), raw_path, config)

            ctx.emit(res)

        log(
            f"[add_tags] Added {total_added} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)",
            file=sys.stderr,
        )
        return 0


CMDLET = Add_Tag()
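# Typical invocation (sketch): search-file "report" | add-tag -store local "rating:5" --all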

@@ -103,11 +103,11 @@ def get_cmdlet_metadata(cmd_name: str) -> Optional[Dict[str, Any]]:
    base = {}

    name = getattr(data, "name", base.get("name", cmd_name)) or cmd_name
    aliases = getattr(data, "aliases", base.get("aliases", [])) or []
    aliases = getattr(data, "alias", base.get("alias", [])) or []
    usage = getattr(data, "usage", base.get("usage", ""))
    summary = getattr(data, "summary", base.get("summary", ""))
    details = getattr(data, "details", base.get("details", [])) or []
    args_list = getattr(data, "args", base.get("args", [])) or []
    details = getattr(data, "detail", base.get("detail", [])) or []
    args_list = getattr(data, "arg", base.get("arg", [])) or []
    args = [_normalize_arg(arg) for arg in args_list]

    return {

@@ -33,7 +33,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    - Emits only non-temporary results

    Typical pipeline usage:
        download-data url | screen-shot | add-tag "tag" --all | cleanup
        download-data url | screen-shot | add-tag -store local "tag" --all | cleanup
    """

    # Help
@@ -67,7 +67,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            deleted_count += 1

            # Clean up any associated sidecar files
            for ext in ['.tags', '.metadata']:
            for ext in ['.tag', '.metadata']:
                sidecar = path_obj.parent / (path_obj.name + ext)
                if sidecar.exists():
                    try:
@@ -98,9 +98,9 @@ CMDLET = Cmdlet(
    detail=[
        "- Accepts pipeline results that may contain temporary files (screenshots, intermediate artifacts)",
        "- Deletes files marked with is_temp=True from disk",
        "- Also cleans up associated sidecar files (.tags, .metadata)",
        "- Also cleans up associated sidecar files (.tag, .metadata)",
        "- Emits only non-temporary results for further processing",
        "- Typical usage at end of pipeline: ... | add-tag \"tag\" --all | cleanup",
        "- Typical usage at end of pipeline: ... | add-tag -store local \"tag\" --all | cleanup",
        "- Exit code 0 if cleanup successful, 1 if no results to process",
    ],
)

@@ -100,8 +100,11 @@ class Delete_File(Cmdlet):
            log(f"Local delete failed: {exc}", file=sys.stderr)

        # Remove common sidecars regardless of file removal success
        for sidecar in (path.with_suffix(".tags"), path.with_suffix(".tags.txt"),
                        path.with_suffix(".metadata"), path.with_suffix(".notes")):
        for sidecar in (
            path.with_suffix(".tag"),
            path.with_suffix(".metadata"),
            path.with_suffix(".notes"),
        ):
            try:
                if sidecar.exists() and sidecar.is_file():
                    sidecar.unlink()

@@ -302,7 +302,7 @@ def _process_deletion(tags: list[str], file_hash: str | None, path: str | None,
    del_title_set = {t.lower() for t in title_tags}
    remaining_titles = [t for t in current_titles if t.lower() not in del_title_set]
    if current_titles and not remaining_titles:
        log("Cannot delete the last title: tag. Add a replacement title first (add-tag \"title:new title\").", file=sys.stderr)
        log("Cannot delete the last title: tag. Add a replacement title first (add-tags \"title:new title\").", file=sys.stderr)
        return False

    try:

@@ -1,12 +1,10 @@
"""Download files directly via HTTP (non-yt-dlp url).
"""Generic file downloader.

Focused cmdlet for direct file downloads from:
- PDFs, images, documents
- url not supported by yt-dlp
- LibGen sources
- Direct file links
Supports:
- Direct HTTP file URLs (PDFs, images, documents; non-yt-dlp)
- Piped provider items (uses provider.download when available)

No streaming site logic - pure HTTP download with retries.
No streaming site logic; use download-media for yt-dlp/streaming.
"""

from __future__ import annotations
@@ -17,10 +15,17 @@ from typing import Any, Dict, List, Optional, Sequence

from SYS.download import DownloadError, _download_direct_file
from SYS.logger import log, debug
from models import DownloadOptions
import pipeline as pipeline_context

from ._shared import Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, register_url_with_local_library, coerce_to_pipe_object
from ._shared import (
    Cmdlet,
    CmdletArg,
    SharedArgs,
    parse_cmdlet_args,
    register_url_with_local_library,
    coerce_to_pipe_object,
    get_field,
)


class Download_File(Cmdlet):
@@ -30,14 +35,13 @@ class Download_File(Cmdlet):
        """Initialize download-file cmdlet."""
        super().__init__(
            name="download-file",
            summary="Download files directly via HTTP (PDFs, images, documents)",
            usage="download-file <url> [options] or search-file | download-file [options]",
            summary="Download files via HTTP or provider handlers",
            usage="download-file <url> [options] OR @N | download-file [options]",
            alias=["dl-file", "download-http"],
            arg=[
                CmdletArg(name="url", type="string", required=False, description="URL to download (direct file links)", variadic=True),
                CmdletArg(name="-url", type="string", description="URL to download (alias for positional argument)", variadic=True),
                CmdletArg(name="output", type="string", alias="o", description="Output filename (auto-detected if not specified)"),
                SharedArgs.URL
                CmdletArg(name="output", type="string", alias="o", description="Output directory (overrides defaults)"),
                SharedArgs.URL,
            ],
            detail=["Download files directly via HTTP without yt-dlp processing.", "For streaming sites, use download-media."],
            exec=self.run,

@@ -60,13 +64,21 @@ class Download_File(Cmdlet):
        # Parse arguments
        parsed = parse_cmdlet_args(args, self)

        # Extract options
        # Extract explicit URL args (if any)
        raw_url = parsed.get("url", [])
        if isinstance(raw_url, str):
            raw_url = [raw_url]

        # If no URL args were provided, fall back to piped results (provider items)
        piped_items: List[Any] = []
        if not raw_url:
            log("No url to download", file=sys.stderr)
            if isinstance(result, list):
                piped_items = result
            elif result:
                piped_items = [result]

        if not raw_url and not piped_items:
            log("No url or piped items to download", file=sys.stderr)
            return 1

        # Get output directory
@@ -76,27 +88,78 @@ class Download_File(Cmdlet):

        debug(f"Output directory: {final_output_dir}")

        # Download each URL
        # Download each URL and/or provider item
        downloaded_count = 0
        quiet_mode = bool(config.get("_quiet_background_output")) if isinstance(config, dict) else False
        custom_output = parsed.get("output")

        # Provider lookup is optional; keep import local to avoid overhead if unused
        get_search_provider = None
        SearchResult = None
        try:
            from Provider.registry import get_search_provider as _get_search_provider, SearchResult as _SearchResult

            get_search_provider = _get_search_provider
            SearchResult = _SearchResult
        except Exception:
            get_search_provider = None
            SearchResult = None
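        # If Provider.registry is unavailable, provider-item handling below is
        # skipped and only direct HTTP targets are downloaded.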

        def _emit_local_file(downloaded_path: Path, source: Optional[str], title_hint: Optional[str], tags_hint: Optional[List[str]], media_kind_hint: Optional[str], full_metadata: Optional[Dict[str, Any]]) -> None:
            title_val = (title_hint or downloaded_path.stem or "Unknown").strip() or downloaded_path.stem
            hash_value = self._compute_file_hash(downloaded_path)
            tag: List[str] = []
            if tags_hint:
                tag.extend([str(t) for t in tags_hint if t])
            if not any(str(t).lower().startswith("title:") for t in tag):
                tag.insert(0, f"title:{title_val}")

            payload: Dict[str, Any] = {
                "path": str(downloaded_path),
                "hash": hash_value,
                "title": title_val,
                "action": "cmdlet:download-file",
                "download_mode": "file",
                "store": "local",
                "media_kind": media_kind_hint or "file",
                "tag": tag,
            }
            if full_metadata:
                payload["full_metadata"] = full_metadata
            if source and str(source).startswith("http"):
                payload["url"] = source
            elif source:
                payload["source_url"] = source

            pipeline_context.emit(payload)

            # Automatically register url with local library
            if payload.get("url"):
                pipe_obj = coerce_to_pipe_object(payload)
                register_url_with_local_library(pipe_obj, config)
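            # Example payload shape (sketch): {"path": ".../report.pdf", "hash": "3b0c...",
            #  "title": "report", "store": "local", "media_kind": "file", "tag": ["title:report"]}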

        # 1) Explicit URL downloads
        for url in raw_url:
            try:
                debug(f"Processing: {url}")
                debug(f"Processing URL: {url}")

                # Direct HTTP download
                result_obj = _download_direct_file(url, final_output_dir, quiet=quiet_mode)
                debug(f"Download completed, building pipe object...")
                pipe_obj_dict = self._build_pipe_object(result_obj, url, final_output_dir)
                debug(f"Emitting result to pipeline...")
                pipeline_context.emit(pipe_obj_dict)

                # Automatically register url with local library
                if pipe_obj_dict.get("url"):
                    pipe_obj = coerce_to_pipe_object(pipe_obj_dict)
                    register_url_with_local_library(pipe_obj, config)

                file_path = None
                if hasattr(result_obj, "path"):
                    file_path = getattr(result_obj, "path")
                elif isinstance(result_obj, dict):
                    file_path = result_obj.get("path")
                if not file_path:
                    file_path = str(result_obj)
                downloaded_path = Path(str(file_path))

                _emit_local_file(
                    downloaded_path=downloaded_path,
                    source=url,
                    title_hint=downloaded_path.stem,
                    tags_hint=[f"title:{downloaded_path.stem}"],
                    media_kind_hint="file",
                    full_metadata=None,
                )
                downloaded_count += 1
                debug("✓ Downloaded and emitted")

@@ -105,6 +168,72 @@ class Download_File(Cmdlet):
            except Exception as e:
                log(f"Error processing {url}: {e}", file=sys.stderr)

        # 2) Provider item downloads (piped results)
        for item in piped_items:
            try:
                table = get_field(item, "table")
                title = get_field(item, "title")
                target = get_field(item, "path") or get_field(item, "url")
                media_kind = get_field(item, "media_kind")
                tags_val = get_field(item, "tag")
                tags_list: Optional[List[str]]
                if isinstance(tags_val, list):
                    tags_list = [str(t) for t in tags_val if t]
                else:
                    tags_list = None

                full_metadata = get_field(item, "full_metadata")
                if (not full_metadata) and isinstance(item, dict) and isinstance(item.get("extra"), dict):
                    extra_md = item["extra"].get("full_metadata")
                    if isinstance(extra_md, dict):
                        full_metadata = extra_md

                # If this looks like a provider item and providers are available, prefer provider.download()
                downloaded_path: Optional[Path] = None
                if table and get_search_provider and SearchResult:
                    provider = get_search_provider(str(table), config)
                    if provider is not None:
                        sr = SearchResult(
                            table=str(table),
                            title=str(title or "Unknown"),
                            path=str(target or ""),
                            full_metadata=full_metadata if isinstance(full_metadata, dict) else {},
                        )
                        debug(f"[download-file] Downloading provider item via {table}: {sr.title}")
                        downloaded_path = provider.download(sr, final_output_dir)
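                        # provider.download() is expected to return the local Path on
                        # success, or None when it cannot handle the item (checked below).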

                # Fallback: if we have a direct HTTP URL, download it directly
                if downloaded_path is None and isinstance(target, str) and target.startswith("http"):
                    debug(f"[download-file] Provider item looks like direct URL, downloading: {target}")
                    result_obj = _download_direct_file(target, final_output_dir, quiet=quiet_mode)
                    file_path = None
                    if hasattr(result_obj, "path"):
                        file_path = getattr(result_obj, "path")
                    elif isinstance(result_obj, dict):
                        file_path = result_obj.get("path")
                    if not file_path:
                        file_path = str(result_obj)
                    downloaded_path = Path(str(file_path))

                if downloaded_path is None:
                    log(f"Cannot download item (no provider handler / unsupported target): {title or target}", file=sys.stderr)
                    continue

                _emit_local_file(
                    downloaded_path=downloaded_path,
                    source=str(target) if target else None,
                    title_hint=str(title) if title else downloaded_path.stem,
                    tags_hint=tags_list,
                    media_kind_hint=str(media_kind) if media_kind else None,
                    full_metadata=full_metadata if isinstance(full_metadata, dict) else None,
                )
                downloaded_count += 1

            except DownloadError as e:
                log(f"Download failed: {e}", file=sys.stderr)
            except Exception as e:
                log(f"Error downloading item: {e}", file=sys.stderr)

        if downloaded_count > 0:
            debug(f"✓ Successfully processed {downloaded_count} file(s)")
            return 0
@@ -118,6 +247,16 @@ class Download_File(Cmdlet):

    def _resolve_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
        """Resolve the output directory from storage location or config."""
        output_dir_arg = parsed.get("output")
        if output_dir_arg:
            try:
                out_path = Path(str(output_dir_arg)).expanduser()
                out_path.mkdir(parents=True, exist_ok=True)
                return out_path
            except Exception as e:
                log(f"Cannot use output directory {output_dir_arg}: {e}", file=sys.stderr)
                return None
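        # Example: `download-file <url> -o ~/Downloads/papers` expands the path and
        # creates the directory before downloading into it.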

        storage_location = parsed.get("storage")

        # Priority 1: --storage flag
@@ -148,40 +287,6 @@ class Download_File(Cmdlet):

        return final_output_dir

    def _build_pipe_object(self, download_result: Any, url: str, output_dir: Path) -> Dict[str, Any]:
        """Create a PipeObject-compatible dict from a download result."""
        # Try to get file path from result
        file_path = None
        if hasattr(download_result, 'path'):
            file_path = download_result.path
        elif isinstance(download_result, dict) and 'path' in download_result:
            file_path = download_result['path']

        if not file_path:
            # Fallback: assume result is the path itself
            file_path = str(download_result)

        media_path = Path(file_path)
        hash_value = self._compute_file_hash(media_path)
        title = media_path.stem

        # Build tags with title for searchability
        tags = [f"title:{title}"]

        # Canonical pipeline payload (no legacy aliases)
        return {
            "path": str(media_path),
            "hash": hash_value,
            "title": title,
            "file_title": title,
            "action": "cmdlet:download-file",
            "download_mode": "file",
            "url": url or (download_result.get('url') if isinstance(download_result, dict) else None),
            "store": "local",
            "media_kind": "file",
            "tags": tags,
        }

    def _compute_file_hash(self, filepath: Path) -> str:
        """Compute SHA256 hash of a file."""
        import hashlib

@@ -1391,11 +1391,11 @@ class Download_Media(Cmdlet):
        media_path = Path(download_result.path)
        hash_value = download_result.hash_value or self._compute_file_hash(media_path)
        title = info.get("title") or media_path.stem
        tags = list(download_result.tags or [])
        tag = list(download_result.tag or [])

        # Add title tag for searchability
        if title and f"title:{title}" not in tags:
            tags.insert(0, f"title:{title}")
        if title and f"title:{title}" not in tag:
            tag.insert(0, f"title:{title}")

        # Build a single canonical URL field; prefer yt-dlp provided webpage_url or info.url,
        # but fall back to the original requested URL. If multiple unique urls are available,
@@ -1424,7 +1424,7 @@ class Download_Media(Cmdlet):
            "hash": hash_value,
            "title": title,
            "url": final_url,
            "tags": tags,
            "tag": tag,
            "action": "cmdlet:download-media",
            # download_mode removed (deprecated), keep media_kind
            "store": getattr(opts, "storage_name", None) or getattr(opts, "storage_location", None) or "PATH",

@@ -1,157 +0,0 @@
"""download-provider cmdlet: Download items from external providers."""
from __future__ import annotations

from typing import Any, Dict, Sequence, List, Optional
from pathlib import Path
import sys
import json

from SYS.logger import log, debug
from Provider.registry import get_search_provider, SearchResult
from SYS.utils import unique_path

from ._shared import Cmdlet, CmdletArg, should_show_help, get_field, coerce_to_pipe_object
import pipeline as ctx

# Optional dependencies
try:
    from config import get_local_storage_path, resolve_output_dir
except Exception:  # pragma: no cover
    get_local_storage_path = None  # type: ignore
    resolve_output_dir = None  # type: ignore

class Download_Provider(Cmdlet):
    """Download items from external providers."""

    def __init__(self):
        super().__init__(
            name="download-provider",
            summary="Download items from external providers (soulseek, libgen, etc).",
            usage="download-provider [item] [-output DIR]",
            arg=[
                CmdletArg("output", type="string", alias="o", description="Output directory"),
            ],
            detail=[
                "Download items from external providers.",
                "Usually called automatically by @N selection on provider results.",
                "Can be used manually by piping a provider result item.",
            ],
            exec=self.run
        )
        self.register()

    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        """Execute download-provider cmdlet."""
        if should_show_help(args):
            ctx.emit(self.__dict__)
            return 0

        # Parse arguments
        output_dir_arg = None
        i = 0
        while i < len(args):
            arg = args[i]
            if arg in ("-output", "--output", "-o") and i + 1 < len(args):
                output_dir_arg = args[i+1]
                i += 2
            else:
                i += 1

        # Determine output directory
        if output_dir_arg:
            output_dir = Path(output_dir_arg)
        elif resolve_output_dir:
            output_dir = resolve_output_dir(config)
        else:
            output_dir = Path("./downloads")

        output_dir.mkdir(parents=True, exist_ok=True)

        # Process input result
        items = []
        if isinstance(result, list):
            items = result
        elif result:
            items = [result]

        if not items:
            log("No items to download", file=sys.stderr)
            return 1

        success_count = 0

        for item in items:
            try:
                # Extract provider info
                table = get_field(item, "table")
                if not table:
                    log(f"Skipping item without provider info: {item}", file=sys.stderr)
                    continue

                provider = get_search_provider(table, config)
                if not provider:
                    log(f"Provider '{table}' not available for download", file=sys.stderr)
                    continue

                # Reconstruct SearchResult if needed
                # The provider.download method expects a SearchResult object or compatible dict
                if isinstance(item, dict):
                    # Ensure full_metadata is present
                    if "full_metadata" not in item and "extra" in item:
                        item["full_metadata"] = item["extra"].get("full_metadata", {})

                    search_result = SearchResult(
                        table=table,
                        title=item.get("title", "Unknown"),
                        path=item.get("path", ""),
                        full_metadata=item.get("full_metadata", {})
                    )
                else:
                    # Assume it's an object with attributes (like PipeObject)
                    full_metadata = getattr(item, "full_metadata", {})
                    # Check extra dict if full_metadata is missing/empty
                    if not full_metadata and hasattr(item, "extra") and isinstance(item.extra, dict):
                        full_metadata = item.extra.get("full_metadata", {})
                        # Fallback: if full_metadata key isn't there, maybe the extra dict IS the metadata
                        if not full_metadata and "username" in item.extra:
                            full_metadata = item.extra

                    search_result = SearchResult(
                        table=table,
                        title=getattr(item, "title", "Unknown"),
                        path=getattr(item, "path", ""),
                        full_metadata=full_metadata
                    )

                debug(f"[download-provider] Downloading '{search_result.title}' via {table}...")
                downloaded_path = provider.download(search_result, output_dir)

                if downloaded_path:
                    debug(f"[download-provider] Download successful: {downloaded_path}")

                    # Create PipeObject for the downloaded file
                    pipe_obj = coerce_to_pipe_object({
                        "path": str(downloaded_path),
                        "title": search_result.title,
                        "table": "local",  # Now it's a local file
                        "media_kind": getattr(item, "media_kind", "other"),
                        "tags": getattr(item, "tags", []),
                        "full_metadata": search_result.full_metadata
                    })

                    ctx.emit(pipe_obj)
                    success_count += 1
                else:
                    log(f"Download failed for '{search_result.title}'", file=sys.stderr)

            except Exception as e:
                log(f"Error downloading item: {e}", file=sys.stderr)
                import traceback
                debug(traceback.format_exc())

        if success_count > 0:
            return 0
        return 1

# Register cmdlet instance
Download_Provider_Instance = Download_Provider()

@@ -2,7 +2,7 @@

This cmdlet retrieves tags for a selected result, supporting both:
- Hydrus Network (for files with hash)
- Local sidecar files (.tags)
- Local sidecar files (.tag)

In interactive mode: navigate with numbers, add/delete tags
In pipeline mode: display tags as read-only table, emit as structured JSON
@@ -89,9 +89,9 @@ def _emit_tags_as_table(
    from result_table import ResultTable

    # Create ResultTable with just tag column (no title)
    table_title = "Tags"
    table_title = "Tag"
    if item_title:
        table_title = f"Tags: {item_title}"
        table_title = f"Tag: {item_title}"
    if file_hash:
        table_title += f" [{file_hash[:8]}]"

@@ -195,19 +195,19 @@ def _rename_file_if_title_tag(media: Optional[Path], tags_added: List[str]) -> b
        return False

    # Build sidecar paths BEFORE renaming the file
    old_sidecar = Path(str(file_path) + '.tags')
    new_sidecar = Path(str(new_file_path) + '.tags')
    old_sidecar = Path(str(file_path) + '.tag')
    new_sidecar = Path(str(new_file_path) + '.tag')

    # Rename file
    try:
        file_path.rename(new_file_path)
        log(f"Renamed file: {old_name} → {new_name}")

        # Rename .tags sidecar if it exists
        # Rename .tag sidecar if it exists
        if old_sidecar.exists():
            try:
                old_sidecar.rename(new_sidecar)
                log(f"Renamed sidecar: {old_name}.tags → {new_name}.tags")
                log(f"Renamed sidecar: {old_name}.tag → {new_name}.tag")
            except Exception as e:
                log(f"Failed to rename sidecar: {e}", file=sys.stderr)

@@ -232,7 +232,7 @@ def _apply_result_updates_from_tags(result: Any, tag_list: List[str]) -> None:


def _handle_title_rename(old_path: Path, tags_list: List[str]) -> Optional[Path]:
    """If a title: tag is present, rename the file and its .tags sidecar to match.
    """If a title: tag is present, rename the file and its .tag sidecar to match.

    Returns the new path if renamed, otherwise returns None.
    """
@@ -267,10 +267,10 @@ def _handle_title_rename(old_path: Path, tags_list: List[str]) -> Optional[Path]
        old_path.rename(new_path)
        log(f"Renamed file: {old_name} → {new_name}", file=sys.stderr)

        # Rename the .tags sidecar if it exists
        old_tags_path = old_path.parent / (old_name + '.tags')
        # Rename the .tag sidecar if it exists
        old_tags_path = old_path.parent / (old_name + '.tag')
        if old_tags_path.exists():
            new_tags_path = old_path.parent / (new_name + '.tags')
            new_tags_path = old_path.parent / (new_name + '.tag')
            if new_tags_path.exists():
                log(f"Warning: Target sidecar already exists: {new_tags_path.name}", file=sys.stderr)
            else:
@@ -368,14 +368,12 @@ def _write_sidecar(p: Path, media: Path, tag_list: List[str], url: List[str], ha
    return media


def _emit_tag_payload(source: str, tags_list: List[str], *, hash_value: Optional[str], extra: Optional[Dict[str, Any]] = None, store_label: Optional[str] = None) -> int:
    """Emit tags as structured payload to pipeline.

    Also emits individual tag objects to _PIPELINE_LAST_ITEMS so they can be selected by index.
    """
    """Emit tag values as structured payload to pipeline."""
    payload: Dict[str, Any] = {
        "source": source,
        "tags": list(tags_list),
        "tag": list(tags_list),
        "count": len(tags_list),
    }
    if hash_value:
@@ -388,11 +386,9 @@ def _emit_tag_payload(source: str, tags_list: List[str], *, hash_value: Optional
    if store_label:
        label = store_label
    elif ctx.get_stage_context() is not None:
        label = "tags"
        label = "tag"
    if label:
        ctx.store_value(label, payload)
        if ctx.get_stage_context() is not None and label.lower() != "tags":
            ctx.store_value("tags", payload)

    # Emit individual TagItem objects so they can be selected by bare index
    # When in pipeline, emit individual TagItem objects
@@ -1065,7 +1061,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        return 1
    output = {
        "title": title,
        "tags": tags,
        "tag": tags,
        "formats": [(label, fmt_id) for label, fmt_id in formats],
        "playlist_items": playlist_items,
    }
@@ -1080,7 +1076,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

    # Prefer identifier tags (ISBN/OLID/etc.) when available; fallback to title/filename
    identifier_tags: List[str] = []
    result_tags = get_field(result, "tags", None)
    result_tags = get_field(result, "tag", None)
    if isinstance(result_tags, list):
        identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))]

@@ -1160,7 +1156,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        row.add_column("Album", item.get("album", ""))
        row.add_column("Year", item.get("year", ""))
        payload = {
            "tags": tags,
            "tag": tags,
            "provider": provider.name,
            "title": item.get("title"),
            "artist": item.get("artist"),
@@ -1169,7 +1165,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            "hash": hash_for_payload,
            "store": store_for_payload,
            "extra": {
                "tags": tags,
                "tag": tags,
                "provider": provider.name,
            },
        }
@@ -1236,13 +1232,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # Build a subject payload representing the file whose tags are being shown
    subject_store = get_field(result, "store", None) or store_name
    subject_payload: Dict[str, Any] = {
        "tags": list(current),
        "tag": list(current),
        "title": item_title,
        "name": item_title,
        "store": subject_store,
        "service_name": service_name,
        "extra": {
            "tags": list(current),
            "tag": list(current),
        },
    }
    if file_hash:
@@ -1288,9 +1284,9 @@ class Get_Tag(Cmdlet):
        """Initialize get-tag cmdlet."""
        super().__init__(
            name="get-tag",
            summary="Get tags from Hydrus or local sidecar metadata",
            summary="Get tag values from Hydrus or local sidecar metadata",
            usage="get-tag [-hash <sha256>] [--store <key>] [--emit] [-scrape <url|provider>]",
            alias=["tags"],
            alias=[],
            arg=[
                SharedArgs.HASH,
                CmdletArg(

@@ -1,138 +0,0 @@
from typing import List, Dict, Any
from ._shared import Cmdlet, CmdletArg
from config import load_config, save_config

CMDLET = Cmdlet(
    name=".config",
    summary="Manage configuration settings",
    usage=".config [key] [value]",
    arg=[
        CmdletArg(
            name="key",
            description="Configuration key to update (dot-separated)",
            required=False
        ),
        CmdletArg(
            name="value",
            description="New value for the configuration key",
            required=False
        )
    ]
)

def flatten_config(config: Dict[str, Any], parent_key: str = '', sep: str = '.') -> List[Dict[str, Any]]:
    items = []
    for k, v in config.items():
        if k.startswith('_'):  # Skip internal keys
            continue

        new_key = f"{parent_key}{sep}{k}" if parent_key else k
        if isinstance(v, dict):
            items.extend(flatten_config(v, new_key, sep=sep))
        else:
            items.append({
                "Key": new_key,
                "Value": str(v),
                "Type": type(v).__name__,
                "_selection_args": [new_key]
            })
    return items

def set_nested_config(config: Dict[str, Any], key: str, value: str) -> bool:
    keys = key.split('.')
    d = config

    # Navigate to the parent dict
    for k in keys[:-1]:
        if k not in d or not isinstance(d[k], dict):
            d[k] = {}
        d = d[k]

    last_key = keys[-1]

    # Try to preserve type if key exists
    if last_key in d:
        current_val = d[last_key]
        if isinstance(current_val, bool):
            if value.lower() in ('true', 'yes', '1', 'on'):
                d[last_key] = True
            elif value.lower() in ('false', 'no', '0', 'off'):
                d[last_key] = False
            else:
                # Fallback to boolean conversion of string (usually True for non-empty)
                # But for config, explicit is better.
                print(f"Warning: Could not convert '{value}' to boolean. Using string.")
                d[last_key] = value
        elif isinstance(current_val, int):
            try:
                d[last_key] = int(value)
            except ValueError:
                print(f"Warning: Could not convert '{value}' to int. Using string.")
                d[last_key] = value
        elif isinstance(current_val, float):
            try:
                d[last_key] = float(value)
            except ValueError:
                print(f"Warning: Could not convert '{value}' to float. Using string.")
                d[last_key] = value
        else:
            d[last_key] = value
    else:
        # New key, try to infer type
        if value.lower() in ('true', 'false'):
            d[last_key] = (value.lower() == 'true')
        elif value.isdigit():
            d[last_key] = int(value)
        else:
            d[last_key] = value

    return True

def _run(piped_result: Any, args: List[str], config: Dict[str, Any]) -> int:
    # Reload config to ensure we have the latest on disk.
    # We don't use the passed 'config' because we want to edit the file,
    # and 'config' might contain runtime objects (like the worker manager),
    # while load_config() returns a fresh dict from disk (or cache).

    current_config = load_config()

    # Parse args manually because of the potential for spaces in values
    # and the @ expansion logic in CLI.py passing args

    if not args:
        # List mode
        items = flatten_config(current_config)
        # Sort by key
        items.sort(key=lambda x: x['Key'])

        # Emit items for ResultTable
        import pipeline as ctx
        for item in items:
            ctx.emit(item)
        return 0

    # Update mode
    key = args[0]

    if len(args) < 2:
        print(f"Error: Value required for key '{key}'")
        return 1

    value = " ".join(args[1:])

    # Remove quotes if present
    if (value.startswith('"') and value.endswith('"')) or (value.startswith("'") and value.endswith("'")):
        value = value[1:-1]

    try:
        set_nested_config(current_config, key, value)
        save_config(current_config)
        print(f"Updated '{key}' to '{value}'")
        return 0
    except Exception as e:
        print(f"Error updating config: {e}")
        return 1

CMDLET.exec = _run

@@ -12,7 +12,7 @@ from models import DownloadOptions
from config import resolve_output_dir
import subprocess as _subprocess
import shutil as _shutil
from ._shared import parse_cmdlet_args
from ._shared import create_pipe_object_result, parse_cmdlet_args

try:
    from PyPDF2 import PdfWriter, PdfReader
@@ -136,35 +136,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        if target_path and target_path.exists():
            source_files.append(target_path)

            # Track the .tags file for this source
            tags_file = target_path.with_suffix(target_path.suffix + '.tags')
            # Track the .tag file for this source
            tags_file = target_path.with_suffix(target_path.suffix + '.tag')
            if tags_file.exists():
                source_tags_files.append(tags_file)

                # Try to read hash, tags, url, and relationships from .tags sidecar file
                try:
                    tags_content = tags_file.read_text(encoding='utf-8')
                    for line in tags_content.split('\n'):
                        line = line.strip()
                        if not line:
                            continue
                        if line.startswith('hash:'):
                            hash_value = line[5:].strip()
                            if hash_value:
                                source_hashes.append(hash_value)
                        elif line.startswith('url:') or line.startswith('url:'):
                            # Extract url from tags file
                            url_value = line.split(':', 1)[1].strip() if ':' in line else ''
                            if url_value and url_value not in source_url:
                                source_url.append(url_value)
                        elif line.startswith('relationship:'):
                            # Extract relationships from tags file
                            rel_value = line.split(':', 1)[1].strip() if ':' in line else ''
                            if rel_value and rel_value not in source_relationships:
                                source_relationships.append(rel_value)
                        else:
                            # Collect actual tags (not metadata like hash: or url:)
                            source_tags.append(line)
                source_tags.extend(read_tags_from_file(tags_file) if HAS_METADATA_API else [])
                except Exception:
                    pass

@@ -254,8 +231,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

    log(f"Merged {len(source_files)} files into: {output_path}", file=sys.stderr)

    # Create .tags sidecar file for the merged output using unified API
    tags_path = output_path.with_suffix(output_path.suffix + '.tags')
    # Create .tag sidecar file for the merged output using unified API
    tags_path = output_path.with_suffix(output_path.suffix + '.tag')
    try:
        # Start with title tag
        merged_tags = [f"title:{output_path.stem}"]
@@ -312,29 +289,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    except Exception as e:
        log(f"Warning: Could not create sidecar: {e}", file=sys.stderr)

    # Emit PipelineItem so the merged file can be piped to next command
    # Emit a PipeObject-compatible dict so the merged file can be piped to next command
    try:
        # Try to import PipelineItem from downlow module
        try:
            from downlow import PipelineItem
        except ImportError:
            # Fallback: create a simple object with the required attributes
            class SimpleItem:
                def __init__(self, target, title, media_kind, tags=None, url=None):
                    self.target = target
                    self.title = title
                    self.media_kind = media_kind
                    self.tags = tags or []
                    self.url = url or []
                    self.store = "local"
            PipelineItem = SimpleItem

        merged_item = PipelineItem(
            target=str(output_path),
        from SYS.utils import sha256_file
        merged_hash = sha256_file(output_path)
        merged_item = create_pipe_object_result(
            source="local",
            identifier=output_path.name,
            file_path=str(output_path),
            cmdlet_name="merge-file",
            title=output_path.stem,
            hash_value=merged_hash,
            tag=merged_tags,
            url=source_url,
            media_kind=file_kind,
            tags=merged_tags,  # Include merged tags
            url=source_url  # Include known url
        )
        # Clear previous results to ensure only the merged file is passed down
        ctx.clear_last_result()
@@ -348,7 +316,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # Always delete source files if they were downloaded playlist items (temp files)
    # We can detect this if they are in the temp download directory or if we tracked them
    if delete_after or True:  # Force delete for now as merge consumes them
        # First delete all .tags files
        # First delete all .tag files
        for tags_file in source_tags_files:
            try:
                tags_file.unlink()
@@ -490,8 +458,8 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
    title = file_path.stem  # Default to filename without extension
    if HAS_METADATA_API:
        try:
            # Try to read tags from .tags sidecar file
            tags_file = file_path.with_suffix(file_path.suffix + '.tags')
            # Try to read tags from .tag sidecar file
            tags_file = file_path.with_suffix(file_path.suffix + '.tag')
            if tags_file.exists():
                tags = read_tags_from_file(tags_file)
                if tags:

@@ -1,14 +0,0 @@
from typing import Any, Dict, Sequence
import json
from ._shared import Cmdlet

def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Output the current pipeline result as JSON."""
    print(json.dumps(result, indent=2, default=str))
    return 0

CMDLET = Cmdlet(
    name="output-json",
    summary="Output the current pipeline result as JSON.",
    usage="... | output-json",
)

@@ -121,7 +121,7 @@ class ScreenshotOptions:
    wait_after_load: float = 2.0
    wait_for_article: bool = False
    replace_video_posters: bool = True
    tags: Sequence[str] = ()
    tag: Sequence[str] = ()
    archive: bool = False
    archive_timeout: float = ARCHIVE_TIMEOUT
    url: Sequence[str] = ()
@@ -136,7 +136,7 @@ class ScreenshotResult:
    """Details about the captured screenshot."""

    path: Path
    tags_applied: List[str]
    tag_applied: List[str]
    archive_url: List[str]
    url: List[str]
    warnings: List[str] = field(default_factory=list)
@@ -481,11 +481,11 @@ def _capture_screenshot(options: ScreenshotOptions) -> ScreenshotResult:
    if archives:
        url = unique_preserve_order([*url, *archives])

    applied_tags = unique_preserve_order(list(tag for tag in options.tags if tag.strip()))
    applied_tag = unique_preserve_order(list(tag for tag in options.tag if tag.strip()))

    return ScreenshotResult(
        path=destination,
        tags_applied=applied_tags,
        tag_applied=applied_tag,
        archive_url=archive_url,
        url=url,
        warnings=warnings,

@@ -27,9 +27,9 @@ except Exception:  # pragma: no cover
    resolve_output_dir = None  # type: ignore

try:
    from API.HydrusNetwork import HydrusClient, HydrusRequestError
    from API.HydrusNetwork import HydrusNetwork, HydrusRequestError
except ImportError:  # pragma: no cover
    HydrusClient = None  # type: ignore
    HydrusNetwork = None  # type: ignore
    HydrusRequestError = RuntimeError  # type: ignore

try:
@@ -47,7 +47,7 @@ class SearchRecord:
    path: str
    size_bytes: int | None = None
    duration_seconds: str | None = None
    tags: str | None = None
    tag: str | None = None
    hash: str | None = None

    def as_dict(self) -> dict[str, str]:
@@ -56,8 +56,8 @@ class SearchRecord:
        payload["size"] = str(self.size_bytes)
        if self.duration_seconds:
            payload["duration"] = self.duration_seconds
        if self.tags:
            payload["tags"] = self.tags
        if self.tag:
            payload["tag"] = self.tag
        if self.hash:
            payload["hash"] = self.hash
        return payload
@@ -233,16 +233,17 @@ class Search_Store(Cmdlet):

        from Store import Store
        storage = Store(config=config or {})
        from Store._base import Store as BaseStore

        backend_to_search = storage_backend or None
        if backend_to_search:
            searched_backends.append(backend_to_search)
            target_backend = storage[backend_to_search]
            if not callable(getattr(target_backend, 'search_file', None)):
            if type(target_backend).search is BaseStore.search:
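                # A backend that still inherits BaseStore.search unchanged has no
                # real search implementation, so treat it as non-searchable.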
                log(f"Backend '{backend_to_search}' does not support searching", file=sys.stderr)
                db.update_worker_status(worker_id, 'error')
                return 1
            results = target_backend.search_store(query, limit=limit)
            results = target_backend.search(query, limit=limit)
        else:
            from API.HydrusNetwork import is_hydrus_available
            hydrus_available = is_hydrus_available(config or {})
@@ -256,7 +257,7 @@ class Search_Store(Cmdlet):
                    continue
                searched_backends.append(backend_name)

                backend_results = backend.search_store(query, limit=limit - len(all_results))
                backend_results = backend.search(query, limit=limit - len(all_results))
                if backend_results:
                    all_results.extend(backend_results)
                    if len(all_results) >= limit:

@@ -17,7 +17,7 @@ from ._shared import (
    CmdletArg,
    parse_cmdlet_args,
    normalize_result_input,
    extract_tags_from_result,
    extract_tag_from_result,
    extract_title_from_result
)
import pipeline as ctx
@@ -33,7 +33,7 @@ CMDLET = Cmdlet(
    ],
    detail=[
        "Creates a new file with 'clip_' prefix in the filename/title.",
        "Inherits tags from the source file.",
        "Inherits tag values from the source file.",
        "Adds a relationship to the source file (if hash is available).",
        "Output can be piped to add-file.",
    ]
@@ -185,8 +185,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    except Exception:
        pass

    # 2. Get tags
    tags = extract_tags_from_result(item)
    # 2. Get tag values
    tags = extract_tag_from_result(item)

    # 3. Get title and modify it
    title = extract_title_from_result(item)
@@ -266,7 +266,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    result_dict = {
        "path": str(output_path),
        "title": new_title,
        "tags": new_tags,
        "tag": new_tags,
        "media_kind": "video",  # Assumption, or derive
        "hash": clip_hash,  # Pass calculated hash
        "relationships": {