This commit is contained in:
2026-01-21 22:52:52 -08:00
parent d94e321148
commit 201663bb62
9 changed files with 377 additions and 124 deletions

View File

@@ -5,7 +5,7 @@ from pathlib import Path
import sys
import re
from SYS.logger import log
from SYS.logger import log, debug
from SYS import models
from SYS import pipeline as ctx
@@ -420,11 +420,9 @@ def _refresh_tag_view(
except Exception:
return
if not target_hash or not store_name:
if not target_hash:
return
refresh_args: List[str] = ["-query", f"hash:{target_hash}", "-store", store_name]
get_tag = None
try:
get_tag = get_cmdlet("get-tag")
@@ -435,14 +433,54 @@ def _refresh_tag_view(
try:
subject = ctx.get_last_result_subject()
if subject and _matches_target(subject, target_hash, target_path, store_name):
get_tag(subject, refresh_args, config)
if not subject or not _matches_target(subject, target_hash, target_path, store_name):
return
except Exception:
pass
try:
get_tag(res, refresh_args, config)
refresh_args: List[str] = ["-query", f"hash:{target_hash}"]
# Build a lean subject so get-tag fetches fresh tags instead of reusing cached payloads.
def _value_has_content(value: Any) -> bool:
if value is None:
return False
if isinstance(value, str):
return bool(value.strip())
if isinstance(value, (list, tuple, set)):
return len(value) > 0
return True
def _build_refresh_subject() -> Dict[str, Any]:
    """Assemble a minimal subject dict for the get-tag refresh call.

    Carries only identity fields (hash, store, path) plus a few display
    fields from the current subject, and strips any cached tag data from
    ``extra`` so get-tag fetches fresh tags instead of reusing them.
    """
    fresh: Dict[str, Any] = {"hash": target_hash}
    if _value_has_content(store_name):
        fresh["store"] = store_name
    # Prefer the explicit target path, then subject "path", then "target".
    candidate_path = target_path or get_field(subject, "path")
    if not _value_has_content(candidate_path):
        candidate_path = get_field(subject, "target")
    if _value_has_content(candidate_path):
        fresh["path"] = candidate_path
    for field_name in ("title", "name", "url", "relations", "service_name"):
        field_val = get_field(subject, field_name)
        if _value_has_content(field_val):
            fresh[field_name] = field_val
    extras = get_field(subject, "extra")
    if isinstance(extras, dict):
        # Drop cached tag payloads so the refresh re-reads from the backend.
        kept = {
            key: val
            for key, val in extras.items()
            if str(key).lower() not in {"tag", "tags"}
        }
        if kept:
            fresh["extra"] = kept
    elif _value_has_content(extras):
        fresh["extra"] = extras
    return fresh
refresh_subject = _build_refresh_subject()
with ctx.suspend_live_progress():
get_tag(refresh_subject, refresh_args, config)
except Exception:
pass
@@ -643,7 +681,7 @@ class Add_Tag(Cmdlet):
total_added = 0
total_modified = 0
store_registry = Store(config)
store_registry = Store(config, suppress_debug=True)
extract_matched_items = 0
extract_no_match_items = 0
@@ -1004,7 +1042,7 @@ class Add_Tag(Cmdlet):
raw_path
)
if changed and not is_last_stage and not use_inline_tags:
if changed and not use_inline_tags:
_refresh_tag_view(res, resolved_hash, str(store_name), raw_path, config)
if is_last_stage:

View File

@@ -78,9 +78,52 @@ def _refresh_tag_view_if_current(
refresh_args: list[str] = []
if file_hash:
refresh_args.extend(["-query", f"hash:{file_hash}"])
if store_name:
refresh_args.extend(["-store", store_name])
get_tag(subject, refresh_args, config)
# Build a lean subject so get-tag fetches fresh tags instead of reusing cached payloads.
def _value_has_content(value: Any) -> bool:
if value is None:
return False
if isinstance(value, str):
return bool(value.strip())
if isinstance(value, (list, tuple, set)):
return len(value) > 0
return True
def _build_refresh_subject() -> Dict[str, Any]:
    """Build a lean subject for the get-tag refresh.

    Keeps identity (hash/store/path) and display fields, while removing
    cached tag entries from ``extra`` so get-tag re-fetches fresh tags.
    """
    fresh: Dict[str, Any] = {"hash": file_hash}
    resolved_store = store_name or get_field(subject, "store")
    if _value_has_content(resolved_store):
        fresh["store"] = resolved_store
    # Fall back from the explicit path to subject "path", then "target".
    resolved_path = path or get_field(subject, "path")
    if not _value_has_content(resolved_path):
        resolved_path = get_field(subject, "target")
    if _value_has_content(resolved_path):
        fresh["path"] = resolved_path
    for field_name in ("title", "name", "url", "relations", "service_name"):
        field_val = get_field(subject, field_name)
        if _value_has_content(field_val):
            fresh[field_name] = field_val
    extras = get_field(subject, "extra")
    if isinstance(extras, dict):
        # Strip cached tag payloads before handing the subject to get-tag.
        kept = {
            key: val
            for key, val in extras.items()
            if str(key).lower() not in {"tag", "tags"}
        }
        if kept:
            fresh["extra"] = kept
    elif _value_has_content(extras):
        fresh["extra"] = extras
    return fresh
refresh_subject = _build_refresh_subject()
# Do not pass -store here as it triggers emit_mode/quiet in get-tag
with ctx.suspend_live_progress():
get_tag(refresh_subject, refresh_args, config)
except Exception:
pass
@@ -333,7 +376,7 @@ def _process_deletion(
def _fetch_existing_tags() -> list[str]:
try:
backend = Store(config)[store_name]
backend = Store(config, suppress_debug=True)[store_name]
existing, _src = backend.get_tag(resolved_hash, config=config)
return list(existing or [])
except Exception:
@@ -360,7 +403,7 @@ def _process_deletion(
return False
try:
backend = Store(config)[store_name]
backend = Store(config, suppress_debug=True)[store_name]
ok = backend.delete_tag(resolved_hash, list(tags), config=config)
if ok:
preview = resolved_hash[:12] + ("" if len(resolved_hash) > 12 else "")

View File

@@ -314,6 +314,7 @@ def _emit_tags_as_table(
item_title: Optional[str] = None,
path: Optional[str] = None,
subject: Optional[Any] = None,
quiet: bool = False,
) -> None:
"""Emit tags as TagItem objects and display via ResultTable.
@@ -335,8 +336,9 @@ def _emit_tags_as_table(
if path:
metadata["Path"] = path
# Create ItemDetailView
table = ItemDetailView("Tags", item_metadata=metadata, max_columns=1)
# Create ItemDetailView with exclude_tags=True so the panel shows file info
# but doesn't duplicate the tag list that we show as a table below.
table = ItemDetailView("Tags", item_metadata=metadata, max_columns=1, exclude_tags=True)
table.set_source_command("get-tag", [])
# Create TagItem for each tag
@@ -371,6 +373,15 @@ def _emit_tags_as_table(
except Exception:
table_applied = False
# Display the rich panel (metadata info) if not in quiet/emit-only mode.
# In the TUI, this output is captured and shown in the log pane.
if not quiet:
try:
from SYS.rich_display import stdout_console
stdout_console().print(table)
except Exception:
pass
if table_applied:
try:
if hasattr(ctx, "set_current_stage_table"):
@@ -1129,6 +1140,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
--emit: Emit result without interactive prompt (quiet mode)
-scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks, imdb)
"""
emit_mode = False
is_store_backed = False
args_list = [str(arg) for arg in (args or [])]
raw_args = list(args_list)
@@ -1179,6 +1192,58 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return 1
store_key = parsed_args.get("store")
emit_requested = parsed_args.get("emit", False)
# Only use emit mode if explicitly requested with --emit flag, not just because we're in a pipeline
# This allows interactive REPL to work even in pipelines
emit_mode = emit_requested or bool(store_key)
store_label = store_key.strip() if store_key and store_key.strip() else None
# Handle @N selection which creates a list - extract the first item
if isinstance(result, list) and len(result) > 0:
result = result[0]
try:
display_subject = ctx.get_last_result_subject()
except Exception:
display_subject = None
def _value_has_content(value: Any) -> bool:
if value is None:
return False
if isinstance(value, str):
return bool(value.strip())
if isinstance(value, (list, tuple, set)):
return len(value) > 0
return True
def _resolve_subject_value(*keys: str) -> Any:
    """Return the first contentful value found for *keys*.

    Checks the pipeline ``result`` first for every key, then falls back
    to ``display_subject`` (when present); returns None if nothing
    contentful is found.
    """
    for candidate in keys:
        found = get_field(result, candidate, None)
        if _value_has_content(found):
            return found
    if display_subject is not None:
        for candidate in keys:
            found = get_field(display_subject, candidate, None)
            if _value_has_content(found):
                return found
    return None
# Resolve core identity early so it's available for all branches
hash_from_result = normalize_hash(_resolve_subject_value("hash"))
file_hash = hash_override or hash_from_result
store_value = _resolve_subject_value("store")
store_name = (store_key or str(store_value).strip()) if store_value is not None else store_key
subject_path = _resolve_subject_value("path", "target", "filename")
item_title = _resolve_subject_value("title", "name", "filename")
# Identify if the subject is store-backed. If so, we prioritize fresh data over cached tags.
# Note: PATH, URL, and LOCAL stores are transient and don't support backend get-tag refreshes.
is_store_backed = bool(file_hash and store_name and
str(store_name).upper() not in {"PATH", "URL", "LOCAL"})
scrape_url = parsed_args.get("scrape")
scrape_requested = scrape_flag_present or scrape_url is not None
@@ -1238,7 +1303,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
from Store import Store
storage = Store(config)
storage = Store(config, suppress_debug=True)
backend = storage[str(store_name)]
except Exception as exc:
log(
@@ -1357,6 +1422,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"scrape_url": scrape_target
},
},
quiet=emit_mode,
)
return 0
@@ -1396,7 +1462,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
from Store import Store
storage = Store(config)
storage = Store(config, suppress_debug=True)
backend = storage[str(store_for_scrape)]
current_tags, _src = backend.get_tag(file_hash_for_scrape, config=config)
if isinstance(current_tags, (list, tuple, set)) and current_tags:
@@ -1562,6 +1628,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"provider": "ytdlp",
"url": str(query_hint)
},
quiet=emit_mode,
)
return 0
@@ -1624,57 +1691,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log("-scrape requires a URL argument", file=sys.stderr)
return 1
# Handle @N selection which creates a list - extract the first item
if isinstance(result, list) and len(result) > 0:
result = result[0]
try:
display_subject = ctx.get_last_result_subject()
except Exception:
display_subject = None
def _value_has_content(value: Any) -> bool:
if value is None:
return False
if isinstance(value, str):
return bool(value.strip())
if isinstance(value, (list, tuple, set)):
return len(value) > 0
return True
def _resolve_subject_value(*keys: str) -> Any:
    """Look up the first contentful value for *keys* across subjects.

    Scans the pipeline ``result`` for each key in order; if none yields
    content and ``display_subject`` exists, repeats the scan against it.
    Returns None when every lookup comes back empty.
    """
    for candidate in keys:
        found = get_field(result, candidate, None)
        if _value_has_content(found):
            return found
    if display_subject is None:
        return None
    for candidate in keys:
        found = get_field(display_subject, candidate, None)
        if _value_has_content(found):
            return found
    return None
# If the current result already carries a tag list (e.g. a selected metadata
# row from get-tag -scrape itunes), APPLY those tags to the file in the store.
result_provider = get_field(result, "provider", None)
result_tags = get_field(result, "tag", None)
if result_provider and isinstance(result_tags, list) and result_tags:
file_hash = normalize_hash(hash_override) or normalize_hash(
get_field(result,
"hash",
None)
)
store_name = get_field(result, "store", None)
subject_path = (
get_field(result,
"path",
None) or get_field(result,
"target",
None) or get_field(result,
"filename",
None)
)
if not file_hash or not store_name:
log(
"Selected metadata row is missing hash/store; cannot apply tags",
@@ -1691,6 +1713,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
None) or result_provider),
path=str(subject_path) if subject_path else None,
subject=result,
quiet=emit_mode,
)
_emit_tag_payload(
str(result_provider),
@@ -1715,7 +1738,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
from Store import Store
storage = Store(config)
storage = Store(config, suppress_debug=True)
backend = storage[str(store_name)]
ok = bool(backend.add_tag(file_hash, apply_tags, config=config))
if not ok:
@@ -1759,6 +1782,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"applied_provider": str(result_provider)
},
},
quiet=emit_mode,
)
_emit_tag_payload(
str(store_name),
@@ -1768,17 +1792,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
)
return 0
hash_from_result = normalize_hash(_resolve_subject_value("hash"))
file_hash = hash_override or hash_from_result
# Only use emit mode if explicitly requested with --emit flag, not just because we're in a pipeline
# This allows interactive REPL to work even in pipelines
emit_mode = emit_requested or bool(store_key)
store_label = store_key.strip() if store_key and store_key.strip() else None
# Get hash and store from result
store_value = _resolve_subject_value("store")
store_name = str(store_value).strip() if store_value is not None else None
if not file_hash:
log("No hash available in result", file=sys.stderr)
return 1
@@ -1787,9 +1800,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log("No store specified in result", file=sys.stderr)
return 1
item_title = (
_resolve_subject_value("title", "name", "filename")
)
subject_store = store_name
subject_path_value = (
_resolve_subject_value("path", "target", "filename")
@@ -1833,7 +1843,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
display_tags: List[str] = []
if isinstance(raw_result_tags, list):
display_tags = [str(t) for t in raw_result_tags if t is not None]
if display_tags and not emit_mode:
# Only use cached tags if the item is NOT store-backed.
# For store-backed items (Hydrus/Folders), we want the latest state.
if display_tags and not emit_mode and not is_store_backed:
subject_payload = _subject_payload_with(display_tags)
_emit_tags_as_table(
display_tags,
@@ -1844,6 +1857,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
item_title=item_title,
path=subject_path,
subject=subject_payload,
quiet=emit_mode,
)
return 0
@@ -1851,7 +1865,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
from Store import Store
storage = Store(config)
storage = Store(config, suppress_debug=True)
backend = storage[store_name]
current, source = backend.get_tag(file_hash, config=config)
current = list(current or [])
@@ -1877,6 +1891,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
item_title=item_title,
path=subject_path,
subject=subject_payload,
quiet=emit_mode,
)
# If emit requested or store key provided, emit payload