This commit is contained in:
2026-01-20 16:42:49 -08:00
parent 1e2054189b
commit 922b649e17
9 changed files with 351 additions and 141 deletions

View File

@@ -358,10 +358,25 @@ def _emit_tags_as_table(
# Store the table and items in history so @.. works to go back
# Use overlay mode so it doesn't push the previous search to history stack
# This makes get-tag behave like a transient view
table_applied = False
try:
ctx.set_last_result_table_overlay(table, tag_items, subject)
table_applied = True
except AttributeError:
ctx.set_last_result_table(table, tag_items, subject)
try:
ctx.set_last_result_table(table, tag_items, subject)
table_applied = True
except Exception:
table_applied = False
except Exception:
table_applied = False
if table_applied:
try:
if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(table)
except Exception:
pass
# Note: CLI will handle displaying the table via ResultTable formatting
@@ -776,7 +791,7 @@ def _scrape_url_metadata(
import json as json_module
try:
from SYS.metadata import extract_ytdlp_tags
from SYS.yt_metadata import extract_ytdlp_tags
except ImportError:
extract_ytdlp_tags = None
@@ -1613,6 +1628,33 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
if isinstance(result, list) and len(result) > 0:
result = result[0]
try:
display_subject = ctx.get_last_result_subject()
except Exception:
display_subject = None
def _value_has_content(value: Any) -> bool:
if value is None:
return False
if isinstance(value, str):
return bool(value.strip())
if isinstance(value, (list, tuple, set)):
return len(value) > 0
return True
def _resolve_subject_value(*keys: str) -> Any:
    """Return the first non-empty value for *keys*.

    The current ``result`` is consulted for every key first; only if none
    of its fields has content does the lookup fall back to
    ``display_subject`` (when one exists). Returns None when nothing with
    content is found.
    """
    # Probe the result first, then (if available) the display subject.
    sources = [result] if display_subject is None else [result, display_subject]
    for source in sources:
        for key in keys:
            candidate = get_field(source, key, None)
            if _value_has_content(candidate):
                return candidate
    return None
# If the current result already carries a tag list (e.g. a selected metadata
# row from get-tag -scrape itunes), APPLY those tags to the file in the store.
result_provider = get_field(result, "provider", None)
@@ -1726,7 +1768,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
)
return 0
hash_from_result = normalize_hash(get_field(result, "hash", None))
hash_from_result = normalize_hash(_resolve_subject_value("hash"))
file_hash = hash_override or hash_from_result
# Only use emit mode if explicitly requested with --emit flag, not just because we're in a pipeline
# This allows interactive REPL to work even in pipelines
@@ -1734,7 +1776,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
store_label = store_key.strip() if store_key and store_key.strip() else None
# Get hash and store from result
store_name = get_field(result, "store")
store_value = _resolve_subject_value("store")
store_name = str(store_value).strip() if store_value is not None else None
if not file_hash:
log("No hash available in result", file=sys.stderr)
@@ -1744,6 +1787,68 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log("No store specified in result", file=sys.stderr)
return 1
item_title = (
_resolve_subject_value("title", "name", "filename")
)
subject_store = store_name
subject_path_value = (
_resolve_subject_value("path", "target", "filename")
)
subject_path = None
if subject_path_value is not None:
try:
subject_path = str(subject_path_value)
except Exception:
subject_path = None
service_name = ""
subject_payload_base: Dict[str, Any] = {
"tag": [],
"title": item_title,
"name": item_title,
"store": subject_store,
"service_name": service_name,
"extra": {
"tag": [],
},
}
if file_hash:
subject_payload_base["hash"] = file_hash
if subject_path:
subject_payload_base["path"] = subject_path
def _subject_payload_with(
    tags: Sequence[str],
    service_name_override: Optional[str] = None,
) -> Dict[str, Any]:
    """Build a copy of the base subject payload carrying *tags*.

    ``tag`` and ``extra.tag`` each get their own independent list copy so
    later mutation of one does not leak into the other. The service name
    is replaced only when an override is supplied.
    """
    payload = {
        **subject_payload_base,
        "tag": list(tags),
        "extra": {"tag": list(tags)},
    }
    if service_name_override is not None:
        payload["service_name"] = service_name_override
    return payload
raw_result_tags = get_field(result, "tag", None)
if not isinstance(raw_result_tags, list):
raw_result_tags = get_field(result, "tags", None)
display_tags: List[str] = []
if isinstance(raw_result_tags, list):
display_tags = [str(t) for t in raw_result_tags if t is not None]
if display_tags and not emit_mode:
subject_payload = _subject_payload_with(display_tags)
_emit_tags_as_table(
display_tags,
file_hash=file_hash,
store=str(subject_store),
service_name=None,
config=config,
item_title=item_title,
path=subject_path,
subject=subject_payload,
)
return 0
# Get tags using storage backend
try:
from Store import Store
@@ -1761,56 +1866,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log(f"Failed to get tags: {exc}", file=sys.stderr)
return 1
# Always output to ResultTable (pipeline mode only)
# Extract title for table header
item_title = (
get_field(result,
"title",
None) or get_field(result,
"name",
None) or get_field(result,
"filename",
None)
subject_payload = _subject_payload_with(
current,
service_name if source == "hydrus" else None,
)
# Build a subject payload representing the file whose tags are being shown
subject_store = get_field(result, "store", None) or store_name
subject_path = (
get_field(result,
"path",
None) or get_field(result,
"target",
None) or get_field(result,
"filename",
None)
)
subject_payload: Dict[str,
Any] = {
"tag": list(current),
"title": item_title,
"name": item_title,
"store": subject_store,
"service_name": service_name,
"extra": {
"tag": list(current),
},
}
if file_hash:
subject_payload["hash"] = file_hash
if subject_path:
try:
subject_payload["path"] = str(subject_path)
except Exception:
pass
_emit_tags_as_table(
current,
file_hash=file_hash,
store=subject_store,
store=str(subject_store),
service_name=service_name if source == "hydrus" else None,
config=config,
item_title=item_title,
path=str(subject_path) if subject_path else None,
path=subject_path,
subject=subject_payload,
)