@@ -1118,7 +1118,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     -query "hash:<sha256>": Override hash to use instead of result's hash
     --store <key>: Store result to this key for pipeline
     --emit: Emit result without interactive prompt (quiet mode)
-    -scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks)
+    -scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks, imdb)
     """
     args_list = [str(arg) for arg in (args or [])]
     raw_args = list(args_list)
@@ -1367,7 +1367,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         print(json_module.dumps(output, ensure_ascii=False))
         return 0
 
-    # Provider scraping (e.g., itunes)
+    # Provider scraping (e.g., itunes, imdb)
     provider = get_metadata_provider(scrape_url, config)
     if provider is None:
         log(f"Unknown metadata provider: {scrape_url}", file=sys.stderr)
@@ -1447,6 +1447,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             identifiers.get("isbn_13") or identifiers.get("isbn_10")
             or identifiers.get("isbn") or identifiers.get("openlibrary")
         )
+    elif provider.name == "imdb":
+        identifier_query = identifiers.get("imdb")
     elif provider.name == "itunes":
         identifier_query = identifiers.get("musicbrainz") or identifiers.get(
             "musicbrainzalbum"
@@ -1557,6 +1559,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     from SYS.result_table import ResultTable
 
     table = ResultTable(f"Metadata: {provider.name}")
+    table.set_table(f"metadata.{provider.name}")
     table.set_source_command("get-tag", [])
     selection_payload = []
     hash_for_payload = normalize_hash(hash_override) or normalize_hash(
@@ -1601,10 +1604,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         selection_payload.append(payload)
         table.set_row_selection_args(idx, [str(idx + 1)])
 
     # Store an overlay so that a subsequent `@N` selects from THIS metadata table,
     # not from the previous searchable table.
     ctx.set_last_result_table_overlay(table, selection_payload)
     ctx.set_current_stage_table(table)
     # Preserve items for @ selection and downstream pipes without emitting duplicates
     ctx.set_last_result_items_only(selection_payload)
     return 0
 
     # If -scrape was requested but no URL, that's an error
@@ -1653,6 +1656,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         path=str(subject_path) if subject_path else None,
         subject=result,
     )
+    _emit_tag_payload(
+        str(result_provider),
+        [str(t) for t in result_tags if t is not None],
+        hash_value=file_hash,
+    )
     return 0
 
     # Apply tags to the store backend (no sidecar writing here).
@@ -1716,6 +1724,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             },
         },
     )
+    _emit_tag_payload(
+        str(store_name),
+        list(updated_tags),
+        hash_value=file_hash,
+        extra={"applied_provider": str(result_provider)},
+    )
     return 0
 
     hash_from_result = normalize_hash(get_field(result, "hash", None))
@@ -1825,7 +1839,14 @@ _SCRAPE_CHOICES = []
 try:
     _SCRAPE_CHOICES = sorted(list_metadata_providers().keys())
 except Exception:
-    _SCRAPE_CHOICES = ["itunes", "openlibrary", "googlebooks", "google", "musicbrainz"]
+    _SCRAPE_CHOICES = [
+        "itunes",
+        "openlibrary",
+        "googlebooks",
+        "google",
+        "musicbrainz",
+        "imdb",
+    ]
 
 # Special scrape mode: pull tags from an item's URL via yt-dlp (no download)
 if "ytdlp" not in _SCRAPE_CHOICES: