This commit is contained in:
2026-02-14 15:54:31 -08:00
parent ce2f28cc50
commit ae4880b164
7 changed files with 215 additions and 22 deletions

View File

@@ -2082,8 +2082,9 @@ class Download_File(Cmdlet):
except Exception:
height_selector = None
if query_wants_audio:
# Explicit audio request should map to the configured audio selector (usually '251/140/bestaudio')
ytdl_format = ytdlp_tool.default_format("audio")
# Explicit `format:audio` must always force bestaudio fallback chain
# and avoid format-list/selector ambiguity.
ytdl_format = "bestaudio/best"
elif height_selector:
ytdl_format = height_selector
elif query_format:

View File

@@ -144,6 +144,105 @@ class search_file(Cmdlet):
raw = str(value or "").strip().lower()
return "".join(ch for ch in raw if ch.isalnum())
@staticmethod
def _extract_namespace_tags(payload: Dict[str, Any]) -> List[str]:
"""Return deduplicated namespace tags from payload, excluding title:* tags."""
candidates: List[str] = []
def _add_candidate(value: Any) -> None:
if isinstance(value, str):
text = value.strip()
if text:
parts = re.split(r"[,;\n\r]+", text)
for part in parts:
token = part.strip().strip("[](){}\"'#")
if token:
candidates.append(token)
elif isinstance(value, dict):
for nested in value.values():
_add_candidate(nested)
elif isinstance(value, (list, tuple, set)):
for item in value:
_add_candidate(item)
_add_candidate(payload.get("tag"))
_add_candidate(payload.get("tags"))
_add_candidate(payload.get("tag_summary"))
metadata = payload.get("metadata")
if isinstance(metadata, dict):
_add_candidate(metadata.get("tag"))
_add_candidate(metadata.get("tags"))
meta_tags = metadata.get("tags")
if isinstance(meta_tags, dict):
for service_data in meta_tags.values():
if not isinstance(service_data, dict):
continue
display_tags = service_data.get("display_tags")
if isinstance(display_tags, dict):
for ns_name, tag_list in display_tags.items():
if isinstance(tag_list, list):
ns_text = str(ns_name or "").strip()
for tag_item in tag_list:
item_text = str(tag_item or "").strip()
if not item_text:
continue
if ":" in item_text:
candidates.append(item_text)
continue
if ns_text:
candidates.append(f"{ns_text}:{item_text}")
else:
candidates.append(item_text)
else:
_add_candidate(tag_list)
namespace_tags: List[str] = []
seen: set[str] = set()
for raw in candidates:
candidate = str(raw or "").strip()
if not candidate or ":" not in candidate:
continue
ns, value = candidate.split(":", 1)
ns_norm = ns.strip().lower()
value_norm = value.strip()
if not value_norm:
continue
if ns_norm == "title":
continue
normalized = f"{ns_norm}:{value_norm}"
key = normalized.lower()
if key in seen:
continue
seen.add(key)
namespace_tags.append(normalized)
return namespace_tags
def _set_storage_display_columns(self, payload: Dict[str, Any]) -> None:
    """Attach an explicit (label, value) column layout for store search rows.

    Mutates *payload* in place by setting ``payload["columns"]`` so the
    Tag column renders immediately after Title in the result table.
    """
    title = str(
        payload.get("title")
        or payload.get("name")
        or payload.get("filename")
        or "Result"
    )
    tags = ", ".join(self._extract_namespace_tags(payload))
    store = str(
        payload.get("store")
        or payload.get("table")
        or payload.get("source")
        or ""
    )
    # Prefer size_bytes, falling back to size; the raw value is kept so
    # the table layer can apply its own formatting.
    size = payload.get("size_bytes")
    if size is None:
        size = payload.get("size")
    extension = str(payload.get("ext") or "")
    payload["columns"] = [
        ("Title", title),
        ("Tag", tags),
        ("Store", store),
        ("Size", size),
        ("Ext", extension),
    ]
def _ensure_storage_columns(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Ensure storage results have the necessary fields for result_table display."""
@@ -166,8 +265,8 @@ class search_file(Cmdlet):
# Ensure size_bytes is present for display (already set by search_file())
# result_table will handle formatting it
# Don't create manual columns - let result_table handle display
# This allows the table to respect max_columns and apply consistent formatting
# Store search uses explicit columns so TAG can appear right after TITLE.
self._set_storage_display_columns(payload)
return payload
def _run_provider_search(
@@ -706,19 +805,35 @@ class search_file(Cmdlet):
# First try to extract from metadata tags dict
metadata_tags = meta_obj.get("tags")
if isinstance(metadata_tags, dict):
collected_tags: List[str] = []
for service_data in metadata_tags.values():
if isinstance(service_data, dict):
display_tags = service_data.get("display_tags", {})
if isinstance(display_tags, dict):
for tag_list in display_tags.values():
if isinstance(tag_list, list):
tags_list = [
str(t).strip() for t in tag_list
if isinstance(t, str) and str(t).strip()
]
break
if tags_list:
break
for ns_name, tag_list in display_tags.items():
if not isinstance(tag_list, list):
continue
ns_text = str(ns_name or "").strip()
for tag_item in tag_list:
tag_text = str(tag_item or "").strip()
if not tag_text:
continue
if ":" in tag_text:
collected_tags.append(tag_text)
elif ns_text:
collected_tags.append(f"{ns_text}:{tag_text}")
else:
collected_tags.append(tag_text)
if collected_tags:
dedup: List[str] = []
seen_tags: set[str] = set()
for tag_text in collected_tags:
key = tag_text.lower()
if key in seen_tags:
continue
seen_tags.add(key)
dedup.append(tag_text)
tags_list = dedup
# Fallback: if metadata didn't include tags, call get_tag() separately
# (This maintains compatibility with backends that don't include tags in metadata)
@@ -788,6 +903,8 @@ class search_file(Cmdlet):
"url": meta_obj.get("url") or [],
}
self._set_storage_display_columns(payload)
table.add_result(payload)
results_list.append(payload)
ctx.emit(payload)