nose
2025-12-21 05:10:09 -08:00
parent 8ca5783970
commit 11a13edb84
15 changed files with 1712 additions and 213 deletions

@@ -254,6 +254,22 @@ def list_formats(
        return None
    formats = info.get("formats") or []
    # Some URLs (notably playlist contexts) yield a playlist-shaped payload with
    # `entries` rather than a direct video payload. If so, try to pull formats
    # from the first concrete entry.
    if (not formats) and isinstance(info.get("entries"), list):
        try:
            for entry in info.get("entries") or []:
                if not isinstance(entry, dict):
                    continue
                entry_formats = entry.get("formats")
                if isinstance(entry_formats, list) and entry_formats:
                    formats = entry_formats
                    break
        except Exception:
            pass
    if not isinstance(formats, list) or not formats:
        log("No formats available", file=sys.stderr)
        return None
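
For context on why the fallback above exists: yt-dlp's extract_info() can return a playlist-shaped dict whose "entries" hold the per-video dicts instead of a top-level "formats" list. A minimal sketch of that probing pattern, using only yt-dlp's public API (probe_formats is an illustrative name, not part of this codebase):

import yt_dlp

def probe_formats(url: str) -> list:
    # extract_info(download=False) yields either a video dict with "formats"
    # or a playlist-shaped dict whose "entries" contain the per-video dicts.
    with yt_dlp.YoutubeDL({"quiet": True}) as ydl:
        info = ydl.extract_info(url, download=False)
    formats = (info or {}).get("formats") or []
    if not formats:
        for entry in (info or {}).get("entries") or []:
            if isinstance(entry, dict) and entry.get("formats"):
                return entry["formats"]
    return formats
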
@@ -704,7 +720,30 @@ def download_media(
    session_id = None
    first_section_info = {}
    if ytdl_options.get("download_sections"):
        session_id, first_section_info = _download_with_sections_via_cli(opts.url, ytdl_options, ytdl_options.get("download_sections", []), quiet=opts.quiet)
        # The CLI path emits yt-dlp's own progress output; pause the pipeline Live UI
        # so those progress bars remain visible instead of being clobbered.
        try:
            from contextlib import nullcontext
        except Exception:
            nullcontext = None  # type: ignore
        suspend = getattr(pipeline_context, "suspend_live_progress", None)
        cm = suspend() if callable(suspend) else (nullcontext() if nullcontext else None)
        if cm is None:
            session_id, first_section_info = _download_with_sections_via_cli(
                opts.url,
                ytdl_options,
                ytdl_options.get("download_sections", []),
                quiet=opts.quiet,
            )
        else:
            with cm:
                session_id, first_section_info = _download_with_sections_via_cli(
                    opts.url,
                    ytdl_options,
                    ytdl_options.get("download_sections", []),
                    quiet=opts.quiet,
                )
        info = None
    else:
        with yt_dlp.YoutubeDL(ytdl_options) as ydl:  # type: ignore[arg-type]
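
The suspend-or-fall-back dance above is repeated later in this commit, so here is the shape of the pattern as a standalone sketch. It assumes only that pipeline_context may or may not expose a suspend_live_progress() hook returning a context manager; _maybe_suspend is a hypothetical helper, not code from this repository:

from contextlib import nullcontext

def _maybe_suspend(pipeline_context):
    # Return the real "pause the Live UI" context manager when the hook exists,
    # otherwise a no-op, so callers can always write a single `with` block.
    suspend = getattr(pipeline_context, "suspend_live_progress", None)
    return suspend() if callable(suspend) else nullcontext()

# with _maybe_suspend(pipeline_context):
#     session_id, first_section_info = _download_with_sections_via_cli(...)
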
@@ -1384,21 +1423,50 @@ class Download_Media(Cmdlet):
item["title"] = item.get("name") or item.get("target") or item.get("path") or "Result"
# Keep the full payload for history/inspection, but display a focused table.
display_row = {
"title": item.get("title"),
"store": item.get("store"),
"hash": item.get("hash") or item.get("file_hash") or item.get("sha256"),
}
# Use shared extractors so Ext/Size/Store/Hash remain consistent everywhere.
try:
from result_table import build_display_row
except Exception:
build_display_row = None # type: ignore
if callable(build_display_row):
display_row = build_display_row(item, keys=["title", "store", "hash", "ext", "size"])
else:
display_row = {
"title": item.get("title"),
"store": item.get("store"),
"hash": item.get("hash") or item.get("file_hash") or item.get("sha256"),
"ext": str(item.get("ext") or ""),
"size": item.get("size") or item.get("size_bytes"),
}
table.add_result(display_row)
results_list.append(item)
pipeline_context.set_current_stage_table(table)
pipeline_context.set_last_result_table(table, results_list)
get_stderr_console().print(table)
setattr(table, "_rendered_by_cmdlet", True)
if not Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()):
return False
try:
from contextlib import nullcontext
except Exception:
nullcontext = None # type: ignore
suspend = getattr(pipeline_context, "suspend_live_progress", None)
cm = suspend() if callable(suspend) else (nullcontext() if nullcontext else None)
if cm is None:
get_stderr_console().print(table)
setattr(table, "_rendered_by_cmdlet", True)
if not Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()):
return False
else:
with cm:
get_stderr_console().print(table)
setattr(table, "_rendered_by_cmdlet", True)
if not Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()):
try:
pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
except Exception:
pass
return False
return True
def _preflight_url_duplicates_bulk(urls: Sequence[str]) -> bool:
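
What suspend_live_progress() is assumed to do around the "Continue anyway?" prompt above: stop the pipeline's Rich Live display so the table and Confirm.ask() render on a clean terminal, then restart it afterwards. A sketch of that idea directly against Rich (the suspended() helper and the live variable are illustrative assumptions, not this repository's API):

from contextlib import contextmanager
from rich.live import Live

@contextmanager
def suspended(live: Live):
    # Stop the Live refresh loop so prompts and tables own the terminal,
    # then resume the progress display even if the prompt raises.
    live.stop()
    try:
        yield
    finally:
        live.start()
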
@@ -1597,15 +1665,45 @@ class Download_Media(Cmdlet):
    hit = backend_hits[0]
    title = hit.get("title") or hit.get("name") or hit.get("target") or hit.get("path") or "(exists)"
    file_hash = hit.get("hash") or hit.get("file_hash") or hit.get("sha256") or ""
    try:
        from result_table import build_display_row
    except Exception:
        build_display_row = None  # type: ignore
    extracted = {
        "title": str(title),
        "store": str(hit.get("store") or backend_name),
        "hash": str(file_hash or ""),
        "ext": "",
        "size": None,
    }
    if callable(build_display_row):
        try:
            extracted = build_display_row(hit, keys=["title", "store", "hash", "ext", "size"])
        except Exception:
            pass
        # Ensure we still prefer the precomputed values for title/store/hash.
        extracted["title"] = str(title)
        extracted["store"] = str(hit.get("store") or backend_name)
        extracted["hash"] = str(file_hash or "")
    ext = extracted.get("ext")
    size_val = extracted.get("size")
    display_row = {
        "title": str(title),
        "store": str(hit.get("store") or backend_name),
        "hash": str(file_hash or ""),
        "ext": str(ext or ""),
        "size": size_val,
        "url": original_url,
        "columns": [
            ("Title", str(title)),
            ("Store", str(hit.get("store") or backend_name)),
            ("Hash", str(file_hash or "")),
            ("Ext", str(ext or "")),
            ("Size", size_val),
            ("URL", original_url),
        ],
    }
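
The row above carries both flat keys and an explicit "columns" list of (header, value) pairs. Assuming ResultTable prefers the ordered pairs when they are present, a hypothetical consumer would normalize a row roughly like this (row_columns is not part of this codebase):

def row_columns(display_row: dict) -> list:
    # Prefer the explicit (header, value) pairs; otherwise derive them from flat keys.
    cols = display_row.get("columns")
    if isinstance(cols, list) and cols:
        return [(str(header), "" if value is None else str(value)) for header, value in cols]
    return [(key.title(), "" if value is None else str(value))
            for key, value in display_row.items() if key != "columns"]
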
@@ -1615,7 +1713,8 @@ class Download_Media(Cmdlet):
debug("Bulk URL preflight: no matches")
return True
table = ResultTable(f"URL already exists ({len(matched_urls)} url(s))")
# This table is non-interactive and intentionally wide (we want URL + ext/size).
table = ResultTable(f"URL already exists ({len(matched_urls)} url(s))", max_columns=10)
table.set_no_choice(True)
try:
table.set_preserve_order(True)
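
Based only on the ResultTable calls visible in this diff, the non-interactive duplicates table is assembled roughly as follows; duplicate_rows is an assumed name for the display_row dicts built per matched URL:

table = ResultTable("URL already exists (2 url(s))", max_columns=10)  # wide: keep URL/Ext/Size visible
table.set_no_choice(True)        # informational only: no '@N' selection prompt
table.set_preserve_order(True)   # keep rows in the order the URLs were checked
for row in duplicate_rows:       # assumed: the per-URL display_row dicts from above
    table.add_result(row)
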
@@ -1777,7 +1876,10 @@ class Download_Media(Cmdlet):
    table = ResultTable()
    safe_url = str(url or "").strip()
    table.title = f'download-media -url "{safe_url}"' if safe_url else "download-media"
    table.set_source_command("download-media", [url])
    # Selection tables should expand '@N' into a runnable command.
    # For playlist-item rows we prefer the concrete per-item URL so the
    # expanded command targets a single video (not the whole playlist).
    table.set_source_command("download-media", [])
    try:
        table.set_preserve_order(True)
    except Exception:
@@ -1803,6 +1905,9 @@ class Download_Media(Cmdlet):
"detail": str(uploader or ""),
"media_kind": "playlist-item",
"playlist_index": idx,
# Enable '@N' expansion into a concrete command.
# Prefer selecting the resolved per-item URL when available.
"_selection_args": (["-url", str(entry_url)] if entry_url else ["-url", str(url), "-item", str(idx)]),
# Critical for normal @ selection piping: downstream cmdlets
# (including download-media itself) look for url/target.
"url": entry_url,