"""Generic file/stream downloader.
2025-12-11 12:47:30 -08:00
2025-12-11 23:21:45 -08:00
Supports:
- Direct HTTP file URLs (PDFs, images, documents; non-yt-dlp)
- Piped provider items (uses provider.download when available)
- Streaming sites via yt-dlp (YouTube, Bandcamp, etc.)
"""
from __future__ import annotations
import sys
import re
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence
from urllib.parse import urlparse
from contextlib import AbstractContextManager, nullcontext
import requests
from API.HTTP import _download_direct_file
from SYS.models import DownloadError, DownloadOptions, DownloadMediaResult
from SYS.logger import log, debug
from SYS.pipeline_progress import PipelineProgress
from SYS.result_table import ResultTable
from SYS.rich_display import stderr_console as get_stderr_console
from SYS import pipeline as pipeline_context
from SYS.utils import sha256_file
from SYS.metadata import normalize_urls as normalize_url_list
from rich.prompt import Confirm
from tool.ytdlp import (
YtDlpTool,
_best_subtitle_sidecar,
_SUBTITLE_EXTS,
_download_with_timeout,
_format_chapters_note,
_read_text_file,
is_url_supported_by_ytdlp,
is_browseable_format,
format_for_table_selection,
list_formats,
probe_url,
)
from . import _shared as sh
Cmdlet = sh.Cmdlet
CmdletArg = sh.CmdletArg
SharedArgs = sh.SharedArgs
QueryArg = sh.QueryArg
parse_cmdlet_args = sh.parse_cmdlet_args
register_url_with_local_library = sh.register_url_with_local_library
coerce_to_pipe_object = sh.coerce_to_pipe_object
get_field = sh.get_field
class Download_File(Cmdlet):
"""Class-based download-file cmdlet - direct HTTP downloads."""
def __init__(self) -> None:
"""Initialize download-file cmdlet."""
super().__init__(
name="download-file",
summary="Download files or streaming media",
            usage="download-file <url> [-path DIR] [options] OR @N | download-file [-path DIR|DIR] [options]",
            alias=["dl-file", "download-http"],
arg=[
SharedArgs.URL,
SharedArgs.PROVIDER,
SharedArgs.PATH,
SharedArgs.QUERY,
# Prefer -path for output directory to match other cmdlets; keep -output for backwards compatibility.
CmdletArg(
name="-output",
type="string",
alias="o",
description="(deprecated) Output directory (use -path instead)",
),
CmdletArg(
name="audio",
type="flag",
alias="a",
description="Download audio only (yt-dlp)",
),
CmdletArg(
name="-magnet-id",
type="string",
description="(internal) AllDebrid magnet id used by provider selection hooks",
),
CmdletArg(
name="format",
type="string",
alias="fmt",
description="Explicit yt-dlp format selector",
),
QueryArg(
"clip",
key="clip",
aliases=["range",
"section",
"sections"],
type="string",
required=False,
description=(
"Clip time ranges via -query keyed fields (e.g. clip:1m-2m or clip:00:01-00:10). "
"Comma-separated values supported."
),
query_only=True,
),
CmdletArg(
name="item",
type="string",
description="Item selection for playlists/formats",
),
],
detail=[
"Download files directly via HTTP or streaming media via yt-dlp.",
"For Internet Archive item pages (archive.org/details/...), shows a selectable file/format list; pick with @N to download.",
],
exec=self.run,
)
self.register()
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""Main execution method."""
debug(f"[download-file] run invoked with args: {list(args)}")
return self._run_impl(result, args, config)
def _process_explicit_urls(
self,
*,
raw_urls: Sequence[str],
final_output_dir: Path,
        config: Dict[str, Any],
quiet_mode: bool,
        registry: Dict[str, Any],
progress: PipelineProgress,
context_items: Sequence[Any] = (),
    ) -> tuple[int, Optional[int]]:
downloaded_count = 0
SearchResult = registry.get("SearchResult")
get_provider = registry.get("get_provider")
match_provider_name_for_url = registry.get("match_provider_name_for_url")
for url in raw_urls:
try:
debug(f"Processing URL: {url}")
# Check providers first
provider_name = None
if match_provider_name_for_url:
try:
provider_name = match_provider_name_for_url(str(url))
except Exception:
pass
provider = None
if provider_name and get_provider:
provider = get_provider(provider_name, config)
if provider:
debug(f"Provider {provider_name} claimed {url}")
try:
# Try generic handle_url
if hasattr(provider, "handle_url"):
handled, path = provider.handle_url(str(url), output_dir=final_output_dir)
if handled:
if path:
self._emit_local_file(
downloaded_path=Path(str(path)),
source=str(url),
title_hint=Path(str(path)).stem,
tags_hint=None,
media_kind_hint="file",
full_metadata=None,
progress=progress,
config=config,
provider_hint=provider_name
)
downloaded_count += 1
continue
# Try generic download_url
elif hasattr(provider, "download_url"):
downloaded_path = provider.download_url(str(url), final_output_dir)
if downloaded_path:
self._emit_local_file(
downloaded_path=Path(downloaded_path),
source=str(url),
title_hint=Path(str(downloaded_path)).stem,
tags_hint=None,
media_kind_hint="file",
full_metadata=None,
provider_hint=provider_name,
progress=progress,
config=config,
)
downloaded_count += 1
continue
except Exception as e:
log(f"Provider {provider_name} error handling {url}: {e}", file=sys.stderr)
                        # A provider claimed this URL but its download failed; do not
                        # fall back to a direct download, just move on to the next URL.
continue
# Direct Download Fallback
result_obj = _download_direct_file(
str(url),
final_output_dir,
quiet=quiet_mode,
pipeline_progress=progress,
)
downloaded_path = self._path_from_download_result(result_obj)
self._emit_local_file(
downloaded_path=downloaded_path,
source=str(url),
title_hint=downloaded_path.stem,
tags_hint=[f"title:{downloaded_path.stem}"],
media_kind_hint="file",
full_metadata=None,
progress=progress,
config=config,
)
downloaded_count += 1
debug("✓ Downloaded and emitted")
except DownloadError as e:
log(f"Download failed for {url}: {e}", file=sys.stderr)
except Exception as e:
log(f"Error processing {url}: {e}", file=sys.stderr)
return downloaded_count, None
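    # Note: _process_explicit_urls dispatches each URL in this order: provider.handle_url,
    # then provider.download_url, then the direct HTTP fallback via _download_direct_file.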
def _expand_provider_items(
self,
*,
piped_items: Sequence[Any],
        registry: Dict[str, Any],
        config: Dict[str, Any],
) -> List[Any]:
get_search_provider = registry.get("get_search_provider")
expanded_items: List[Any] = []
for item in piped_items:
try:
table = get_field(item, "table")
provider_key = str(table).split(".")[0] if table else None
provider = get_search_provider(provider_key, config) if provider_key and get_search_provider else None
# Generic hook: If provider has expand_item(item), use it.
if provider and hasattr(provider, "expand_item") and callable(provider.expand_item):
try:
sub_items = provider.expand_item(item)
if sub_items:
expanded_items.extend(sub_items)
continue
except Exception as e:
debug(f"Provider {provider_key} expand_item failed: {e}")
expanded_items.append(item)
except Exception:
expanded_items.append(item)
return expanded_items
def _process_provider_items(self,
*,
piped_items: Sequence[Any],
final_output_dir: Path,
        config: Dict[str, Any],
        quiet_mode: bool,
        registry: Dict[str, Any],
progress: PipelineProgress,
) -> tuple[int, int]:
downloaded_count = 0
queued_magnet_submissions = 0
get_search_provider = registry.get("get_search_provider")
SearchResult = registry.get("SearchResult")
expanded_items = self._expand_provider_items(
piped_items=piped_items,
registry=registry,
config=config
)
total_items = len(expanded_items)
processed_items = 0
try:
if total_items:
progress.set_percent(0)
except Exception:
pass
for item in expanded_items:
try:
label = "item"
table = get_field(item, "table")
title = get_field(item, "title")
target = get_field(item, "path") or get_field(item, "url")
media_kind = get_field(item, "media_kind")
tags_val = get_field(item, "tag")
tags_list: Optional[List[str]]
if isinstance(tags_val, (list, set)):
tags_list = sorted([str(t) for t in tags_val if t])
else:
tags_list = None
full_metadata = get_field(item, "full_metadata")
                if (not full_metadata) and isinstance(item, dict) and isinstance(item.get("extra"), dict):
extra_md = item["extra"].get("full_metadata")
if isinstance(extra_md, dict):
full_metadata = extra_md
try:
label = title or target
label = str(label or "item").strip()
if total_items:
pct = int(round((processed_items / max(1, total_items)) * 100))
progress.set_percent(pct)
progress.set_status(
f"downloading {processed_items + 1}/{total_items}: {label}"
)
except Exception:
pass
transfer_label = label
table_type = str(table or "").lower()
if table_type == "tidal" or table_type.startswith("tidal."):
try:
progress.begin_transfer(label=transfer_label, total=None)
except Exception:
pass
# If this looks like a provider item and providers are available, prefer provider.download()
downloaded_path: Optional[Path] = None
attempted_provider_download = False
provider_sr = None
provider_obj = None
if table and get_search_provider and SearchResult:
# Strip sub-table suffix (e.g. tidal.track -> tidal) to find the provider key
provider_key = str(table).split(".")[0]
provider_obj = get_search_provider(provider_key, config)
if provider_obj is not None:
attempted_provider_download = True
sr = SearchResult(
table=str(table),
title=str(title or "Unknown"),
path=str(target or ""),
tag=set(tags_list) if tags_list else set(),
media_kind=str(media_kind or "file"),
                            full_metadata=full_metadata if isinstance(full_metadata, dict) else {},
                        )
debug(
f"[download-file] Downloading provider item via {table}: {sr.title}"
)
# Preserve provider structure when possible (AllDebrid folders -> subfolders).
output_dir = final_output_dir
                        # A provider hook could supply its own output_dir here; the default is used for now.
downloaded_path = provider_obj.download(sr, output_dir)
provider_sr = sr
if downloaded_path is None:
                            # Legacy providers may expose a 'download_items' callback instead
                            # of download(); detect it, but only download() is supported here.
                            download_items = getattr(provider_obj, "download_items", None)
                            if callable(download_items):
                                # Generic callback support could be added if needed;
                                # the plain download() path is preferred.
                                pass
# Fallback: if we have a direct HTTP URL, download it directly
                    if downloaded_path is None and isinstance(target, str) and target.startswith("http"):
# Generic guard for known "not-a-file" URLs could go here or in a helper,
# but for now we rely on user or provider.
debug(
f"[download-file] Provider item looks like direct URL, downloading: {target}"
)
suggested_name = str(title).strip() if title is not None else None
result_obj = _download_direct_file(
target,
final_output_dir,
quiet=quiet_mode,
suggested_filename=suggested_name,
pipeline_progress=progress,
)
downloaded_path = self._path_from_download_result(result_obj)
if downloaded_path is None:
log(
f"Cannot download item (no provider handler / unsupported target): {title or target}",
file=sys.stderr,
)
continue
# Allow providers to add/enrich tags and metadata during download.
if provider_sr is not None:
try:
sr_md = getattr(provider_sr, "full_metadata", None)
if isinstance(sr_md, dict) and sr_md:
debug(f"[download-file] Syncing full_metadata from provider_sr (keys={list(sr_md.keys())})")
full_metadata = sr_md
except Exception:
pass
try:
if isinstance(full_metadata, dict):
t = str(full_metadata.get("title") or "").strip()
if t:
title = t
except Exception:
pass
# Prefer tags from the search result object if the provider mutated them during download.
try:
sr_tags = getattr(provider_sr, "tag", None)
if isinstance(sr_tags, (set, list)) and sr_tags:
debug(f"[download-file] Syncing tags_list from provider_sr (count={len(sr_tags)})")
# Re-sync tags_list with the potentially enriched provider_sr.tag
tags_list = sorted([str(t) for t in sr_tags if t])
except Exception:
pass
self._emit_local_file(
downloaded_path=downloaded_path,
source=str(target) if target else None,
title_hint=str(title) if title else downloaded_path.stem,
tags_hint=tags_list,
media_kind_hint=str(media_kind) if media_kind else None,
                    full_metadata=full_metadata if isinstance(full_metadata, dict) else None,
progress=progress,
config=config,
)
downloaded_count += 1
except DownloadError as e:
log(f"Download failed: {e}", file=sys.stderr)
except Exception as e:
log(f"Error downloading item: {e}", file=sys.stderr)
finally:
table_type = str(table or "").lower()
if table_type == "tidal" or table_type.startswith("tidal."):
try:
progress.finish_transfer(label=transfer_label)
except Exception:
pass
processed_items += 1
try:
pct = int(round((processed_items / max(1, total_items)) * 100))
progress.set_percent(pct)
if processed_items >= total_items:
progress.clear_status()
except Exception:
pass
return downloaded_count, queued_magnet_submissions
@staticmethod
def _path_from_download_result(result_obj: Any) -> Path:
file_path = None
if hasattr(result_obj, "path"):
file_path = getattr(result_obj, "path")
elif isinstance(result_obj, dict):
file_path = result_obj.get("path")
if not file_path:
file_path = str(result_obj)
return Path(str(file_path))
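    # Illustrative: accepts a DownloadMediaResult-style object (uses .path), a
    # {"path": ...} dict, or anything else (str()-coerced), e.g.
    #   _path_from_download_result({"path": "/tmp/a.pdf"}) -> Path("/tmp/a.pdf")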
def _emit_local_file(
self,
*,
downloaded_path: Path,
source: Optional[str],
title_hint: Optional[str],
tags_hint: Optional[List[str]],
media_kind_hint: Optional[str],
full_metadata: Optional[Dict[str, Any]],
progress: PipelineProgress,
config: Dict[str, Any],
provider_hint: Optional[str] = None,
) -> None:
title_val = (title_hint or downloaded_path.stem or "Unknown").strip() or downloaded_path.stem
hash_value = self._compute_file_hash(downloaded_path)
notes: Optional[Dict[str, str]] = None
try:
if isinstance(full_metadata, dict):
subtitles = full_metadata.get("_tidal_lyrics_subtitles")
if isinstance(subtitles, str) and subtitles.strip():
notes = {"lyric": subtitles}
except Exception:
notes = None
tag: List[str] = []
if tags_hint:
tag.extend([str(t) for t in tags_hint if t])
if not any(str(t).lower().startswith("title:") for t in tag):
tag.insert(0, f"title:{title_val}")
payload: Dict[str, Any] = {
"path": str(downloaded_path),
"hash": hash_value,
"title": title_val,
"action": "cmdlet:download-file",
"download_mode": "file",
"store": "local",
"media_kind": media_kind_hint or "file",
"tag": tag,
}
if provider_hint:
payload["provider"] = str(provider_hint)
if full_metadata:
payload["metadata"] = full_metadata
if notes:
payload["notes"] = notes
if source and str(source).startswith("http"):
payload["url"] = source
elif source:
payload["source_url"] = source
pipeline_context.emit(payload)
@staticmethod
def _normalize_urls(parsed: Dict[str, Any]) -> List[str]:
urls: List[str] = []
url_value: Any = None
if isinstance(parsed, dict):
url_value = parsed.get("url")
try:
urls = normalize_url_list(url_value)
except Exception:
urls = []
if not urls and isinstance(parsed, dict):
query_val = parsed.get("query")
try:
if isinstance(query_val, str) and query_val.strip().lower().startswith("url:"):
urls = normalize_url_list(query_val)
except Exception:
pass
return urls
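    # Sketch of accepted inputs: parsed["url"] (str or list) is normalized via
    # SYS.metadata.normalize_urls first; failing that, a -query value of the form
    # "url:<...>" is accepted as a fallback.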
@staticmethod
def _collect_piped_items_if_no_urls(result: Any, raw_url: Sequence[str]) -> List[Any]:
if raw_url:
return []
if result is None:
return []
if isinstance(result, list):
return list(result)
return [result]
@staticmethod
def _load_provider_registry() -> Dict[str, Any]:
"""Lightweight accessor for provider helpers without hard dependencies."""
try:
from ProviderCore import registry as provider_registry # type: ignore
from ProviderCore.base import SearchResult # type: ignore
return {
"get_provider": getattr(provider_registry, "get_provider", None),
"get_search_provider": getattr(provider_registry, "get_search_provider", None),
"match_provider_name_for_url": getattr(provider_registry, "match_provider_name_for_url", None),
"SearchResult": SearchResult,
}
except Exception:
return {
"get_provider": None,
"get_search_provider": None,
"match_provider_name_for_url": None,
"SearchResult": None,
}
@staticmethod
def _safe_total_items(raw_url: Sequence[str], piped_items: Sequence[Any]) -> int:
"""Return a sane item count for progress display."""
try:
url_count = len(raw_url or [])
except Exception:
url_count = 0
try:
piped_count = len(piped_items or [])
except Exception:
piped_count = 0
total = url_count + piped_count
return total if total > 0 else 1
@staticmethod
def _build_preview(raw_url: Sequence[str], piped_items: Sequence[Any], total_items: int) -> List[str]:
"""Construct a short preview list for the local progress UI."""
preview: List[str] = []
try:
for url in raw_url or []:
if len(preview) >= 5:
break
preview.append(str(url))
except Exception:
pass
if len(preview) < 5:
try:
items = piped_items if isinstance(piped_items, list) else list(piped_items or [])
except Exception:
items = []
for item in items:
if len(preview) >= 5:
break
try:
label = get_field(item, "title") or get_field(item, "path") or get_field(item, "url")
except Exception:
label = None
if label:
preview.append(str(label))
# If we still have nothing, supply a generic placeholder to avoid empty previews.
if not preview and total_items:
preview.append(f"{total_items} item(s)")
return preview
# === Streaming helpers (yt-dlp) ===
@staticmethod
def _append_urls_from_piped_result(raw_urls: List[str], result: Any) -> List[str]:
if raw_urls:
return raw_urls
if not result:
return raw_urls
results_to_check = result if isinstance(result, list) else [result]
for item in results_to_check:
try:
url = get_field(item, "url") or get_field(item, "target")
except Exception:
url = None
if url:
raw_urls.append(url)
return raw_urls
@staticmethod
def _filter_supported_urls(raw_urls: Sequence[str]) -> tuple[List[str], List[str]]:
supported = [url for url in (raw_urls or []) if is_url_supported_by_ytdlp(url)]
unsupported = list(set(raw_urls or []) - set(supported or []))
return supported, unsupported
def _parse_query_keyed_spec(self, query_spec: Optional[str]) -> Dict[str, List[str]]:
if not query_spec:
return {}
try:
keyed = self._parse_keyed_csv_spec(str(query_spec), default_key="hash")
if not keyed:
return {}
def _alias(src: str, dest: str) -> None:
try:
values = keyed.get(src)
except Exception:
values = None
if not values:
return
try:
keyed.setdefault(dest, []).extend(list(values))
except Exception:
pass
try:
keyed.pop(src, None)
except Exception:
pass
for src in ("range", "ranges", "section", "sections"):
_alias(src, "clip")
for src in ("fmt", "f"):
_alias(src, "format")
for src in ("aud", "a"):
_alias(src, "audio")
return keyed
except Exception:
return {}
@staticmethod
def _extract_hash_override(query_spec: Optional[str], query_keyed: Dict[str, List[str]]) -> Optional[str]:
try:
hash_values = query_keyed.get("hash", []) if isinstance(query_keyed, dict) else []
hash_candidate = hash_values[-1] if hash_values else None
if hash_candidate:
return sh.parse_single_hash_query(f"hash:{hash_candidate}")
try:
has_non_hash_keys = bool(
query_keyed
and isinstance(query_keyed, dict)
and any(k for k in query_keyed.keys() if str(k).strip().lower() != "hash")
)
except Exception:
has_non_hash_keys = False
if has_non_hash_keys:
return None
return sh.parse_single_hash_query(str(query_spec)) if query_spec else None
except Exception:
return None
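    # Illustrative: -query "hash:<sha256>" resolves via sh.parse_single_hash_query;
    # a bare hash spec is honored only when no other keyed fields are present.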
def _parse_clip_ranges_and_apply_items(
self,
*,
clip_spec: Optional[str],
query_keyed: Dict[str, List[str]],
parsed: Dict[str, Any],
query_spec: Optional[str],
) -> tuple[Optional[List[tuple[int, int]]], bool, List[str]]:
clip_ranges: Optional[List[tuple[int, int]]] = None
clip_values: List[str] = []
item_values: List[str] = []
def _uniq(values: Sequence[str]) -> List[str]:
seen: set[str] = set()
out: List[str] = []
for v in values:
key = str(v)
if key in seen:
continue
seen.add(key)
out.append(v)
return out
if clip_spec:
keyed = self._parse_keyed_csv_spec(str(clip_spec), default_key="clip")
clip_values.extend(keyed.get("clip", []) or [])
item_values.extend(keyed.get("item", []) or [])
if query_keyed:
clip_values.extend(query_keyed.get("clip", []) or [])
item_values.extend(query_keyed.get("item", []) or [])
clip_values = _uniq(clip_values)
item_values = _uniq(item_values)
if item_values and not parsed.get("item"):
parsed["item"] = ",".join([v for v in item_values if v])
if clip_values:
clip_ranges = self._parse_time_ranges(",".join([v for v in clip_values if v]))
if not clip_ranges:
bad_spec = clip_spec or query_spec
log(f"Invalid clip format: {bad_spec}", file=sys.stderr)
return None, True, clip_values
return clip_ranges, False, clip_values
@staticmethod
def _init_storage(config: Dict[str, Any]) -> tuple[Optional[Any], bool]:
storage = None
hydrus_available = True
try:
from Store import Store
from API.HydrusNetwork import is_hydrus_available
storage = Store(config=config or {}, suppress_debug=True)
hydrus_available = bool(is_hydrus_available(config or {}))
except Exception:
storage = None
return storage, hydrus_available
@staticmethod
def _cookiefile_str(ytdlp_tool: YtDlpTool) -> Optional[str]:
try:
cookie_path = ytdlp_tool.resolve_cookiefile()
if cookie_path is not None and cookie_path.is_file():
return str(cookie_path)
except Exception:
pass
return None
def _list_formats_cached(
self,
u: str,
*,
playlist_items_value: Optional[str],
formats_cache: Dict[str, Optional[List[Dict[str, Any]]]],
ytdlp_tool: YtDlpTool,
) -> Optional[List[Dict[str, Any]]]:
key = f"{u}||{playlist_items_value or ''}"
if key in formats_cache:
return formats_cache[key]
fmts = list_formats(
u,
no_playlist=False,
playlist_items=playlist_items_value,
cookiefile=self._cookiefile_str(ytdlp_tool),
)
formats_cache[key] = fmts
return fmts
def _is_browseable_format(self, fmt: Any) -> bool:
"""Check if format is user-browseable. Delegates to ytdlp helper."""
return is_browseable_format(fmt)
def _format_id_for_query_index(
self,
query_format: str,
url: str,
formats_cache: Dict[str, Optional[List[Dict[str, Any]]]],
ytdlp_tool: YtDlpTool,
) -> Optional[str]:
if not query_format or not re.match(r"^\s*#?\d+\s*$", str(query_format)):
return None
try:
idx = int(str(query_format).lstrip("#").strip())
except Exception:
raise ValueError(f"Invalid format index: {query_format}")
fmts = self._list_formats_cached(
url,
playlist_items_value=None,
formats_cache=formats_cache,
ytdlp_tool=ytdlp_tool,
)
if not fmts:
raise ValueError("Unable to list formats for the URL; cannot resolve numeric format index")
candidate_formats = [f for f in fmts if self._is_browseable_format(f)]
filtered_formats = candidate_formats if candidate_formats else list(fmts)
if not filtered_formats:
raise ValueError("No formats available for selection")
if idx <= 0 or idx > len(filtered_formats):
raise ValueError(f"Format index {idx} out of range (1..{len(filtered_formats)})")
chosen = filtered_formats[idx - 1]
selection_format_id = str(chosen.get("format_id") or "").strip()
if not selection_format_id:
raise ValueError("Selected format has no format_id")
try:
vcodec = str(chosen.get("vcodec", "none"))
acodec = str(chosen.get("acodec", "none"))
if vcodec != "none" and acodec == "none":
selection_format_id = f"{selection_format_id}+ba"
except Exception:
pass
return selection_format_id
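    # Illustrative: -query "format:3" selects the 3rd browseable row reported by
    # list_formats; a video-only pick (vcodec set, acodec "none") is widened to
    # "<format_id>+ba" so audio gets merged in.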
@staticmethod
def _format_selector_for_query_height(query_format: str) -> Optional[str]:
if query_format is None:
return None
s = str(query_format).strip().lower()
m = re.match(r"^(\d{2,5})p$", s)
if not m:
return None
try:
height = int(m.group(1))
except Exception:
return None
if height <= 0:
raise ValueError(f"Invalid height selection: {query_format}")
return f"bv*[height<={height}]+ba"
@staticmethod
def _canonicalize_url_for_storage(*, requested_url: str, ytdlp_tool: YtDlpTool, playlist_items: Optional[str]) -> str:
if playlist_items:
return str(requested_url)
try:
cf = None
try:
cookie_path = ytdlp_tool.resolve_cookiefile()
if cookie_path is not None and cookie_path.is_file():
cf = str(cookie_path)
except Exception:
cf = None
pr = probe_url(requested_url, no_playlist=False, timeout_seconds=15, cookiefile=cf)
if isinstance(pr, dict):
for key in ("webpage_url", "original_url", "url", "requested_url"):
value = pr.get(key)
if isinstance(value, str) and value.strip():
return value.strip()
except Exception:
pass
return str(requested_url)
def _preflight_url_duplicate(
self,
*,
storage: Any,
hydrus_available: bool,
final_output_dir: Path,
candidate_url: Optional[str] = None,
extra_urls: Optional[List[str]] = None,
**kwargs: Any,
2026-01-01 20:37:27 -08:00
) -> bool:
to_check = []
if candidate_url:
to_check.append(candidate_url)
if extra_urls:
to_check.extend(extra_urls)
return sh.check_url_exists_in_storage(
urls=to_check,
storage=storage,
hydrus_available=hydrus_available,
final_output_dir=final_output_dir
)
def _preflight_url_duplicates_bulk(
self,
*,
urls: List[str],
storage: Any,
hydrus_available: bool,
final_output_dir: Path,
**kwargs: Any,
) -> bool:
return sh.check_url_exists_in_storage(
urls=urls,
storage=storage,
hydrus_available=hydrus_available,
final_output_dir=final_output_dir
)
def _maybe_show_playlist_table(self, *, url: str, ytdlp_tool: YtDlpTool) -> bool:
try:
cf = self._cookiefile_str(ytdlp_tool)
pr = probe_url(url, no_playlist=False, timeout_seconds=15, cookiefile=cf)
except Exception:
pr = None
if not isinstance(pr, dict):
return False
entries = pr.get("entries")
if not isinstance(entries, list) or len(entries) <= 1:
return False
extractor_name = ""
try:
extractor_name = str(pr.get("extractor") or pr.get("extractor_key") or "").strip().lower()
except Exception:
extractor_name = ""
table_type: Optional[str] = None
if "bandcamp" in extractor_name:
table_type = "bandcamp"
elif "youtube" in extractor_name:
table_type = "youtube"
max_rows = 200
display_entries = entries[:max_rows]
def _entry_to_url(entry: Any) -> Optional[str]:
if not isinstance(entry, dict):
return None
for key in ("webpage_url", "original_url", "url"):
v = entry.get(key)
if isinstance(v, str) and v.strip():
s_val = v.strip()
try:
if urlparse(s_val).scheme in {"http", "https"}:
return s_val
except Exception:
return s_val
entry_id = entry.get("id")
if isinstance(entry_id, str) and entry_id.strip():
extractor_name_inner = str(pr.get("extractor") or pr.get("extractor_key") or "").lower()
if "youtube" in extractor_name_inner:
return f"https://www.youtube.com/watch?v={entry_id.strip()}"
return None
table = ResultTable()
safe_url = str(url or "").strip()
table.title = f'download-file -url "{safe_url}"' if safe_url else "download-file"
if table_type:
try:
table.set_table(table_type)
except Exception:
table.table = table_type
table.set_source_command("download-file", [])
try:
table.set_preserve_order(True)
except Exception:
pass
results_list: List[Dict[str, Any]] = []
for idx, entry in enumerate(display_entries, 1):
title = None
uploader = None
duration = None
entry_url = _entry_to_url(entry)
try:
if isinstance(entry, dict):
title = entry.get("title")
uploader = entry.get("uploader") or pr.get("uploader")
duration = entry.get("duration")
except Exception:
pass
row: Dict[str, Any] = {
"table": "download-file",
"title": str(title or f"Item {idx}"),
"detail": str(uploader or ""),
"media_kind": "playlist-item",
"playlist_index": idx,
"_selection_args": (["-url", str(entry_url)] if entry_url else ["-url", str(url), "-item", str(idx)]),
"url": entry_url,
"target": entry_url,
"columns": [
("#", str(idx)),
("Title", str(title or "")),
("Duration", str(duration or "")),
("Uploader", str(uploader or "")),
],
}
results_list.append(row)
table.add_result(row)
pipeline_context.set_current_stage_table(table)
pipeline_context.set_last_result_table(table, results_list)
try:
suspend = getattr(pipeline_context, "suspend_live_progress", None)
cm: AbstractContextManager[Any] = nullcontext()
if callable(suspend):
maybe_cm = suspend()
if maybe_cm is not None:
cm = maybe_cm # type: ignore[assignment]
with cm:
get_stderr_console().print(table)
except Exception:
pass
setattr(table, "_rendered_by_cmdlet", True)
return True
def _maybe_show_format_table_for_single_url(
self,
*,
mode: str,
clip_spec: Any,
clip_values: Sequence[str],
playlist_items: Optional[str],
ytdl_format: Any,
supported_url: Sequence[str],
playlist_selection_handled: bool,
ytdlp_tool: YtDlpTool,
formats_cache: Dict[str, Optional[List[Dict[str, Any]]]],
storage: Any,
hydrus_available: bool,
final_output_dir: Path,
args: Sequence[str],
) -> Optional[int]:
if (
mode != "audio"
and not clip_spec
and not clip_values
and not playlist_items
and not ytdl_format
and len(supported_url) == 1
and not playlist_selection_handled
):
url = supported_url[0]
canonical_url = self._canonicalize_url_for_storage(
requested_url=url,
ytdlp_tool=ytdlp_tool,
playlist_items=playlist_items,
)
if not self._preflight_url_duplicate(
storage=storage,
hydrus_available=hydrus_available,
final_output_dir=final_output_dir,
candidate_url=canonical_url,
extra_urls=[url],
):
log(f"Skipping download: {url}", file=sys.stderr)
return 0
formats = self._list_formats_cached(
url,
playlist_items_value=None,
formats_cache=formats_cache,
ytdlp_tool=ytdlp_tool,
)
if formats and len(formats) > 1:
candidate_formats = [f for f in formats if self._is_browseable_format(f)]
filtered_formats = candidate_formats if candidate_formats else list(formats)
debug(f"Formatlist: showing {len(filtered_formats)} formats (raw={len(formats)})")
base_cmd = f'download-file "{url}"'
            remaining_args = [arg for arg in args if arg != url and not arg.startswith("-")]
if remaining_args:
base_cmd += " " + " ".join(remaining_args)
table = ResultTable(title=f"Available formats for {url}", max_columns=10, preserve_order=True)
table.set_table("ytdlp.formatlist")
table.set_source_command("download-file", [url])
debug(f"[ytdlp.formatlist] Displaying format selection table for {url}")
debug(f"[ytdlp.formatlist] Provider: ytdlp (routing to download-file via TABLE_AUTO_STAGES)")
results_list: List[Dict[str, Any]] = []
for idx, fmt in enumerate(filtered_formats, 1):
                    vcodec = fmt.get("vcodec", "none")
                    acodec = fmt.get("acodec", "none")
                    format_id = fmt.get("format_id", "")
                    selection_format_id = format_id
                    try:
                        if vcodec != "none" and acodec == "none" and format_id:
                            selection_format_id = f"{format_id}+ba"
                    except Exception:
                        selection_format_id = format_id
# Use ytdlp helper to format for table
format_dict = format_for_table_selection(
fmt,
url,
idx,
selection_format_id=selection_format_id,
)
# Add base command for display
format_dict["cmd"] = base_cmd
# Append clip values to selection args if needed
selection_args: List[str] = format_dict["_selection_args"].copy()
try:
if (not clip_spec) and clip_values:
selection_args.extend(["-query", f"clip:{','.join([v for v in clip_values if v])}"])
except Exception:
pass
format_dict["_selection_args"] = selection_args
# Also update in full_metadata for provider registration
format_dict["full_metadata"]["_selection_args"] = selection_args
results_list.append(format_dict)
table.add_result(format_dict)
try:
suspend = getattr(pipeline_context, "suspend_live_progress", None)
cm: AbstractContextManager[Any] = nullcontext()
if callable(suspend):
maybe_cm = suspend()
if maybe_cm is not None:
cm = maybe_cm # type: ignore[assignment]
with cm:
get_stderr_console().print(table)
except Exception:
pass
setattr(table, "_rendered_by_cmdlet", True)
pipeline_context.set_current_stage_table(table)
pipeline_context.set_last_result_table(table, results_list)
debug(f"[ytdlp.formatlist] Format table registered with {len(results_list)} formats")
debug(f"[ytdlp.formatlist] When user selects @N, will invoke: download-file {url} -format <format_id>")
log(f"", file=sys.stderr)
return 0
return None
def _download_supported_urls(
self,
*,
supported_url: Sequence[str],
ytdlp_tool: YtDlpTool,
args: Sequence[str],
config: Dict[str, Any],
final_output_dir: Path,
mode: str,
clip_spec: Any,
clip_ranges: Optional[List[tuple[int, int]]],
query_hash_override: Optional[str],
embed_chapters: bool,
write_sub: bool,
quiet_mode: bool,
playlist_items: Optional[str],
ytdl_format: Any,
skip_per_url_preflight: bool,
forced_single_format_id: Optional[str],
forced_single_format_for_batch: bool,
formats_cache: Dict[str, Optional[List[Dict[str, Any]]]],
storage: Any,
hydrus_available: bool,
) -> int:
downloaded_count = 0
downloaded_pipe_objects: List[Dict[str, Any]] = []
pipe_seq = 0
clip_sections_spec = self._build_clip_sections_spec(clip_ranges)
if clip_sections_spec:
try:
debug(f"Clip sections spec: {clip_sections_spec}")
except Exception:
pass
for url in supported_url:
try:
debug(f"Processing: {url}")
canonical_url = self._canonicalize_url_for_storage(
requested_url=url,
ytdlp_tool=ytdlp_tool,
playlist_items=playlist_items,
)
if not skip_per_url_preflight:
if not self._preflight_url_duplicate(
storage=storage,
hydrus_available=hydrus_available,
final_output_dir=final_output_dir,
candidate_url=canonical_url,
extra_urls=[url],
):
log(f"Skipping download: {url}", file=sys.stderr)
continue
PipelineProgress(pipeline_context).begin_steps(2)
actual_format = ytdl_format
actual_playlist_items = playlist_items
if playlist_items and not ytdl_format:
if re.search(r"[^0-9,-]", playlist_items):
actual_format = playlist_items
actual_playlist_items = None
if mode == "audio" and not actual_format:
actual_format = "bestaudio"
if mode == "video" and not actual_format:
configured = (ytdlp_tool.default_format("video") or "").strip()
if configured and configured != "bestvideo+bestaudio/best":
actual_format = configured
forced_single_applied = False
if (
forced_single_format_for_batch
and forced_single_format_id
and not ytdl_format
and not actual_playlist_items
):
actual_format = forced_single_format_id
forced_single_applied = True
if (
actual_format
and isinstance(actual_format, str)
and mode != "audio"
and "+" not in actual_format
and "/" not in actual_format
and "[" not in actual_format
and actual_format not in {"best", "bv", "ba", "b"}
and not forced_single_applied
):
try:
formats = self._list_formats_cached(
url,
playlist_items_value=actual_playlist_items,
formats_cache=formats_cache,
ytdlp_tool=ytdlp_tool,
)
if formats:
fmt_match = next((f for f in formats if str(f.get("format_id", "")) == actual_format), None)
if fmt_match:
vcodec = str(fmt_match.get("vcodec", "none"))
acodec = str(fmt_match.get("acodec", "none"))
if vcodec != "none" and acodec == "none":
debug(f"Selected video-only format {actual_format}; using {actual_format}+ba for audio")
actual_format = f"{actual_format}+ba"
except Exception:
pass
attempted_single_format_fallback = False
while True:
try:
opts = DownloadOptions(
url=url,
mode=mode,
output_dir=final_output_dir,
ytdl_format=actual_format,
cookies_path=ytdlp_tool.resolve_cookiefile(),
clip_sections=clip_sections_spec,
playlist_items=actual_playlist_items,
quiet=quiet_mode,
no_playlist=False,
embed_chapters=embed_chapters,
write_sub=write_sub,
)
PipelineProgress(pipeline_context).step("downloading")
debug(f"Starting download with 5-minute timeout...")
result_obj = _download_with_timeout(opts, timeout_seconds=300)
debug(f"Download completed, building pipe object...")
break
except DownloadError as e:
cause = getattr(e, "__cause__", None)
detail = ""
try:
detail = str(cause or "")
except Exception:
detail = ""
if ("requested format is not available" in (detail or "").lower()) and mode != "audio":
if (
forced_single_format_for_batch
and forced_single_format_id
and not ytdl_format
and not actual_playlist_items
and not attempted_single_format_fallback
):
attempted_single_format_fallback = True
actual_format = forced_single_format_id
debug(f"Only one format available (playlist preflight); retrying with: {actual_format}")
continue
formats = self._list_formats_cached(
url,
playlist_items_value=actual_playlist_items,
formats_cache=formats_cache,
ytdlp_tool=ytdlp_tool,
)
if (
(not attempted_single_format_fallback)
and isinstance(formats, list)
and len(formats) == 1
and isinstance(formats[0], dict)
):
only = formats[0]
fallback_format = str(only.get("format_id") or "").strip()
selection_format_id = fallback_format
try:
vcodec = str(only.get("vcodec", "none"))
acodec = str(only.get("acodec", "none"))
if vcodec != "none" and acodec == "none" and fallback_format:
selection_format_id = f"{fallback_format}+ba"
except Exception:
selection_format_id = fallback_format
if selection_format_id:
attempted_single_format_fallback = True
actual_format = selection_format_id
debug(f"Only one format available; retrying with: {actual_format}")
continue
if formats:
formats_to_show = formats
table = ResultTable(title=f"Available formats for {url}", max_columns=10, preserve_order=True)
table.set_table("ytdlp.formatlist")
table.set_source_command("download-file", [url])
results_list: List[Dict[str, Any]] = []
for idx, fmt in enumerate(formats_to_show, 1):
resolution = fmt.get("resolution", "")
ext = fmt.get("ext", "")
vcodec = fmt.get("vcodec", "none")
acodec = fmt.get("acodec", "none")
filesize = fmt.get("filesize")
filesize_approx = fmt.get("filesize_approx")
format_id = fmt.get("format_id", "")
selection_format_id = format_id
try:
if vcodec != "none" and acodec == "none" and format_id:
selection_format_id = f"{format_id}+ba"
except Exception:
selection_format_id = format_id
size_str = ""
size_prefix = ""
size_bytes = filesize
if not size_bytes:
size_bytes = filesize_approx
if size_bytes:
size_prefix = "~"
try:
if isinstance(size_bytes, (int, float)) and size_bytes > 0:
size_mb = float(size_bytes) / (1024 * 1024)
size_str = f"{size_prefix}{size_mb:.1f}MB"
except Exception:
size_str = ""
desc_parts: List[str] = []
if resolution and resolution != "audio only":
desc_parts.append(str(resolution))
if ext:
desc_parts.append(str(ext).upper())
if vcodec != "none":
desc_parts.append(f"v:{vcodec}")
if acodec != "none":
desc_parts.append(f"a:{acodec}")
if size_str:
desc_parts.append(size_str)
format_desc = " | ".join(desc_parts)
format_dict: Dict[str, Any] = {
"table": "download-file",
"title": f"Format {format_id}",
"url": url,
"target": url,
"detail": format_desc,
"media_kind": "format",
"columns": [
("ID", format_id),
("Resolution", resolution or "N/A"),
("Ext", ext),
("Size", size_str or ""),
("Video", vcodec),
("Audio", acodec),
],
"full_metadata": {
"format_id": format_id,
"url": url,
"item_selector": selection_format_id,
},
"_selection_args": ["-format", selection_format_id],
}
results_list.append(format_dict)
table.add_result(format_dict)
pipeline_context.set_current_stage_table(table)
pipeline_context.set_last_result_table(table, results_list)
try:
suspend = getattr(pipeline_context, "suspend_live_progress", None)
cm: AbstractContextManager[Any] = nullcontext()
if callable(suspend):
maybe_cm = suspend()
if maybe_cm is not None:
cm = maybe_cm # type: ignore[assignment]
with cm:
get_stderr_console().print(table)
except Exception:
pass
PipelineProgress(pipeline_context).step("awaiting selection")
log("Requested format is not available; select a working format with @N", file=sys.stderr)
return 0
raise
results_to_emit: List[Any] = []
if isinstance(result_obj, list):
results_to_emit = list(result_obj)
else:
paths = getattr(result_obj, "paths", None)
if isinstance(paths, list) and paths:
for p in paths:
try:
p_path = Path(p)
except Exception:
continue
try:
if p_path.suffix.lower() in _SUBTITLE_EXTS:
continue
except Exception:
pass
if not p_path.exists() or p_path.is_dir():
continue
try:
hv = sha256_file(p_path)
except Exception:
hv = None
results_to_emit.append(
DownloadMediaResult(
path=p_path,
info=getattr(result_obj, "info", {}) or {},
tag=list(getattr(result_obj, "tag", []) or []),
source_url=getattr(result_obj, "source_url", None) or opts.url,
hash_value=hv,
)
)
else:
results_to_emit = [result_obj]
pipe_objects: List[Dict[str, Any]] = []
for downloaded in results_to_emit:
po = self._build_pipe_object(downloaded, url, opts)
pipe_seq += 1
try:
po.setdefault("pipe_index", pipe_seq)
except Exception:
pass
try:
info = downloaded.info if isinstance(getattr(downloaded, "info", None), dict) else {}
except Exception:
info = {}
chapters_text = _format_chapters_note(info) if embed_chapters else None
if chapters_text:
notes = po.get("notes")
if not isinstance(notes, dict):
notes = {}
notes.setdefault("chapters", chapters_text)
po["notes"] = notes
if write_sub:
try:
media_path = Path(str(po.get("path") or ""))
except Exception:
media_path = None
if media_path is not None and media_path.exists() and media_path.is_file():
sub_path = _best_subtitle_sidecar(media_path)
if sub_path is not None:
sub_text = _read_text_file(sub_path)
if sub_text:
notes = po.get("notes")
if not isinstance(notes, dict):
notes = {}
notes["sub"] = sub_text
po["notes"] = notes
try:
sub_path.unlink()
except Exception:
pass
pipe_objects.append(po)
try:
if clip_ranges and len(pipe_objects) == len(clip_ranges):
source_hash = query_hash_override or self._find_existing_hash_for_url(
storage,
canonical_url,
hydrus_available=hydrus_available,
)
self._apply_clip_decorations(pipe_objects, clip_ranges, source_king_hash=source_hash)
except Exception:
pass
debug(f"Emitting {len(pipe_objects)} result(s) to pipeline...")
PipelineProgress(pipeline_context).step("finalized")
stage_ctx = pipeline_context.get_stage_context()
emit_enabled = bool(stage_ctx is not None)
for pipe_obj_dict in pipe_objects:
if emit_enabled:
pipeline_context.emit(pipe_obj_dict)
if pipe_obj_dict.get("url"):
pipe_obj = coerce_to_pipe_object(pipe_obj_dict)
register_url_with_local_library(pipe_obj, config)
try:
downloaded_pipe_objects.append(pipe_obj_dict)
except Exception:
pass
downloaded_count += len(pipe_objects)
debug("✓ Downloaded and emitted")
except DownloadError as e:
log(f"Download failed for {url}: {e}", file=sys.stderr)
except Exception as e:
log(f"Error processing {url}: {e}", file=sys.stderr)
if downloaded_count > 0:
debug(f"✓ Successfully processed {downloaded_count} URL(s)")
return 0
log("No downloads completed", file=sys.stderr)
return 1
def _run_streaming_urls(
self,
*,
streaming_urls: List[str],
args: Sequence[str],
config: Dict[str, Any],
parsed: Dict[str, Any],
) -> int:
try:
debug("Starting streaming download handler")
ytdlp_tool = YtDlpTool(config)
raw_url = list(streaming_urls)
supported_url, unsupported_list = self._filter_supported_urls(raw_url)
if not supported_url:
log("No yt-dlp-supported url to download", file=sys.stderr)
return 1
if unsupported_list:
debug(f"Skipping {len(unsupported_list)} unsupported url (use direct HTTP mode)")
final_output_dir = self._resolve_streaming_output_dir(parsed, config)
if not final_output_dir:
return 1
debug(f"Output directory: {final_output_dir}")
try:
PipelineProgress(pipeline_context).ensure_local_ui(
label="download-file",
total_items=len(supported_url),
items_preview=supported_url,
)
except Exception:
pass
clip_spec = parsed.get("clip")
query_spec = parsed.get("query")
query_keyed = self._parse_query_keyed_spec(str(query_spec) if query_spec is not None else None)
query_hash_override = self._extract_hash_override(str(query_spec) if query_spec is not None else None, query_keyed)
embed_chapters = True
write_sub = True
query_format: Optional[str] = None
try:
fmt_values = query_keyed.get("format", []) if isinstance(query_keyed, dict) else []
fmt_candidate = fmt_values[-1] if fmt_values else None
if fmt_candidate is not None:
query_format = str(fmt_candidate).strip()
except Exception:
query_format = None
query_audio: Optional[bool] = None
try:
audio_values = query_keyed.get("audio", []) if isinstance(query_keyed, dict) else []
audio_candidate = audio_values[-1] if audio_values else None
if audio_candidate is not None:
s_val = str(audio_candidate).strip().lower()
if s_val in {"1", "true", "t", "yes", "y", "on"}:
query_audio = True
elif s_val in {"0", "false", "f", "no", "n", "off"}:
query_audio = False
elif s_val:
query_audio = True
except Exception:
query_audio = None
query_wants_audio = False
if query_format:
try:
query_wants_audio = str(query_format).strip().lower() == "audio"
except Exception:
query_wants_audio = False
audio_flag = bool(parsed.get("audio") is True)
wants_audio = audio_flag
if query_audio is not None:
wants_audio = wants_audio or bool(query_audio)
else:
wants_audio = wants_audio or bool(query_wants_audio)
mode = "audio" if wants_audio else "video"
clip_ranges, clip_invalid, clip_values = self._parse_clip_ranges_and_apply_items(
clip_spec=str(clip_spec) if clip_spec is not None else None,
query_keyed=query_keyed,
parsed=parsed,
query_spec=str(query_spec) if query_spec is not None else None,
)
if clip_invalid:
return 1
if clip_ranges:
try:
debug(f"Clip ranges: {clip_ranges}")
except Exception:
pass
quiet_mode = bool(config.get("_quiet_background_output")) if isinstance(config, dict) else False
storage, hydrus_available = self._init_storage(config if isinstance(config, dict) else {})
formats_cache: Dict[str, Optional[List[Dict[str, Any]]]] = {}
playlist_items = str(parsed.get("item")) if parsed.get("item") else None
ytdl_format = parsed.get("format")
if not ytdl_format and query_format and not query_wants_audio:
try:
height_selector = self._format_selector_for_query_height(query_format)
except ValueError as e:
log(f"Error parsing format selection: {e}", file=sys.stderr)
return 1
if height_selector:
ytdl_format = height_selector
else:
if not re.match(r"^\s*#?\d+\s*$", str(query_format)):
ytdl_format = query_format
playlist_selection_handled = False
if len(supported_url) == 1 and not playlist_items and not ytdl_format:
candidate_url = supported_url[0]
if query_format and not query_wants_audio:
try:
idx_fmt = self._format_id_for_query_index(query_format, candidate_url, formats_cache, ytdlp_tool)
except ValueError as e:
log(f"Error parsing format selection: {e}", file=sys.stderr)
return 1
if idx_fmt:
debug(f"Resolved numeric format selection '{query_format}' -> {idx_fmt}")
ytdl_format = idx_fmt
if not ytdl_format:
if self._maybe_show_playlist_table(url=candidate_url, ytdlp_tool=ytdlp_tool):
playlist_selection_handled = True
try:
last_table = pipeline_context.get_last_result_table() if hasattr(pipeline_context, "get_last_result_table") else None
if hasattr(last_table, "rows") and getattr(last_table, "rows", None):
sample_index = 1
sample_fmt_id = None
try:
sample_row = last_table.rows[0]
sample_fmt_id = sample_row._full_metadata.get("item_selector") if getattr(sample_row, "_full_metadata", None) else None
except Exception:
sample_fmt_id = None
try:
sample_pipeline = f'download-file "{candidate_url}"'
hint = (
"To select non-interactively, re-run with an explicit format: "
"e.g. mm \"{pipeline} -format {fmt} | add-file -store <store>\" or "
"mm \"{pipeline} -query 'format:{index}' | add-file -store <store>\""
).format(
pipeline=sample_pipeline,
fmt=sample_fmt_id or "<format_id>",
index=sample_index,
)
log(hint, file=sys.stderr)
except Exception:
pass
except Exception:
pass
return 0
skip_per_url_preflight = False
if len(supported_url) > 1:
if not self._preflight_url_duplicates_bulk(
storage=storage,
hydrus_available=hydrus_available,
final_output_dir=final_output_dir,
urls=list(supported_url),
):
return 0
skip_per_url_preflight = True
forced_single_format_id: Optional[str] = None
forced_single_format_for_batch = False
if len(supported_url) > 1 and not playlist_items and not ytdl_format:
try:
sample_url = str(supported_url[0])
fmts = self._list_formats_cached(
sample_url,
playlist_items_value=None,
formats_cache=formats_cache,
ytdlp_tool=ytdlp_tool,
)
if isinstance(fmts, list) and len(fmts) == 1 and isinstance(fmts[0], dict):
only_id = str(fmts[0].get("format_id") or "").strip()
if only_id:
forced_single_format_id = only_id
forced_single_format_for_batch = True
debug(
f"Playlist format preflight: only one format available; using {forced_single_format_id} for all items"
)
except Exception:
forced_single_format_id = None
forced_single_format_for_batch = False
early_ret = self._maybe_show_format_table_for_single_url(
mode=mode,
clip_spec=clip_spec,
clip_values=clip_values,
playlist_items=playlist_items,
ytdl_format=ytdl_format,
supported_url=supported_url,
playlist_selection_handled=playlist_selection_handled,
ytdlp_tool=ytdlp_tool,
formats_cache=formats_cache,
storage=storage,
hydrus_available=hydrus_available,
final_output_dir=final_output_dir,
args=args,
)
if early_ret is not None:
return int(early_ret)
return self._download_supported_urls(
supported_url=supported_url,
ytdlp_tool=ytdlp_tool,
args=args,
config=config,
final_output_dir=final_output_dir,
mode=mode,
clip_spec=clip_spec,
clip_ranges=clip_ranges,
query_hash_override=query_hash_override,
embed_chapters=embed_chapters,
write_sub=write_sub,
quiet_mode=quiet_mode,
playlist_items=playlist_items,
ytdl_format=ytdl_format,
skip_per_url_preflight=skip_per_url_preflight,
forced_single_format_id=forced_single_format_id,
forced_single_format_for_batch=forced_single_format_for_batch,
formats_cache=formats_cache,
storage=storage,
hydrus_available=hydrus_available,
)
except Exception as e:
log(f"Error in streaming download handler: {e}", file=sys.stderr)
return 1
def _resolve_streaming_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
path_override = parsed.get("path")
if path_override:
try:
candidate = Path(str(path_override)).expanduser()
if candidate.suffix:
candidate = candidate.parent
candidate.mkdir(parents=True, exist_ok=True)
debug(f"Using output directory override: {candidate}")
return candidate
except Exception as e:
log(f"Invalid -path output directory: {e}", file=sys.stderr)
return None
try:
temp_value = (config or {}).get("temp") if isinstance(config, dict) else None
except Exception:
temp_value = None
if temp_value:
try:
candidate = Path(str(temp_value)).expanduser()
candidate.mkdir(parents=True, exist_ok=True)
debug(f"Using config temp directory: {candidate}")
return candidate
except Exception as e:
log(f"Cannot use configured temp directory '{temp_value}': {e}", file=sys.stderr)
return None
try:
import tempfile
candidate = Path(tempfile.gettempdir()) / "Medios-Macina"
candidate.mkdir(parents=True, exist_ok=True)
debug(f"Using OS temp directory: {candidate}")
return candidate
except Exception as e:
log(f"Cannot create OS temp directory: {e}", file=sys.stderr)
return None
def _parse_time_ranges(self, spec: str) -> List[tuple[int, int]]:
def _to_seconds(ts: str) -> Optional[int]:
ts = str(ts).strip()
if not ts:
return None
try:
unit_match = re.fullmatch(r"(?i)\s*(?:(?P<h>\d+)h)?\s*(?:(?P<m>\d+)m)?\s*(?:(?P<s>\d+(?:\.\d+)?)s)?\s*", ts)
except Exception:
unit_match = None
if unit_match and unit_match.group(0).strip() and any(unit_match.group(g) for g in ("h", "m", "s")):
try:
hours = int(unit_match.group("h") or 0)
minutes = int(unit_match.group("m") or 0)
seconds = float(unit_match.group("s") or 0)
total = (hours * 3600) + (minutes * 60) + seconds
return int(total)
except Exception:
return None
if ":" in ts:
parts = [p.strip() for p in ts.split(":")]
if len(parts) == 2:
hh_s = "0"
mm_s, ss_s = parts
elif len(parts) == 3:
hh_s, mm_s, ss_s = parts
else:
return None
try:
hours = int(hh_s)
minutes = int(mm_s)
seconds = float(ss_s)
total = (hours * 3600) + (minutes * 60) + seconds
return int(total)
except Exception:
return None
try:
return int(float(ts))
except Exception:
return None
ranges: List[tuple[int, int]] = []
if not spec:
return ranges
for piece in str(spec).split(","):
piece = piece.strip()
if not piece:
continue
if "-" not in piece:
return []
start_s, end_s = [p.strip() for p in piece.split("-", 1)]
start = _to_seconds(start_s)
end = _to_seconds(end_s)
if start is None or end is None or start >= end:
return []
ranges.append((start, end))
return ranges
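    # Illustrative: "1m-2m,00:03:00-00:03:30" -> [(60, 120), (180, 210)]. Any
    # malformed piece (missing "-", unparseable time, start >= end) invalidates
    # the whole spec and yields [].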
@staticmethod
def _parse_keyed_csv_spec(spec: str, *, default_key: str) -> Dict[str, List[str]]:
out: Dict[str, List[str]] = {}
if not isinstance(spec, str):
spec = str(spec)
text = spec.strip()
if not text:
return out
active = (default_key or "").strip().lower() or "clip"
key_pattern = re.compile(r"^([A-Za-z_][A-Za-z0-9_-]*)\s*:\s*(.*)$")
for raw_piece in text.split(","):
piece = raw_piece.strip()
if not piece:
continue
m = key_pattern.match(piece)
if m:
active = (m.group(1) or "").strip().lower() or active
value = (m.group(2) or "").strip()
if value:
out.setdefault(active, []).append(value)
continue
out.setdefault(active, []).append(piece)
return out
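# Illustrative example (not executed): note that the active key persists across pieces.
#   _parse_keyed_csv_spec("0-30,intro: 45-60,90-120", default_key="clip")
#   -> {"clip": ["0-30"], "intro": ["45-60", "90-120"]}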
def _build_clip_sections_spec(self, clip_ranges: Optional[List[tuple[int, int]]]) -> Optional[str]:
"""Serialise (start_s, end_s) tuples into a "start-end,start-end" spec string, or None when empty."""
ranges: List[str] = []
if clip_ranges:
for start_s, end_s in clip_ranges:
ranges.append(f"{start_s}-{end_s}")
return ",".join(ranges) if ranges else None
def _build_pipe_object(self, download_result: Any, url: str, opts: DownloadOptions) -> Dict[str, Any]:
"""Convert a download result into a PipeObject-style dict (path, hash, title, url, tag, ...)."""
info: Dict[str, Any] = download_result.info if isinstance(download_result.info, dict) else {}
media_path = Path(download_result.path)
hash_value = download_result.hash_value or self._compute_file_hash(media_path)
title = info.get("title") or media_path.stem
tag = list(download_result.tag or [])
if title and f"title:{title}" not in tag:
tag.insert(0, f"title:{title}")
final_url = None
try:
page_url = info.get("webpage_url") or info.get("original_url") or info.get("url")
if page_url:
final_url = str(page_url)
except Exception:
final_url = None
if not final_url and url:
final_url = str(url)
return {
"path": str(media_path),
"hash": hash_value,
"title": title,
"url": final_url,
"tag": tag,
"action": "cmdlet:download-file",
"is_temp": True,
"ytdl_format": getattr(opts, "ytdl_format", None),
"store": getattr(opts, "storage_name", None) or getattr(opts, "storage_location", None) or "PATH",
"media_kind": "video" if opts.mode == "video" else "audio",
}
@staticmethod
def download_streaming_url_as_pipe_objects(
url: str,
config: Dict[str, Any],
*,
mode_hint: Optional[str] = None,
ytdl_format_hint: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""Download a yt-dlp-supported URL and return PipeObject-style dict(s).
This is a lightweight helper intended for cmdlets that need to expand streaming URLs
into local files without re-implementing yt-dlp glue.
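
Illustrative usage (hypothetical URL; config follows this repo's conventions):

pos = Download_File.download_streaming_url_as_pipe_objects(
"https://www.youtube.com/watch?v=...", config, mode_hint="audio")
for po in pos:
print(po["path"], po["hash"], po["media_kind"])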
"""
url_str = str(url or "").strip()
if not url_str:
return []
if not is_url_supported_by_ytdlp(url_str):
return []
try:
from SYS.config import resolve_output_dir
out_dir = resolve_output_dir(config)
if out_dir is None:
return []
except Exception:
return []
cookies_path = None
try:
cookie_candidate = YtDlpTool(config).resolve_cookiefile()
if cookie_candidate is not None and cookie_candidate.is_file():
cookies_path = cookie_candidate
except Exception:
cookies_path = None
quiet_download = False
try:
quiet_download = bool((config or {}).get("_quiet_background_output"))
except Exception:
quiet_download = False
mode = str(mode_hint or "").strip().lower() if mode_hint else ""
if mode not in {"audio", "video"}:
mode = "video"
# Determine audio/video by probing available formats; the probe outcome overrides mode_hint.
try:
cf = (
str(cookies_path)
if cookies_path is not None and cookies_path.is_file() else None
)
fmts_probe = list_formats(
url_str,
no_playlist=False,
playlist_items=None,
cookiefile=cf,
)
if isinstance(fmts_probe, list) and fmts_probe:
has_video = False
for f in fmts_probe:
if not isinstance(f, dict):
continue
vcodec = str(f.get("vcodec", "none") or "none").strip().lower()
if vcodec and vcodec != "none":
has_video = True
break
mode = "video" if has_video else "audio"
except Exception:
mode = "video"
fmt_hint = str(ytdl_format_hint).strip() if ytdl_format_hint else ""
chosen_format: Optional[str]
if fmt_hint:
chosen_format = fmt_hint
else:
chosen_format = None
if mode == "audio":
chosen_format = "bestaudio/best"
opts = DownloadOptions(
url=url_str,
mode=mode,
output_dir=Path(out_dir),
cookies_path=cookies_path,
ytdl_format=chosen_format,
quiet=quiet_download,
embed_chapters=True,
write_sub=True,
)
try:
result_obj = _download_with_timeout(opts, timeout_seconds=300)
except Exception as exc:
log(f"[download-file] Download failed for {url_str}: {exc}", file=sys.stderr)
return []
results: List[Any]
if isinstance(result_obj, list):
results = list(result_obj)
else:
paths = getattr(result_obj, "paths", None)
if isinstance(paths, list) and paths:
results = []
for p in paths:
try:
p_path = Path(p)
except Exception:
continue
if not p_path.exists() or p_path.is_dir():
continue
try:
hv = sha256_file(p_path)
except Exception:
hv = None
try:
results.append(
DownloadMediaResult(
path=p_path,
info=getattr(result_obj, "info", {}) or {},
tag=list(getattr(result_obj, "tag", []) or []),
source_url=getattr(result_obj, "source_url", None) or url_str,
hash_value=hv,
)
)
except Exception:
continue
else:
results = [result_obj]
out: List[Dict[str, Any]] = []
for downloaded in results:
try:
info = (
downloaded.info
if isinstance(getattr(downloaded, "info", None), dict) else {}
)
except Exception:
info = {}
try:
media_path = Path(str(getattr(downloaded, "path", "") or ""))
except Exception:
continue
if not media_path.exists() or media_path.is_dir():
continue
try:
hash_value = getattr(downloaded, "hash_value", None) or sha256_file(media_path)
except Exception:
hash_value = None
title = None
try:
title = info.get("title")
except Exception:
title = None
title = title or media_path.stem
tags = list(getattr(downloaded, "tag", []) or [])
if title and f"title:{title}" not in tags:
tags.insert(0, f"title:{title}")
final_url = None
try:
page_url = info.get("webpage_url") or info.get("original_url") or info.get("url")
if page_url:
final_url = str(page_url)
except Exception:
final_url = None
if not final_url:
final_url = url_str
po: Dict[str, Any] = {
"path": str(media_path),
"hash": hash_value,
"title": title,
"url": final_url,
"tag": tags,
"action": "cmdlet:download-file",
"is_temp": True,
"ytdl_format": getattr(opts, "ytdl_format", None),
"store": getattr(opts, "storage_name", None) or getattr(opts, "storage_location", None) or "PATH",
"media_kind": "video" if opts.mode == "video" else "audio",
}
try:
chapters_text = _format_chapters_note(info)
except Exception:
chapters_text = None
if chapters_text:
notes = po.get("notes")
if not isinstance(notes, dict):
notes = {}
notes.setdefault("chapters", chapters_text)
po["notes"] = notes
try:
sub_path = _best_subtitle_sidecar(media_path)
except Exception:
sub_path = None
if sub_path is not None:
sub_text = _read_text_file(sub_path)
if sub_text:
notes = po.get("notes")
if not isinstance(notes, dict):
notes = {}
notes["sub"] = sub_text
po["notes"] = notes
try:
sub_path.unlink()
except Exception:
pass
out.append(po)
return out
@staticmethod
def _normalise_hash_hex(value: Optional[str]) -> Optional[str]:
"""Return a lowercase 64-character SHA-256 hex digest, or None if value does not look like one."""
if not value or not isinstance(value, str):
return None
candidate = value.strip().lower()
if len(candidate) == 64 and all(c in "0123456789abcdef" for c in candidate):
return candidate
return None
@classmethod
def _extract_hash_from_search_hit(cls, hit: Any) -> Optional[str]:
"""Pull the first recognisable SHA-256 hex digest out of a backend search hit dict."""
if not isinstance(hit, dict):
return None
for key in ("hash", "hash_hex", "file_hash", "hydrus_hash"):
v = hit.get(key)
normalized = cls._normalise_hash_hex(str(v) if v is not None else None)
if normalized:
return normalized
return None
@classmethod
def _find_existing_hash_for_url(
cls, storage: Any, canonical_url: str, *, hydrus_available: bool
) -> Optional[str]:
"""Search non-temp storage backends for url:<canonical_url>; return the first matching file hash, if any."""
if storage is None or not canonical_url:
return None
try:
from Store.HydrusNetwork import HydrusNetwork
except Exception:
HydrusNetwork = None # type: ignore
try:
backend_names = list(storage.list_searchable_backends() or [])
except Exception:
backend_names = []
for backend_name in backend_names:
try:
backend = storage[backend_name]
except Exception:
continue
try:
if str(backend_name).strip().lower() == "temp":
continue
except Exception:
pass
try:
if HydrusNetwork is not None and isinstance(backend, HydrusNetwork) and not hydrus_available:
continue
except Exception:
pass
try:
hits = backend.search(f"url:{canonical_url}", limit=5) or []
except Exception:
hits = []
for hit in hits:
extracted = cls._extract_hash_from_search_hit(hit)
if extracted:
return extracted
return None
@staticmethod
def _format_timecode(seconds: int, *, force_hours: bool) -> str:
"""Render seconds as mm:ss, or hh:mm:ss when force_hours is True."""
total = max(0, int(seconds))
minutes, secs = divmod(total, 60)
hours, minutes = divmod(minutes, 60)
if force_hours:
return f"{hours:02d}:{minutes:02d}:{secs:02d}"
return f"{minutes:02d}:{secs:02d}"
@classmethod
def _format_clip_range(cls, start_s: int, end_s: int) -> str:
"""Format a clip range as mm:ss-mm:ss, switching to hh:mm:ss when either endpoint reaches an hour."""
force_hours = bool(start_s >= 3600 or end_s >= 3600)
return f"{cls._format_timecode(start_s, force_hours=force_hours)}-{cls._format_timecode(end_s, force_hours=force_hours)}"
@classmethod
def _apply_clip_decorations(
cls, pipe_objects: List[Dict[str, Any]], clip_ranges: List[tuple[int, int]], *, source_king_hash: Optional[str]
) -> None:
"""Retitle each pipe object after its clip range and, for multi-clip batches, attach king/alt relationship hints."""
if not pipe_objects or len(pipe_objects) != len(clip_ranges):
return
for po, (start_s, end_s) in zip(pipe_objects, clip_ranges):
clip_range = cls._format_clip_range(start_s, end_s)
clip_tag = f"clip:{clip_range}"
po["title"] = clip_tag
tags = po.get("tag")
if not isinstance(tags, list):
tags = []
tags = [t for t in tags if not str(t).strip().lower().startswith("title:")]
tags = [t for t in tags if not str(t).strip().lower().startswith("relationship:")]
tags.insert(0, f"title:{clip_tag}")
if clip_tag not in tags:
tags.append(clip_tag)
po["tag"] = tags
if len(pipe_objects) < 2:
return
hashes: List[str] = []
for po in pipe_objects:
h_val = cls._normalise_hash_hex(str(po.get("hash") or ""))
hashes.append(h_val or "")
king_hash = cls._normalise_hash_hex(source_king_hash) if source_king_hash else None
if not king_hash:
king_hash = hashes[0] if hashes and hashes[0] else None
if not king_hash:
return
alt_hashes: List[str] = [h for h in hashes if h and h != king_hash]
if not alt_hashes:
return
for po in pipe_objects:
po["relationships"] = {"king": [king_hash], "alt": list(alt_hashes)}
def _run_impl(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""Main download implementation for direct HTTP files."""
progress = PipelineProgress(pipeline_context)
prev_progress = None
had_progress_key = False
try:
debug("Starting download-file")
# Allow providers to tap into the active PipelineProgress (optional).
try:
if isinstance(config, dict):
had_progress_key = "_pipeline_progress" in config
prev_progress = config.get("_pipeline_progress")
config["_pipeline_progress"] = progress
except Exception:
pass
# Parse arguments
parsed = parse_cmdlet_args(args, self)
# quiet_mode is consumed by the provider/url handlers below; derive it once up front.
quiet_mode = bool(isinstance(config, dict) and config.get("_quiet_background_output"))
raw_url = self._normalize_urls(parsed)
piped_items = self._collect_piped_items_if_no_urls(result, raw_url)
# Handle TABLE_AUTO_STAGES routing: if a piped PipeObject has _selection_args,
# re-invoke download-file with those args instead of processing the PipeObject itself
if piped_items and not raw_url:
for item in piped_items:
try:
if hasattr(item, 'metadata') and isinstance(item.metadata, dict):
selection_args = item.metadata.get('_selection_args')
if selection_args and isinstance(selection_args, (list, tuple)):
# Found selection args - extract URL and re-invoke with format args
item_url = getattr(item, 'url', None) or item.metadata.get('url')
if item_url:
debug(f"[ytdlp] Detected selection args from table selection: {selection_args}")
# Reconstruct args: URL + selection args
new_args = [str(item_url)] + [str(arg) for arg in selection_args]
debug(f"[ytdlp] Re-invoking download-file with: {new_args}")
# Recursively call _run_impl with the new args
return self._run_impl(None, new_args, config)
except Exception as e:
debug(f"[ytdlp] Error handling selection args: {e}")
had_piped_input = False
try:
had_piped_input = bool(result)
except Exception:
had_piped_input = False
# UX: In piped mode, allow a single positional arg to be the destination directory.
# Example: @1-4 | download-file "C:\\Users\\Me\\Downloads\\yoyo"
if (had_piped_input and raw_url and len(raw_url) == 1
and (not parsed.get("path")) and (not parsed.get("output"))):
candidate = str(raw_url[0] or "").strip()
low = candidate.lower()
looks_like_url = low.startswith(("http://", "https://", "ftp://"))
looks_like_provider = (
":" in candidate
# Exclude Windows drive letters ("C:\...") so the documented example above is treated as a path.
and not (len(candidate) >= 2 and candidate[1] == ":")
and not candidate.startswith(("http:", "https:", "ftp:", "ftps:", "file:"))
)
looks_like_windows_path = (
(len(candidate) >= 2 and candidate[1] == ":")
or candidate.startswith("\\")
or candidate.endswith(("\\", "/"))
)
if not looks_like_url and not looks_like_provider and looks_like_windows_path:
parsed["path"] = candidate
raw_url = []
piped_items = self._collect_piped_items_if_no_urls(result, raw_url)
if not raw_url and not piped_items:
log("No url or piped items to download", file=sys.stderr)
return 1
registry = self._load_provider_registry()
# Provider pre-check (e.g. Internet Archive format picker)
picker_result = self._maybe_show_provider_picker(
raw_urls=raw_url,
piped_items=piped_items,
parsed=parsed,
config=config,
registry=registry,
)
if picker_result is not None:
return int(picker_result)
streaming_candidates = self._append_urls_from_piped_result(list(raw_url), result)
supported_streaming, unsupported_streaming = self._filter_supported_urls(streaming_candidates)
streaming_exit_code: Optional[int] = None
streaming_downloaded = 0
if supported_streaming:
streaming_exit_code = self._run_streaming_urls(
streaming_urls=supported_streaming,
args=args,
config=config,
parsed=parsed,
)
if streaming_exit_code == 0:
# The streaming handler processes its whole batch; count it once for the summary.
streaming_downloaded += 1
# Only remove URLs from further processing when streaming succeeded.
raw_url = [u for u in raw_url if u not in supported_streaming]
if not raw_url and not unsupported_streaming:
piped_items = []
if not raw_url and not piped_items:
return int(streaming_exit_code or 0)
# Re-check picker if partial processing occurred
picker_result = self._maybe_show_provider_picker(
raw_urls=raw_url,
piped_items=piped_items,
parsed=parsed,
config=config,
registry=registry,
)
if picker_result is not None:
return int(picker_result)
# Get output directory
final_output_dir = self._resolve_output_dir(parsed, config)
if not final_output_dir:
return 1
debug(f"Output directory: {final_output_dir}")
# If the caller isn't running the shared pipeline Live progress UI (e.g. direct
# cmdlet execution), start a minimal local pipeline progress panel so downloads
# show consistent, Rich-formatted progress (like download-media).
total_items = self._safe_total_items(raw_url, piped_items)
preview = self._build_preview(raw_url, piped_items, total_items)
progress.ensure_local_ui(
label="download-file",
total_items=total_items,
items_preview=preview
)
downloaded_count = 0
# Special-case: support selection-inserted magnet-id arg to drive provider downloads
magnet_id_raw = parsed.get("magnet-id")
if magnet_id_raw:
try:
magnet_id = int(str(magnet_id_raw).strip())
except Exception:
log(f"[download-file] invalid magnet-id: {magnet_id_raw}", file=sys.stderr)
return 1
get_provider = registry.get("get_provider")
provider_name = str(parsed.get("provider") or "alldebrid").strip().lower()
provider_obj = None
if get_provider is not None:
try:
provider_obj = get_provider(provider_name, config)
except Exception:
provider_obj = None
if provider_obj is None:
log(f"[download-file] provider '{provider_name}' not available", file=sys.stderr)
return 1
SearchResult = registry.get("SearchResult")
try:
if SearchResult is not None:
sr = SearchResult(
table=provider_name,
title=f"magnet-{magnet_id}",
path=f"alldebrid:magnet:{magnet_id}",
full_metadata={
"magnet_id": magnet_id,
"provider": provider_name,
"provider_view": "files",
},
)
else:
sr = None
except Exception:
sr = None
def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
title_hint = metadata.get("name") or relpath or f"magnet-{magnet_id}"
self._emit_local_file(
downloaded_path=path,
source=file_url or f"alldebrid:magnet:{magnet_id}",
title_hint=title_hint,
tags_hint=None,
media_kind_hint="file",
full_metadata=metadata,
progress=progress,
config=config,
provider_hint=provider_name,
)
try:
downloaded_extra = provider_obj.download_items(
sr,
final_output_dir,
emit=_on_emit,
progress=progress,
quiet_mode=quiet_mode,
path_from_result=self._path_from_download_result,
config=config,
)
except TypeError:
downloaded_extra = provider_obj.download_items(
sr,
final_output_dir,
emit=_on_emit,
progress=progress,
quiet_mode=quiet_mode,
path_from_result=self._path_from_download_result,
)
except Exception as exc:
log(f"[download-file] failed to download magnet {magnet_id}: {exc}", file=sys.stderr)
return 1
if downloaded_extra:
debug(f"[download-file] AllDebrid magnet {magnet_id} emitted {downloaded_extra} files")
return 0
log(
f"[download-file] AllDebrid magnet {magnet_id} produced no downloads",
file=sys.stderr,
)
return 1
urls_downloaded, early_exit = self._process_explicit_urls(
raw_urls=raw_url,
final_output_dir=final_output_dir,
config=config,
quiet_mode=quiet_mode,
registry=registry,
progress=progress,
context_items=(result if isinstance(result, list) else ([result] if result else [])),
)
downloaded_count += int(urls_downloaded)
if early_exit is not None:
return int(early_exit)
provider_downloaded, magnet_submissions = self._process_provider_items(
2025-12-22 02:11:53 -08:00
piped_items=piped_items,
final_output_dir=final_output_dir,
config=config,
quiet_mode=quiet_mode,
registry=registry,
progress=progress,
)
downloaded_count += provider_downloaded
if downloaded_count > 0 or streaming_downloaded > 0 or magnet_submissions > 0:
msg = f"✓ Successfully processed {downloaded_count} file(s)"
if magnet_submissions:
msg += f" and queued {magnet_submissions} magnet(s)"
debug(msg)
return 0
if streaming_exit_code is not None:
return int(streaming_exit_code)
log("No downloads completed", file=sys.stderr)
return 1
except Exception as e:
log(f"Error in download-file: {e}", file=sys.stderr)
return 1
finally:
try:
if isinstance(config, dict):
if had_progress_key:
config["_pipeline_progress"] = prev_progress
else:
config.pop("_pipeline_progress", None)
except Exception:
pass
progress.close_local_ui(force_complete=True)
def _maybe_show_provider_picker(
self,
*,
raw_urls: Sequence[str],
piped_items: Sequence[Any],
parsed: Dict[str, Any],
config: Dict[str, Any],
registry: Dict[str, Any],
) -> Optional[int]:
"""Generic hook for providers to show a selection table (e.g. Internet Archive format picker)."""
total_inputs = len(raw_urls or []) + len(piped_items or [])
if total_inputs != 1:
return None
target_url = None
if raw_urls:
target_url = str(raw_urls[0])
elif piped_items:
target_url = str(get_field(piped_items[0], "path") or get_field(piped_items[0], "url") or "")
if not target_url:
return None
match_provider_name_for_url = registry.get("match_provider_name_for_url")
get_provider = registry.get("get_provider")
provider_name = None
if match_provider_name_for_url:
try:
provider_name = match_provider_name_for_url(target_url)
except Exception:
pass
if provider_name and get_provider:
provider = get_provider(provider_name, config)
if provider and hasattr(provider, "maybe_show_picker"):
try:
quiet_mode = bool(config.get("_quiet_background_output"))
res = provider.maybe_show_picker(
url=target_url,
item=piped_items[0] if piped_items else None,
parsed=parsed,
config=config,
quiet_mode=quiet_mode,
)
if res is not None:
return int(res)
except Exception as e:
debug(f"Provider {provider_name} picker error: {e}")
return None
def _resolve_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
"""Resolve the output directory: -path/-output argument, then -storage, then config default, then OS temp."""
output_dir_arg = parsed.get("path") or parsed.get("output")
if output_dir_arg:
try:
out_path = Path(str(output_dir_arg)).expanduser()
out_path.mkdir(parents=True, exist_ok=True)
return out_path
except Exception as e:
log(
f"Cannot use output directory {output_dir_arg}: {e}",
file=sys.stderr
)
return None
# Priority 1: --storage flag
storage_location = parsed.get("storage")
if storage_location:
try:
return SharedArgs.resolve_storage(storage_location)
except Exception as e:
log(f"Invalid storage location: {e}", file=sys.stderr)
return None
# Priority 2: Config default output/temp directory, then OS temp
try:
from SYS.config import resolve_output_dir
final_output_dir = resolve_output_dir(config)
except Exception:
import tempfile
final_output_dir = Path(tempfile.gettempdir())
debug(f"Using default directory: {final_output_dir}")
# Ensure directory exists
try:
final_output_dir.mkdir(parents=True, exist_ok=True)
except Exception as e:
log(
f"Cannot create output directory {final_output_dir}: {e}",
file=sys.stderr
)
return None
return final_output_dir
def _compute_file_hash(self, filepath: Path) -> str:
"""Compute SHA256 hash of a file."""
import hashlib
sha256_hash = hashlib.sha256()
with open(filepath, "rb") as f:
for byte_block in iter(lambda: f.read(4096), b""):
sha256_hash.update(byte_block)
return sha256_hash.hexdigest()
# Module-level singleton registration
CMDLET = Download_File()
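# Illustrative direct invocation (hypothetical URL and path; real dispatch goes through
# the Cmdlet pipeline runner):
#   CMDLET._run_impl(None, ["https://example.com/file.pdf", "-path", "/tmp/dl"], {})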