@@ -441,6 +441,35 @@ class Add_File(Cmdlet):
        ctx.emit(pipe_obj.to_dict())
        ctx.set_current_stage_table(None)

    @staticmethod
    def _emit_storage_result(payload: Dict[str, Any]) -> None:
        """Emit a storage-style result payload.

        - Always emits the dict downstream (when in a pipeline).
        - If this is the last stage (or not in a pipeline), prints a search-store-like table
          and sets an overlay table/items for @N selection.
        """
        # Always emit for downstream commands (no-op if not in a pipeline)
        ctx.emit(payload)

        stage_ctx = ctx.get_stage_context()
        is_last = (stage_ctx is None) or bool(getattr(stage_ctx, "is_last_stage", False))
        if not is_last:
            return

        try:
            from result_table import ResultTable
            table = ResultTable("Result")
            table.add_result(payload)
            # Overlay so @1 refers to this add-file result without overwriting search history
            ctx.set_last_result_table_overlay(table, [payload], subject=payload)
        except Exception:
            # If table rendering fails, still keep @ selection items
            try:
                ctx.set_last_result_items_only([payload])
            except Exception:
                pass

    @staticmethod
    def _prepare_metadata(
        result: Any,
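
The new `_emit_storage_result` helper always forwards the payload downstream but only builds the `@N` selection overlay on the last stage. A minimal sketch of that control flow with the pipeline context stubbed out; `FakeCtx`, `FakeStageContext`, and their methods are illustrative stand-ins, not the repo's real `ctx` API:

from typing import Any, Dict, List, Optional


class FakeStageContext:
    """Illustrative stand-in for the repo's stage context."""

    def __init__(self, is_last_stage: bool) -> None:
        self.is_last_stage = is_last_stage


class FakeCtx:
    """Illustrative stand-in for the cmdlet pipeline context (ctx)."""

    def __init__(self, stage: Optional[FakeStageContext]) -> None:
        self._stage = stage
        self.emitted: List[Dict[str, Any]] = []
        self.overlay: Optional[List[Dict[str, Any]]] = None

    def emit(self, payload: Dict[str, Any]) -> None:
        self.emitted.append(payload)

    def get_stage_context(self) -> Optional[FakeStageContext]:
        return self._stage

    def set_overlay(self, items: List[Dict[str, Any]]) -> None:
        self.overlay = items


def emit_storage_result(ctx: FakeCtx, payload: Dict[str, Any]) -> None:
    ctx.emit(payload)  # always forward downstream (no-op outside a pipeline)
    stage = ctx.get_stage_context()
    is_last = stage is None or bool(getattr(stage, "is_last_stage", False))
    if is_last:
        ctx.set_overlay([payload])  # only the final stage renders the @N overlay


middle = FakeCtx(FakeStageContext(is_last_stage=False))
standalone = FakeCtx(None)
emit_storage_result(middle, {"title": "a", "hash": "abc", "store": "local"})
emit_storage_result(standalone, {"title": "a", "hash": "abc", "store": "local"})
print(middle.overlay, standalone.overlay)  # None vs. [{...}]
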
@@ -788,7 +817,55 @@ class Add_File(Cmdlet):
                "url": url,
            },
        )
        Add_File._emit_pipe_object(pipe_obj)

        # Emit a search-store-like payload for consistent tables and natural piping.
        # Keep hash/store for downstream commands (get-tag, get-file, etc.).
        resolved_hash = file_identifier if len(file_identifier) == 64 else (f_hash or file_identifier or "unknown")

        meta: Dict[str, Any] = {}
        try:
            meta = backend.get_metadata(resolved_hash) or {}
        except Exception:
            meta = {}

        # Determine size bytes
        size_bytes: Optional[int] = None
        for key in ("size_bytes", "size", "filesize", "file_size"):
            try:
                raw_size = meta.get(key)
                if raw_size is not None:
                    size_bytes = int(raw_size)
                    break
            except Exception:
                pass
        if size_bytes is None:
            try:
                size_bytes = int(media_path.stat().st_size)
            except Exception:
                size_bytes = None

        # Determine title/ext
        title_out = (
            meta.get("title")
            or title
            or pipe_obj.title
            or media_path.stem
            or media_path.name
        )
        ext_out = (meta.get("ext") or media_path.suffix.lstrip("."))

        payload: Dict[str, Any] = {
            "title": title_out,
            "ext": str(ext_out or ""),
            "size_bytes": size_bytes,
            "store": backend_name,
            "hash": resolved_hash,
            # Preserve extra fields for downstream commands (kept hidden by default table rules)
            "path": stored_path,
            "tag": list(tags or []),
            "url": list(url or []),
        }
        Add_File._emit_storage_result(payload)

        Add_File._cleanup_after_success(media_path, delete_source=delete_after)
        return 0
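
The payload construction falls back through several metadata keys before touching the filesystem. A self-contained sketch of the same fallback order, with the backend metadata passed in as a plain dict (the helper name here is hypothetical):

from pathlib import Path
from typing import Any, Dict, Optional


def resolve_size_bytes(meta: Dict[str, Any], media_path: Path) -> Optional[int]:
    # Prefer whichever size-like key the backend happened to populate.
    for key in ("size_bytes", "size", "filesize", "file_size"):
        raw = meta.get(key)
        if raw is not None:
            try:
                return int(raw)
            except (TypeError, ValueError):
                continue
    # Fall back to the file on disk; the path may already be gone.
    try:
        return media_path.stat().st_size
    except OSError:
        return None


print(resolve_size_bytes({"filesize": "2048"}, Path("missing.bin")))  # 2048
print(resolve_size_bytes({}, Path("missing.bin")))                    # None
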
@@ -57,6 +57,9 @@ except ImportError:

_EXTRACTOR_CACHE: List[Any] | None = None

# Reused progress formatter for yt-dlp callbacks (stderr only).
_YTDLP_PROGRESS_BAR = ProgressBar()


def _ensure_yt_dlp_ready() -> None:
    if yt_dlp is not None:
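
`ProgressBar` is defined elsewhere in the repo; the point of the new module-level `_YTDLP_PROGRESS_BAR` is that every yt-dlp hook call reuses one formatter instance. A rough stand-in to make the assumed shape of `format_progress` concrete (not the real class):

class MiniProgressFormatter:
    """Illustrative stand-in for the repo's ProgressBar; only format_progress is sketched."""

    def format_progress(self, percent_str=None, downloaded=None, total=None,
                        speed_str=None, eta_str=None):
        pct = percent_str or (f"{downloaded * 100 / total:.1f}%" if downloaded and total else "?")
        return f"[download] {pct} at {speed_str or '?'} ETA {eta_str or '?'}"


# One shared, module-level instance (mirroring _YTDLP_PROGRESS_BAR): every hook
# call reuses the same formatter instead of constructing one per progress event.
_FORMATTER = MiniProgressFormatter()
print(_FORMATTER.format_progress(percent_str="42.0%", speed_str="1.2MiB/s", eta_str="00:30"))
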
@@ -248,7 +251,8 @@ def _build_ytdlp_options(opts: DownloadOptions) -> Dict[str, Any]:
        "fragment_retries": 10,
        "http_chunk_size": 10_485_760,
        "restrictfilenames": True,
        "progress_hooks": [] if opts.quiet else [_progress_callback],
        # Always show a progress indicator; do not tie it to debug logging.
        "progress_hooks": [_progress_callback],
    }

    if opts.cookies_path and opts.cookies_path.is_file():
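
For context, this is how yt-dlp's documented `progress_hooks` option is consumed in general. The snippet below is a standalone illustration with a placeholder URL, not this repo's `_build_ytdlp_options`:

import sys

import yt_dlp  # pip install yt-dlp


def hook(status: dict) -> None:
    # yt-dlp calls this with status["status"] in {"downloading", "finished", ...}.
    if status.get("status") == "downloading":
        sys.stderr.write("\r" + (status.get("_percent_str") or "?").strip() + "  ")
        sys.stderr.flush()
    elif status.get("status") == "finished":
        sys.stderr.write("\ndone\n")


ydl_opts = {
    "quiet": True,
    "restrictfilenames": True,
    "progress_hooks": [hook],  # always attached, as in the change above
}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://example.com/video"])  # placeholder URL
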
@@ -423,17 +427,36 @@ def _progress_callback(status: Dict[str, Any]) -> None:
    """Simple progress callback using logger."""
    event = status.get("status")
    if event == "downloading":
        percent = status.get("_percent_str", "?")
        speed = status.get("_speed_str", "?")
        eta = status.get("_eta_str", "?")
        sys.stdout.write(f"\r[download] {percent} at {speed} ETA {eta}  ")
        sys.stdout.flush()
        # Always print progress to stderr so piped stdout remains clean.
        percent = status.get("_percent_str")
        downloaded = status.get("downloaded_bytes")
        total = status.get("total_bytes") or status.get("total_bytes_estimate")
        speed = status.get("_speed_str")
        eta = status.get("_eta_str")

        try:
            line = _YTDLP_PROGRESS_BAR.format_progress(
                percent_str=str(percent) if percent is not None else None,
                downloaded=int(downloaded) if downloaded is not None else None,
                total=int(total) if total is not None else None,
                speed_str=str(speed) if speed is not None else None,
                eta_str=str(eta) if eta is not None else None,
            )
        except Exception:
            pct = str(percent) if percent is not None else "?"
            spd = str(speed) if speed is not None else "?"
            et = str(eta) if eta is not None else "?"
            line = f"[download] {pct} at {spd} ETA {et}"

        sys.stderr.write("\r" + line + "  ")
        sys.stderr.flush()
    elif event == "finished":
        sys.stdout.write("\r" + " " * 70 + "\r")
        sys.stdout.flush()
        debug(f"✓ Download finished: {status.get('filename')}")
        # Clear the in-place progress line.
        sys.stderr.write("\r" + (" " * 140) + "\r")
        sys.stderr.write("\n")
        sys.stderr.flush()
    elif event in ("postprocessing", "processing"):
        debug(f"Post-processing: {status.get('postprocessor')}")
    return


def _download_direct_file(
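
The rewritten callback keeps stdout clean for piped data: it repaints a single stderr line with a carriage return while downloading, then blanks that line when the download finishes. The same repaint/clear pattern in isolation:

import sys
import time


def repaint(line: str) -> None:
    # Overwrite the current stderr line in place; no newline until we are done.
    sys.stderr.write("\r" + line + "  ")
    sys.stderr.flush()


def clear(width: int = 140) -> None:
    # Blank the line out, then move to a fresh line (mirrors the "finished" branch).
    sys.stderr.write("\r" + (" " * width) + "\r")
    sys.stderr.write("\n")
    sys.stderr.flush()


for pct in range(0, 101, 25):
    repaint(f"[download] {pct}% at 1.0MiB/s ETA 00:10")
    time.sleep(0.1)
clear()
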
@@ -530,17 +553,17 @@ def _download_direct_file(
                speed_str=speed_str,
                eta_str=eta_str,
            )
            if not quiet:
                debug(progress_line)
            sys.stderr.write("\r" + progress_line + "  ")
            sys.stderr.flush()
            last_progress_time[0] = now

    with HTTPClient(timeout=30.0) as client:
        client.download(url, str(file_path), progress_callback=progress_callback)

    elapsed = time.time() - start_time
    avg_speed_str = progress_bar.format_bytes(downloaded_bytes[0] / elapsed if elapsed > 0 else 0) + "/s"
    if not quiet:
        debug(f"✓ Downloaded in {elapsed:.1f}s at {avg_speed_str}")
    # Clear progress line after completion.
    sys.stderr.write("\r" + (" " * 140) + "\r")
    sys.stderr.write("\n")
    sys.stderr.flush()

    # For direct file downloads, create minimal info dict without filename as title
    # This prevents creating duplicate title: tags when filename gets auto-generated
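
Assuming `HTTPClient.download` invokes its `progress_callback` frequently, the closure above throttles redraws through a one-element list (`last_progress_time[0]`). A self-contained sketch of that throttle; the `(downloaded, total)` callback signature is an assumption for illustration, not the repo's actual `HTTPClient` contract:

import sys
import time
from typing import Callable, List


def make_progress_callback(min_interval: float = 0.5) -> Callable[[int, int], None]:
    """Build a (downloaded, total) callback that redraws at most every min_interval seconds."""
    last_progress_time: List[float] = [0.0]  # mutable cell, as in the closure above

    def progress_callback(downloaded: int, total: int) -> None:
        now = time.time()
        if now - last_progress_time[0] < min_interval:
            return
        pct = f"{downloaded * 100 / total:.1f}%" if total else "?"
        sys.stderr.write(f"\r[download] {pct} ({downloaded} bytes)  ")
        sys.stderr.flush()
        last_progress_time[0] = now

    return progress_callback


cb = make_progress_callback()
for i in range(5):
    cb(i * 1000, 5000)
    time.sleep(0.2)
sys.stderr.write("\n")
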
@@ -1403,9 +1426,16 @@ class Download_Media(Cmdlet):
        # Emit one PipeObject per downloaded file (playlists/albums return a list)
        results_to_emit = result_obj if isinstance(result_obj, list) else [result_obj]
        debug(f"Emitting {len(results_to_emit)} result(s) to pipeline...")

        stage_ctx = pipeline_context.get_stage_context()
        emit_enabled = bool(stage_ctx is not None and not getattr(stage_ctx, "is_last_stage", False))
        for downloaded in results_to_emit:
            pipe_obj_dict = self._build_pipe_object(downloaded, url, opts)
            pipeline_context.emit(pipe_obj_dict)

            # Only emit when there is a downstream stage.
            # This keeps `download-media` from producing a result table when run standalone.
            if emit_enabled:
                pipeline_context.emit(pipe_obj_dict)

            # Automatically register url with local library
            if pipe_obj_dict.get("url"):
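
The gate is computed once before the loop: results are only emitted when a later pipeline stage exists to consume them, so a standalone `download-media` run no longer produces a result table. A small sketch of that gating with the stage context stubbed (the names here are illustrative):

from typing import Any, List, Optional


class Stage:
    """Illustrative stand-in for the pipeline stage context."""

    def __init__(self, is_last_stage: bool) -> None:
        self.is_last_stage = is_last_stage


def emitted_results(results: List[Any], stage: Optional[Stage]) -> List[Any]:
    # Compute the gate once, outside the loop, exactly like emit_enabled above:
    # emit only when inside a pipeline AND a later stage will consume the item.
    emit_enabled = stage is not None and not getattr(stage, "is_last_stage", False)
    out: List[Any] = []
    for item in results:
        if emit_enabled:
            out.append(item)  # stands in for pipeline_context.emit(item)
    return out


print(emitted_results(["a", "b"], None))                       # [] -> standalone run
print(emitted_results(["a", "b"], Stage(is_last_stage=True)))  # [] -> last stage
print(emitted_results(["a", "b"], Stage(is_last_stage=False))) # ['a', 'b']
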