Some checks failed
smoke-mm / Install & smoke test mm --help (push) Has been cancelled
@@ -1593,6 +1593,36 @@ class Download_Media(Cmdlet):
             debug("Preflight URL check: no matches")
             return True

+        # If the user already answered the duplicate URL prompt for this pipeline/command,
+        # respect that decision and don't re-prompt for every item.
+        try:
+            current_cmd_text = pipeline_context.get_current_command_text("")
+        except Exception:
+            current_cmd_text = ""
+
+        try:
+            stage_ctx = pipeline_context.get_stage_context()
+        except Exception:
+            stage_ctx = None
+
+        in_pipeline = bool(stage_ctx is not None or ("|" in str(current_cmd_text or "")))
+        if in_pipeline:
+            try:
+                cached_cmd = pipeline_context.load_value("preflight.url_duplicates.command", default="")
+                cached_decision = pipeline_context.load_value("preflight.url_duplicates.continue", default=None)
+            except Exception:
+                cached_cmd = ""
+                cached_decision = None
+
+            if cached_decision is not None and str(cached_cmd or "") == str(current_cmd_text or ""):
+                if bool(cached_decision):
+                    return True
+                try:
+                    pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
+                except Exception:
+                    pass
+                return False
+
         table = ResultTable(f"URL already exists ({len(url_matches)} match(es))")
         results_list: List[Dict[str, Any]] = []
         for item in url_matches:
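Note on the cached-decision lookup above: the dotted keys passed to load_value read like a nested-dict path. Below is a minimal, self-contained sketch of the reuse logic, assuming nothing about the real PipelineContext beyond the key names shown in the diff (FakePipelineContext and reuse_cached_decision are made-up names for illustration):

from typing import Any, Dict, Optional


class FakePipelineContext:
    """Toy stand-in: a flat dict of stored values plus dotted-path reads."""

    def __init__(self) -> None:
        self._values: Dict[str, Any] = {}

    def store_value(self, key: str, value: Any) -> None:
        self._values[key] = value

    def load_value(self, dotted_key: str, default: Any = None) -> Any:
        # Walk "preflight.url_duplicates.continue" one segment at a time.
        node: Any = self._values
        for part in dotted_key.split("."):
            if not isinstance(node, dict) or part not in node:
                return default
            node = node[part]
        return node


def reuse_cached_decision(ctx: FakePipelineContext, current_cmd: str) -> Optional[bool]:
    """Return the cached yes/no answer if it was recorded for the same command text."""
    cached_cmd = ctx.load_value("preflight.url_duplicates.command", default="")
    cached_decision = ctx.load_value("preflight.url_duplicates.continue", default=None)
    if cached_decision is not None and str(cached_cmd or "") == str(current_cmd or ""):
        return bool(cached_decision)
    return None  # no usable cache entry; the caller should prompt


ctx = FakePipelineContext()
ctx.store_value("preflight", {"url_duplicates": {"command": 'download-media "..." | merge-file', "continue": True}})
print(reuse_cached_decision(ctx, 'download-media "..." | merge-file'))  # True
print(reuse_cached_decision(ctx, "some-other-command"))                 # None

The prompt answer is only honoured while the command text matches, so a different command in the same session prompts again.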
@@ -1639,8 +1669,28 @@ class Download_Media(Cmdlet):
         with cm:
             get_stderr_console().print(table)
             setattr(table, "_rendered_by_cmdlet", True)
-            if not Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()):
-                if used_suspend:
+            answered_yes = bool(Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()))
+
+            # Cache decision for the duration of this pipeline/command.
+            if in_pipeline:
+                try:
+                    existing = pipeline_context.load_value("preflight", default=None)
+                except Exception:
+                    existing = None
+                preflight_cache: Dict[str, Any] = existing if isinstance(existing, dict) else {}
+                url_dup_cache = preflight_cache.get("url_duplicates")
+                if not isinstance(url_dup_cache, dict):
+                    url_dup_cache = {}
+                url_dup_cache["command"] = str(current_cmd_text or "")
+                url_dup_cache["continue"] = bool(answered_yes)
+                preflight_cache["url_duplicates"] = url_dup_cache
+                try:
+                    pipeline_context.store_value("preflight", preflight_cache)
+                except Exception:
+                    pass
+
+            if not answered_yes:
+                if in_pipeline and used_suspend:
                     try:
                         pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
                     except Exception:
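The write side is the mirror image: the answer is merged into a small nested dict under a single preflight key, so later items in the same pipeline hit the cached branch instead of prompting again. A sketch under the same assumptions (record_duplicate_url_decision is a made-up helper; a plain dict stands in for whatever store_value persists to):

from typing import Any, Dict


def record_duplicate_url_decision(store: Dict[str, Any], command_text: str, answered_yes: bool) -> None:
    """Merge the prompt answer into an existing 'preflight' cache without clobbering other keys."""
    existing = store.get("preflight")
    preflight_cache: Dict[str, Any] = existing if isinstance(existing, dict) else {}
    url_dup_cache = preflight_cache.get("url_duplicates")
    if not isinstance(url_dup_cache, dict):
        url_dup_cache = {}
    url_dup_cache["command"] = str(command_text or "")
    url_dup_cache["continue"] = bool(answered_yes)
    preflight_cache["url_duplicates"] = url_dup_cache
    store["preflight"] = preflight_cache


cache: Dict[str, Any] = {"preflight": {"other_check": {"done": True}}}
record_duplicate_url_decision(cache, 'download-media -url "..." | merge-file', answered_yes=False)
print(cache["preflight"])  # keeps "other_check" and now also carries "url_duplicates"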
@@ -1666,6 +1716,35 @@ class Download_Media(Cmdlet):
             debug("Bulk URL preflight skipped: storage unavailable")
             return True

+        # Honor any prior duplicate URL decision for this pipeline/command.
+        try:
+            current_cmd_text = pipeline_context.get_current_command_text("")
+        except Exception:
+            current_cmd_text = ""
+
+        try:
+            stage_ctx = pipeline_context.get_stage_context()
+        except Exception:
+            stage_ctx = None
+
+        in_pipeline = bool(stage_ctx is not None or ("|" in str(current_cmd_text or "")))
+        if in_pipeline:
+            try:
+                cached_cmd = pipeline_context.load_value("preflight.url_duplicates.command", default="")
+                cached_decision = pipeline_context.load_value("preflight.url_duplicates.continue", default=None)
+            except Exception:
+                cached_cmd = ""
+                cached_decision = None
+
+            if cached_decision is not None and str(cached_cmd or "") == str(current_cmd_text or ""):
+                if bool(cached_decision):
+                    return True
+                try:
+                    pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
+                except Exception:
+                    pass
+                return False
+
         unique_urls: List[str] = []
         for u in urls or []:
             s = str(u or "").strip()
@@ -1930,7 +2009,32 @@ class Download_Media(Cmdlet):
                 cm = nullcontext()

             with cm:
-                if not Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()):
+                answered_yes = bool(Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()))
+
+                # Cache decision for the duration of this pipeline/command.
+                if in_pipeline:
+                    try:
+                        existing = pipeline_context.load_value("preflight", default=None)
+                    except Exception:
+                        existing = None
+                    preflight_cache: Dict[str, Any] = existing if isinstance(existing, dict) else {}
+                    url_dup_cache = preflight_cache.get("url_duplicates")
+                    if not isinstance(url_dup_cache, dict):
+                        url_dup_cache = {}
+                    url_dup_cache["command"] = str(current_cmd_text or "")
+                    url_dup_cache["continue"] = bool(answered_yes)
+                    preflight_cache["url_duplicates"] = url_dup_cache
+                    try:
+                        pipeline_context.store_value("preflight", preflight_cache)
+                    except Exception:
+                        pass
+
+                if not answered_yes:
+                    if in_pipeline:
+                        try:
+                            pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
+                        except Exception:
+                            pass
                     return False
             return True

@@ -1947,6 +2051,20 @@ class Download_Media(Cmdlet):
         if not isinstance(entries, list) or len(entries) <= 1:
             return False

+        # Identify a stable table type so `@* | ...` pipelines can auto-insert the
+        # appropriate downloader stage (e.g., Bandcamp selections should insert
+        # `download-media` before `merge-file`).
+        extractor_name = ""
+        try:
+            extractor_name = str(pr.get("extractor") or pr.get("extractor_key") or "").strip().lower()
+        except Exception:
+            extractor_name = ""
+        table_type: Optional[str] = None
+        if "bandcamp" in extractor_name:
+            table_type = "bandcamp"
+        elif "youtube" in extractor_name:
+            table_type = "youtube"
+
         # Display table (limit rows to keep output reasonable)
         max_rows = 200
         display_entries = entries[:max_rows]
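The extractor-to-table-type mapping above is just a case-insensitive substring check over the probe result. A hedged sketch of the idea (classify_table_type is a made-up name; the probe dict only mimics the shape of the pr dict used above):

from typing import Any, Dict, Optional


def classify_table_type(probe_result: Dict[str, Any]) -> Optional[str]:
    """Map an extractor name onto a stable table type; unknown extractors get no special handling."""
    try:
        extractor = str(probe_result.get("extractor") or probe_result.get("extractor_key") or "").strip().lower()
    except Exception:
        extractor = ""
    if "bandcamp" in extractor:
        return "bandcamp"
    if "youtube" in extractor:
        return "youtube"
    return None


print(classify_table_type({"extractor": "Bandcamp:album"}))  # bandcamp
print(classify_table_type({"extractor_key": "YoutubeTab"}))  # youtube
print(classify_table_type({"extractor": "soundcloud"}))      # None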
@@ -1976,6 +2094,11 @@ class Download_Media(Cmdlet):
         table = ResultTable()
         safe_url = str(url or "").strip()
         table.title = f'download-media -url "{safe_url}"' if safe_url else "download-media"
+        if table_type:
+            try:
+                table.set_table(table_type)
+            except Exception:
+                table.table = table_type
         table.set_source_command("download-media", [])
         try:
             table.set_preserve_order(True)
@@ -2581,7 +2704,10 @@ class Download_Media(Cmdlet):
         PipelineProgress(pipeline_context).step("finalized")

         stage_ctx = pipeline_context.get_stage_context()
-        emit_enabled = bool(stage_ctx is not None and not getattr(stage_ctx, "is_last_stage", False))
+        # Emit per downloaded item whenever we're running under the pipeline runner.
+        # Live progress advances on emit(), and suppressing emits for the last stage
+        # causes the pipe bar to stay at 0% even while downloads complete.
+        emit_enabled = bool(stage_ctx is not None)
         for pipe_obj_dict in pipe_objects:
             if emit_enabled:
                 pipeline_context.emit(pipe_obj_dict)
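The emit change is about progress accounting: a pipe bar that advances once per emitted item never moves if the last stage suppresses its emits. A small self-contained sketch of that effect (CountingProgress and finalize are illustrative stand-ins, not the real PipelineProgress or cmdlet code):

from dataclasses import dataclass, field
from typing import Any, List


@dataclass
class CountingProgress:
    """Stand-in for a pipe progress bar that advances once per emitted item."""
    emitted: List[Any] = field(default_factory=list)

    def emit(self, item: Any) -> None:
        self.emitted.append(item)


def finalize(items: List[Any], progress: CountingProgress, is_last_stage: bool, gate_on_last_stage: bool) -> None:
    # Old behaviour: suppress emits on the last stage, so the bar stays at 0.
    # New behaviour: emit whenever we run under the pipeline runner at all.
    emit_enabled = (not is_last_stage) if gate_on_last_stage else True
    for item in items:
        if emit_enabled:
            progress.emit(item)


downloads = ["track-01.flac", "track-02.flac", "track-03.flac"]

old = CountingProgress()
finalize(downloads, old, is_last_stage=True, gate_on_last_stage=True)
print(len(old.emitted))  # 0 -> the bar never moves

new = CountingProgress()
finalize(downloads, new, is_last_stage=True, gate_on_last_stage=False)
print(len(new.emitted))  # 3 -> the bar advances per downloaded item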