@@ -52,6 +52,9 @@ parse_cmdlet_args = sh.parse_cmdlet_args
register_url_with_local_library = sh.register_url_with_local_library
coerce_to_pipe_object = sh.coerce_to_pipe_object
get_field = sh.get_field
resolve_target_dir = sh.resolve_target_dir
coerce_to_path = sh.coerce_to_path
build_pipeline_preview = sh.build_pipeline_preview
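# The three aliases added in this hunk (resolve_target_dir, coerce_to_path,
# build_pipeline_preview) come from the shared helper module `sh`; the later hunks
# switch the cmdlet's private _resolve_output_dir / _resolve_streaming_output_dir,
# _path_from_download_result, and _build_preview calls over to them.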
class Download_File(Cmdlet):

@@ -168,49 +171,67 @@ class Download_File(Cmdlet):
                debug(f"Provider {provider_name} claimed {url}")
            try:
                # Try generic handle_url
                handled = False
                if hasattr(provider, "handle_url"):
                    handled, path = provider.handle_url(str(url), output_dir=final_output_dir)
                    if handled:
                        if path:
                            self._emit_local_file(
                                downloaded_path=Path(str(path)),
                                source=str(url),
                                title_hint=Path(str(path)).stem,
                                tags_hint=None,
                                media_kind_hint="file",
                                full_metadata=None,
                                progress=progress,
                                config=config,
                                provider_hint=provider_name
                            )
                            downloaded_count += 1
                            continue

                # Try generic download_url
                elif hasattr(provider, "download_url"):
                    downloaded_path = provider.download_url(str(url), final_output_dir)
                    if downloaded_path:
                        self._emit_local_file(
                            downloaded_path=Path(downloaded_path),
                            source=str(url),
                            title_hint=Path(str(downloaded_path)).stem,
                            tags_hint=None,
                            media_kind_hint="file",
                            full_metadata=None,
                            provider_hint=provider_name,
                            progress=progress,
                            config=config,
                        )
                        downloaded_count += 1
                        continue
                try:
                    handled, path = provider.handle_url(str(url), output_dir=final_output_dir)
                    if handled:
                        if path:
                            self._emit_local_file(
                                downloaded_path=Path(str(path)),
                                source=str(url),
                                title_hint=Path(str(path)).stem,
                                tags_hint=None,
                                media_kind_hint="file",
                                full_metadata=None,
                                progress=progress,
                                config=config,
                                provider_hint=provider_name
                            )
                            downloaded_count += 1
                            continue
                except Exception as e:
                    debug(f"Provider {provider_name} handle_url error: {e}")

                # Try generic download_url if not already handled
                if not handled and hasattr(provider, "download_url"):
                    res = provider.download_url(str(url), final_output_dir)
                    if res:
                        # Standardize result: can be Path, tuple(Path, Info), or dict with "path"
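                        # Any other shape is ignored here: p_val stays None, nothing is
                        # emitted, and the URL falls through to the direct-download fallback.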
                        p_val = None
                        extra_meta = None
                        if isinstance(res, (str, Path)):
                            p_val = Path(res)
                        elif isinstance(res, tuple) and len(res) > 0:
                            p_val = Path(res[0])
                            if len(res) > 1 and isinstance(res[1], dict):
                                extra_meta = res[1]
                        elif isinstance(res, dict):
                            path_candidate = res.get("path") or res.get("file_path")
                            if path_candidate:
                                p_val = Path(path_candidate)
                                extra_meta = res

                        if p_val:
                            self._emit_local_file(
                                downloaded_path=p_val,
                                source=str(url),
                                title_hint=p_val.stem,
                                tags_hint=None,
                                media_kind_hint=extra_meta.get("media_kind") if extra_meta else "file",
                                full_metadata=extra_meta,
                                provider_hint=provider_name,
                                progress=progress,
                                config=config,
                            )
                            downloaded_count += 1
                            continue

            except Exception as e:
                log(f"Provider {provider_name} error handling {url}: {e}", file=sys.stderr)
                # Fallthrough to direct download?
                # If a provider explicitly claimed it but failed, maybe we shouldn't fallback?
                # But "barebones" implies robustness might be up to user.
                # We'll continue to next URL.
                continue
                # If a provider explicitly claimed it but failed, we'll try direct download as a last resort.
                pass

            # Direct Download Fallback
            result_obj = _download_direct_file(

@@ -409,7 +430,7 @@ class Download_File(Cmdlet):
                suggested_filename=suggested_name,
                pipeline_progress=progress,
            )
            downloaded_path = self._path_from_download_result(result_obj)
            downloaded_path = coerce_to_path(result_obj)

            if downloaded_path is None:
                log(

@@ -481,17 +502,6 @@ class Download_File(Cmdlet):

        return downloaded_count, queued_magnet_submissions

    @staticmethod
    def _path_from_download_result(result_obj: Any) -> Path:
        file_path = None
        if hasattr(result_obj, "path"):
            file_path = getattr(result_obj, "path")
        elif isinstance(result_obj, dict):
            file_path = result_obj.get("path")
        if not file_path:
            file_path = str(result_obj)
        return Path(str(file_path))
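    # Local coercion helper for download results; the direct-download path above now
    # uses the shared coerce_to_path(result_obj) for the same job.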
    def _emit_local_file(
        self,
        *,

@@ -506,7 +516,7 @@ class Download_File(Cmdlet):
        provider_hint: Optional[str] = None,
    ) -> None:
        title_val = (title_hint or downloaded_path.stem or "Unknown").strip() or downloaded_path.stem
        hash_value = self._compute_file_hash(downloaded_path)
        hash_value = sha256_file(downloaded_path)
        notes: Optional[Dict[str, str]] = None
        try:
            if isinstance(full_metadata, dict):

@@ -544,38 +554,6 @@ class Download_File(Cmdlet):

        pipeline_context.emit(payload)

    @staticmethod
    def _normalize_urls(parsed: Dict[str, Any]) -> List[str]:
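        # Pull URLs from the parsed -url argument; if that yields nothing, fall back to a
        # query value of the form "url:<...>". Returns an empty list when neither is present.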
        urls: List[str] = []
        url_value: Any = None
        if isinstance(parsed, dict):
            url_value = parsed.get("url")

        try:
            urls = normalize_url_list(url_value)
        except Exception:
            urls = []

        if not urls and isinstance(parsed, dict):
            query_val = parsed.get("query")
            try:
                if isinstance(query_val, str) and query_val.strip().lower().startswith("url:"):
                    urls = normalize_url_list(query_val)
            except Exception:
                pass

        return urls

    @staticmethod
    def _collect_piped_items_if_no_urls(result: Any, raw_url: Sequence[str]) -> List[Any]:
        if raw_url:
            return []
        if result is None:
            return []
        if isinstance(result, list):
            return list(result)
        return [result]

    @staticmethod
    def _load_provider_registry() -> Dict[str, Any]:
        """Lightweight accessor for provider helpers without hard dependencies."""

@@ -597,73 +575,8 @@ class Download_File(Cmdlet):
            "SearchResult": None,
        }

    @staticmethod
    def _safe_total_items(raw_url: Sequence[str], piped_items: Sequence[Any]) -> int:
        """Return a sane item count for progress display."""
        try:
            url_count = len(raw_url or [])
        except Exception:
            url_count = 0
        try:
            piped_count = len(piped_items or [])
        except Exception:
            piped_count = 0
        total = url_count + piped_count
        return total if total > 0 else 1

    @staticmethod
    def _build_preview(raw_url: Sequence[str], piped_items: Sequence[Any], total_items: int) -> List[str]:
        """Construct a short preview list for the local progress UI."""
        preview: List[str] = []

        try:
            for url in raw_url or []:
                if len(preview) >= 5:
                    break
                preview.append(str(url))
        except Exception:
            pass

        if len(preview) < 5:
            try:
                items = piped_items if isinstance(piped_items, list) else list(piped_items or [])
            except Exception:
                items = []
            for item in items:
                if len(preview) >= 5:
                    break
                try:
                    label = get_field(item, "title") or get_field(item, "path") or get_field(item, "url")
                except Exception:
                    label = None
                if label:
                    preview.append(str(label))

        # If we still have nothing, supply a generic placeholder to avoid empty previews.
        if not preview and total_items:
            preview.append(f"{total_items} item(s)")

        return preview

    # === Streaming helpers (yt-dlp) ===

    @staticmethod
    def _append_urls_from_piped_result(raw_urls: List[str], result: Any) -> List[str]:
        if raw_urls:
            return raw_urls
        if not result:
            return raw_urls

        results_to_check = result if isinstance(result, list) else [result]
        for item in results_to_check:
            try:
                url = get_field(item, "url") or get_field(item, "target")
            except Exception:
                url = None
            if url:
                raw_urls.append(url)
        return raw_urls

    @staticmethod
    def _filter_supported_urls(raw_urls: Sequence[str]) -> tuple[List[str], List[str]]:
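        # Partition the incoming URLs by yt-dlp support; the second element of the tuple
        # presumably collects the URLs that is_url_supported_by_ytdlp() rejects.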
        supported = [url for url in (raw_urls or []) if is_url_supported_by_ytdlp(url)]

@@ -1633,7 +1546,7 @@ class Download_File(Cmdlet):
            if unsupported_list:
                debug(f"Skipping {len(unsupported_list)} unsupported URL(s) (use direct HTTP mode)")

            final_output_dir = self._resolve_streaming_output_dir(parsed, config)
            final_output_dir = resolve_target_dir(parsed, config)
            if not final_output_dir:
                return 1

@@ -1860,45 +1773,6 @@ class Download_File(Cmdlet):
            log(f"Error in streaming download handler: {e}", file=sys.stderr)
            return 1

    def _resolve_streaming_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
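        # Resolution order: an explicit -path override (a file path is trimmed to its
        # parent directory), then the config "temp" directory, then an OS temp folder
        # named "Medios-Macina". Returns None if none of these can be created.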
        path_override = parsed.get("path")
        if path_override:
            try:
                candidate = Path(str(path_override)).expanduser()
                if candidate.suffix:
                    candidate = candidate.parent
                candidate.mkdir(parents=True, exist_ok=True)
                debug(f"Using output directory override: {candidate}")
                return candidate
            except Exception as e:
                log(f"Invalid -path output directory: {e}", file=sys.stderr)
                return None

        try:
            temp_value = (config or {}).get("temp") if isinstance(config, dict) else None
        except Exception:
            temp_value = None
        if temp_value:
            try:
                candidate = Path(str(temp_value)).expanduser()
                candidate.mkdir(parents=True, exist_ok=True)
                debug(f"Using config temp directory: {candidate}")
                return candidate
            except Exception as e:
                log(f"Cannot use configured temp directory '{temp_value}': {e}", file=sys.stderr)
                return None

        try:
            import tempfile

            candidate = Path(tempfile.gettempdir()) / "Medios-Macina"
            candidate.mkdir(parents=True, exist_ok=True)
            debug(f"Using OS temp directory: {candidate}")
            return candidate
        except Exception as e:
            log(f"Cannot create OS temp directory: {e}", file=sys.stderr)
            return None

    def _parse_time_ranges(self, spec: str) -> List[tuple[int, int]]:
        def _to_seconds(ts: str) -> Optional[int]:
            ts = str(ts).strip()
@@ -2001,7 +1875,7 @@ class Download_File(Cmdlet):
    def _build_pipe_object(self, download_result: Any, url: str, opts: DownloadOptions) -> Dict[str, Any]:
        info: Dict[str, Any] = download_result.info if isinstance(download_result.info, dict) else {}
        media_path = Path(download_result.path)
        hash_value = download_result.hash_value or self._compute_file_hash(media_path)
        hash_value = download_result.hash_value or sha256_file(media_path)
        title = info.get("title") or media_path.stem
        tag = list(download_result.tag or [])

@@ -2398,8 +2272,19 @@ class Download_File(Cmdlet):
        # Parse arguments
        parsed = parse_cmdlet_args(args, self)

        raw_url = self._normalize_urls(parsed)
        piped_items = self._collect_piped_items_if_no_urls(result, raw_url)
        # Resolve URLs from -url or positional arguments
        url_candidates = parsed.get("url") or [a for a in parsed.get("args", []) if isinstance(a, str) and (a.startswith("http") or "://" in a)]
        raw_url = normalize_url_list(url_candidates)

        quiet_mode = bool(config.get("_quiet_background_output")) if isinstance(config, dict) else False

        # Fallback to piped items if no explicit URLs provided
        piped_items = []
        if not raw_url:
            if isinstance(result, list):
                piped_items = list(result)
            elif result is not None:
                piped_items = [result]

        # Handle TABLE_AUTO_STAGES routing: if a piped PipeObject has _selection_args,
        # re-invoke download-file with those args instead of processing the PipeObject itself

@@ -2470,7 +2355,7 @@ class Download_File(Cmdlet):
            if picker_result is not None:
                return int(picker_result)

        streaming_candidates = self._append_urls_from_piped_result(list(raw_url), result)
        streaming_candidates = list(raw_url)
        supported_streaming, unsupported_streaming = self._filter_supported_urls(streaming_candidates)

        streaming_exit_code: Optional[int] = None

@@ -2504,7 +2389,7 @@ class Download_File(Cmdlet):
                return int(picker_result)

        # Get output directory
        final_output_dir = self._resolve_output_dir(parsed, config)
        final_output_dir = resolve_target_dir(parsed, config)
        if not final_output_dir:
            return 1

@@ -2513,8 +2398,8 @@ class Download_File(Cmdlet):
        # If the caller isn't running the shared pipeline Live progress UI (e.g. direct
        # cmdlet execution), start a minimal local pipeline progress panel so downloads
        # show consistent, Rich-formatted progress (like download-media).
        total_items = self._safe_total_items(raw_url, piped_items)
        preview = self._build_preview(raw_url, piped_items, total_items)
        total_items = max(1, len(raw_url or []) + len(piped_items or []))
        preview = build_pipeline_preview(raw_url, piped_items)

        progress.ensure_local_ui(
            label="download-file",

@@ -2525,91 +2410,16 @@ class Download_File(Cmdlet):
        downloaded_count = 0
        # Special-case: support selection-inserted magnet-id arg to drive provider downloads
        magnet_id_raw = parsed.get("magnet-id")
        if magnet_id_raw:
            try:
                magnet_id = int(str(magnet_id_raw).strip())
            except Exception:
                log(f"[download-file] invalid magnet-id: {magnet_id_raw}", file=sys.stderr)
                return 1

            get_provider = registry.get("get_provider")
            provider_name = str(parsed.get("provider") or "alldebrid").strip().lower()
            provider_obj = None
            if get_provider is not None:
                try:
                    provider_obj = get_provider(provider_name, config)
                except Exception:
                    provider_obj = None

            if provider_obj is None:
                log(f"[download-file] provider '{provider_name}' not available", file=sys.stderr)
                return 1

            SearchResult = registry.get("SearchResult")
            try:
                if SearchResult is not None:
                    sr = SearchResult(
                        table=provider_name,
                        title=f"magnet-{magnet_id}",
                        path=f"alldebrid:magnet:{magnet_id}",
                        full_metadata={
                            "magnet_id": magnet_id,
                            "provider": provider_name,
                            "provider_view": "files",
                        },
                    )
                else:
                    sr = None
            except Exception:
                sr = None

            def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
                title_hint = metadata.get("name") or relpath or f"magnet-{magnet_id}"
                self._emit_local_file(
                    downloaded_path=path,
                    source=file_url or f"alldebrid:magnet:{magnet_id}",
                    title_hint=title_hint,
                    tags_hint=None,
                    media_kind_hint="file",
                    full_metadata=metadata,
                    progress=progress,
                    config=config,
                    provider_hint=provider_name,
                )

            try:
                downloaded_extra = provider_obj.download_items(
                    sr,
                    final_output_dir,
                    emit=_on_emit,
                    progress=progress,
                    quiet_mode=quiet_mode,
                    path_from_result=self._path_from_download_result,
                    config=config,
                )
            except TypeError:
                downloaded_extra = provider_obj.download_items(
                    sr,
                    final_output_dir,
                    emit=_on_emit,
                    progress=progress,
                    quiet_mode=quiet_mode,
                    path_from_result=self._path_from_download_result,
                )
            except Exception as exc:
                log(f"[download-file] failed to download magnet {magnet_id}: {exc}", file=sys.stderr)
                return 1

            if downloaded_extra:
                debug(f"[download-file] AllDebrid magnet {magnet_id} emitted {downloaded_extra} files")
                return 0

            log(
                f"[download-file] AllDebrid magnet {magnet_id} produced no downloads",
                file=sys.stderr,
            )
            return 1
        magnet_ret = self._process_magnet_id(
            parsed=parsed,
            registry=registry,
            config=config,
            final_output_dir=final_output_dir,
            progress=progress,
            quiet_mode=quiet_mode
        )
        if magnet_ret is not None:
            return magnet_ret

        urls_downloaded, early_exit = self._process_explicit_urls(
            raw_urls=raw_url,

@@ -2662,6 +2472,104 @@ class Download_File(Cmdlet):
                pass
            progress.close_local_ui(force_complete=True)

    def _process_magnet_id(
        self,
        *,
        parsed: Dict[str, Any],
        registry: Dict[str, Any],
        config: Dict[str, Any],
        final_output_dir: Path,
        progress: PipelineProgress,
        quiet_mode: bool
    ) -> Optional[int]:
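        # Handle the selection-inserted -magnet-id argument via the named provider
        # (default "alldebrid"). Returns None when no magnet-id was given, 0 when the
        # provider emitted files, and 1 on any failure.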
        magnet_id_raw = parsed.get("magnet-id")
        if not magnet_id_raw:
            return None

        try:
            magnet_id = int(str(magnet_id_raw).strip())
        except Exception:
            log(f"[download-file] invalid magnet-id: {magnet_id_raw}", file=sys.stderr)
            return 1

        get_provider = registry.get("get_provider")
        provider_name = str(parsed.get("provider") or "alldebrid").strip().lower()
        provider_obj = None
        if get_provider is not None:
            try:
                provider_obj = get_provider(provider_name, config)
            except Exception:
                provider_obj = None

        if provider_obj is None:
            log(f"[download-file] provider '{provider_name}' not available", file=sys.stderr)
            return 1

        SearchResult = registry.get("SearchResult")
        try:
            if SearchResult is not None:
                sr = SearchResult(
                    table=provider_name,
                    title=f"magnet-{magnet_id}",
                    path=f"alldebrid:magnet:{magnet_id}",
                    full_metadata={
                        "magnet_id": magnet_id,
                        "provider": provider_name,
                        "provider_view": "files",
                    },
                )
            else:
                sr = None
        except Exception:
            sr = None

        def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
            title_hint = metadata.get("name") or relpath or f"magnet-{magnet_id}"
            self._emit_local_file(
                downloaded_path=path,
                source=file_url or f"alldebrid:magnet:{magnet_id}",
                title_hint=title_hint,
                tags_hint=None,
                media_kind_hint="file",
                full_metadata=metadata,
                progress=progress,
                config=config,
                provider_hint=provider_name,
            )

        try:
            downloaded_extra = provider_obj.download_items(
                sr,
                final_output_dir,
                emit=_on_emit,
                progress=progress,
                quiet_mode=quiet_mode,
                path_from_result=coerce_to_path,
                config=config,
            )
        except TypeError:
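            # Retry without the config kwarg, presumably for providers whose
            # download_items() signature does not accept it.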
            downloaded_extra = provider_obj.download_items(
                sr,
                final_output_dir,
                emit=_on_emit,
                progress=progress,
                quiet_mode=quiet_mode,
                path_from_result=coerce_to_path,
            )
        except Exception as exc:
            log(f"[download-file] failed to download magnet {magnet_id}: {exc}", file=sys.stderr)
            return 1

        if downloaded_extra:
            debug(f"[download-file] AllDebrid magnet {magnet_id} emitted {downloaded_extra} files")
            return 0

        log(
            f"[download-file] AllDebrid magnet {magnet_id} produced no downloads",
            file=sys.stderr,
        )
        return 1

    def _maybe_show_provider_picker(
        self,
        *,

@@ -2714,67 +2622,6 @@ class Download_File(Cmdlet):

        return None

    def _resolve_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
        """Resolve the output directory from storage location or config."""
        output_dir_arg = parsed.get("path") or parsed.get("output")
        if output_dir_arg:
            try:
                out_path = Path(str(output_dir_arg)).expanduser()
                out_path.mkdir(parents=True, exist_ok=True)
                return out_path
            except Exception as e:
                log(
                    f"Cannot use output directory {output_dir_arg}: {e}",
                    file=sys.stderr
                )
                return None

        storage_location = parsed.get("storage")

        # Priority 1: --storage flag
        if storage_location:
            try:
                return SharedArgs.resolve_storage(storage_location)
            except Exception as e:
                log(f"Invalid storage location: {e}", file=sys.stderr)
                return None

        # Priority 2: Config default output/temp directory, then OS temp
        try:
            from SYS.config import resolve_output_dir
            final_output_dir = resolve_output_dir(config)
        except Exception:
            import tempfile
            final_output_dir = Path(tempfile.gettempdir())

        debug(f"Using default directory: {final_output_dir}")

        # Ensure directory exists
        try:
            final_output_dir.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            log(
                f"Cannot create output directory {final_output_dir}: {e}",
                file=sys.stderr
            )
            return None

        return final_output_dir

    def _compute_file_hash(self, filepath: Path) -> str:
        """Compute SHA256 hash of a file."""
        import hashlib

        sha256_hash = hashlib.sha256()
        with open(filepath, "rb") as f:
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_hash.update(byte_block)
        return sha256_hash.hexdigest()
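    # This local helper overlaps with the shared sha256_file used earlier in the diff;
    # _emit_local_file and _build_pipe_object now call sha256_file for the same digest.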

# Module-level singleton registration
CMDLET = Download_File()