@@ -244,7 +244,7 @@ class SharedArgs:
        description="Destination location",
    )

-   DELETE_FLAG = CmdletArg(
+   DELETE = CmdletArg(
        "delete",
        type="flag",
        description="Delete the file and its .tag after successful operation.",
@@ -2081,6 +2081,12 @@ def extract_url_from_result(result: Any) -> list[str]:
        _extend(result.metadata.get("url"))
        _extend(result.metadata.get("url"))
        _extend(result.metadata.get("url"))
-       if isinstance(getattr(result, "full_metadata", None), dict):
+       fm = getattr(result, "full_metadata", None)
+       if isinstance(fm, dict):
            _extend(fm.get("url"))
            _extend(fm.get("url"))
            _extend(fm.get("url"))
    elif hasattr(result, "url") or hasattr(result, "url"):
        # Handle objects with url/url attribute
        _extend(getattr(result, "url", None))
@@ -2090,6 +2096,11 @@ def extract_url_from_result(result: Any) -> list[str]:
        _extend(result.get("url"))
        _extend(result.get("url"))
        _extend(result.get("url"))
+       fm = result.get("full_metadata")
+       if isinstance(fm, dict):
+           _extend(fm.get("url"))
+           _extend(fm.get("url"))
+           _extend(fm.get("url"))
        extra = result.get("extra")
        if isinstance(extra, dict):
            _extend(extra.get("url"))
@@ -2531,6 +2542,30 @@ def resolve_tidal_manifest_path(item: Any) -> Optional[str]:
            metadata["_tidal_track_details_fetched"] = True
        except Exception:
            pass
        if not metadata.get("url"):
            try:
                resp_info = httpx.get(
                    "https://tidal-api.binimum.org/info/",
                    params={"id": str(track_int)},
                    timeout=10.0,
                )
                resp_info.raise_for_status()
                info_payload = resp_info.json()
                info_data = info_payload.get("data") if isinstance(info_payload, dict) else None
                if isinstance(info_data, dict) and info_data:
                    try:
                        for k, v in info_data.items():
                            if k not in metadata:
                                metadata[k] = v
                    except Exception:
                        pass
                    try:
                        if info_data.get("url"):
                            metadata["url"] = info_data.get("url")
                    except Exception:
                        pass
            except Exception:
                pass
    except Exception:
        pass

@@ -345,6 +345,14 @@ class Add_File(Cmdlet):
        else:
            items_to_process = [result]

+       total_items = len(items_to_process) if isinstance(items_to_process, list) else 0
+       processed_items = 0
+       try:
+           if total_items:
+               progress.set_percent(0)
+       except Exception:
+           pass

        # Minimal step-based progress for single-item runs.
        # Many add-file flows don't emit intermediate items, so without steps the pipe can look "stuck".
        use_steps = False
@@ -496,9 +504,25 @@ class Add_File(Cmdlet):
            and len(items_to_process) > 1
        )

-       for item in items_to_process:
+       for idx, item in enumerate(items_to_process, 1):
            pipe_obj = coerce_to_pipe_object(item, path_arg)

+           try:
+               label = pipe_obj.title or pipe_obj.name
+               if not label and pipe_obj.path:
+                   try:
+                       label = Path(str(pipe_obj.path)).name
+                   except Exception:
+                       label = pipe_obj.path
+               if not label:
+                   label = "file"
+               if total_items:
+                   pending_pct = int(round(((idx - 1) / max(1, total_items)) * 100))
+                   progress.set_percent(pending_pct)
+                   progress.set_status(f"adding {idx}/{total_items}: {label}")
+           except Exception:
+               pass

            temp_dir_to_cleanup: Optional[Path] = None
            delete_after_item = delete_after
            try:
@@ -597,6 +621,14 @@ class Add_File(Cmdlet):
                    shutil.rmtree(temp_dir_to_cleanup, ignore_errors=True)
                except Exception:
                    pass
+           processed_items += 1
+           try:
+               pct = int(round((processed_items / max(1, total_items)) * 100))
+               progress.set_percent(pct)
+               if processed_items >= total_items:
+                   progress.clear_status()
+           except Exception:
+               pass

        # Apply deferred url associations (bulk) before showing the final store table.
        if pending_url_associations:

cmdlet/convert_file.py (new file, 289 lines)
@@ -0,0 +1,289 @@
from __future__ import annotations

from typing import Any, Dict, Sequence, Optional
from pathlib import Path
import sys
import shutil
import subprocess

from SYS.logger import log, debug
from SYS.utils import sha256_file
from . import _shared as sh
from SYS import pipeline as ctx

Cmdlet = sh.Cmdlet
CmdletArg = sh.CmdletArg
QueryArg = sh.QueryArg
SharedArgs = sh.SharedArgs
parse_cmdlet_args = sh.parse_cmdlet_args
normalize_result_input = sh.normalize_result_input
extract_title_from_result = sh.extract_title_from_result


VIDEO_EXTS = {
    "mp4",
    "mkv",
    "webm",
    "mov",
    "avi",
    "flv",
    "mpeg",
    "mpg",
    "m4v",
}

AUDIO_EXTS = {
    "mp3",
    "m4a",
    "m4b",
    "aac",
    "flac",
    "wav",
    "ogg",
    "opus",
    "mka",
}

IMAGE_EXTS = {
    "png",
    "jpg",
    "jpeg",
    "webp",
    "bmp",
    "tif",
    "tiff",
    "gif",
}

DOC_EXTS = {
    "pdf",
    "mobi",
    "epub",
    "azw3",
    "txt",
    "rtf",
    "html",
    "htm",
    "md",
    "doc",
    "docx",
}


def _detect_kind(ext: str) -> str:
    e = ext.lower().lstrip(".")
    if e in VIDEO_EXTS:
        return "video"
    if e in AUDIO_EXTS:
        return "audio"
    if e in IMAGE_EXTS:
        return "image"
    if e in DOC_EXTS:
        return "doc"
    return "unknown"


def _allowed(source_kind: str, target_kind: str) -> bool:
    if source_kind == target_kind:
        return True
    if source_kind == "video" and target_kind == "audio":
        return True
    return False


def _ffmpeg_convert(
    input_path: Path,
    output_path: Path,
    target_kind: str,
    copy_metadata: bool,
) -> bool:
    ffmpeg_path = shutil.which("ffmpeg")
    if not ffmpeg_path:
        log("ffmpeg not found in PATH", file=sys.stderr)
        return False

    cmd = [ffmpeg_path, "-y", "-i", str(input_path)]

    if target_kind == "audio":
        cmd.extend(["-vn"])

    if copy_metadata:
        cmd.extend(["-map_metadata", "0"])

    cmd.append(str(output_path))

    debug(f"[convert-file] Running ffmpeg: {' '.join(cmd)}")
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode != 0:
        log(f"ffmpeg error: {proc.stderr}", file=sys.stderr)
        return False
    return True
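
# Illustrative sketch: for a video -> audio conversion with copy_metadata=True,
# _ffmpeg_convert assembles an argument list equivalent to the one below.
# The file names here are hypothetical; ffmpeg infers the output codec from
# the target extension.
_EXAMPLE_FFMPEG_CMD = [
    "ffmpeg",
    "-y",                  # overwrite the output if it already exists
    "-i", "talk.mkv",      # input file
    "-vn",                 # drop the video stream (audio-only target)
    "-map_metadata", "0",  # copy container-level metadata from input 0
    "talk.mp3",            # output path; extension selects the format
]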


def _doc_convert(input_path: Path, output_path: Path) -> bool:
    try:
        import pypandoc  # type: ignore
    except Exception:
        log("pypandoc is required for document conversion; install pypandoc-binary", file=sys.stderr)
        return False

    target_fmt = output_path.suffix.lstrip(".").lower() or "pdf"

    try:
        pypandoc.convert_file(
            str(input_path),
            to=target_fmt,
            outputfile=str(output_path),
        )
    except OSError as exc:
        log(f"pandoc is missing or failed to run: {exc}", file=sys.stderr)
        return False
    except Exception as exc:
        log(f"pypandoc conversion failed: {exc}", file=sys.stderr)
        return False

    if not output_path.exists():
        log("pypandoc conversion did not produce an output file", file=sys.stderr)
        return False

    return True
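
# Illustrative sketch: the pypandoc call above is roughly equivalent to the
# following for an epub -> html conversion (file names are hypothetical).
# pypandoc-binary bundles pandoc itself; with plain pypandoc, pandoc must be
# on PATH, which is what the OSError branch reports.
#
#     import pypandoc
#     pypandoc.convert_file("book.epub", to="html", outputfile="book.html")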


CMDLET = Cmdlet(
    name="convert-file",
    summary="Convert files between media/container formats (video, audio, image, documents).",
    usage="convert-file -to <format> [-path <file|dir>] [-delete] [-query format:<fmt>]",
    arg=[
        QueryArg("to", key="format", query_only=False, required=True,
                 description="Target format/extension (e.g., mp4, mp3, wav, jpg, pdf)."),
        SharedArgs.PATH,
        SharedArgs.QUERY,
        SharedArgs.DELETE,
    ],
    detail=[
        "Allows video↔video, audio↔audio, image↔image, doc↔doc, and video→audio conversions.",
        "Disallows incompatible conversions (e.g., video→pdf).",
        "Uses ffmpeg for media and pypandoc-binary (bundled pandoc) for document formats (mobi/epub→pdf/txt/etc).",
    ],
)


def _resolve_output_path(input_path: Path, outdir: Optional[Path], target_ext: str) -> Path:
    base = input_path.stem
    directory = outdir if outdir is not None else input_path.parent
    directory.mkdir(parents=True, exist_ok=True)
    candidate = directory / f"{base}.{target_ext}"
    if candidate.exists():
        for i in range(1, 1000):
            alt = directory / f"{base}_{i}.{target_ext}"
            if not alt.exists():
                candidate = alt
                break
    return candidate


def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    parsed = parse_cmdlet_args(args, CMDLET)

    target_fmt_raw = parsed.get("to") or parsed.get("format")
    if not target_fmt_raw:
        log("-to <format> is required", file=sys.stderr)
        return 1
    target_fmt = str(target_fmt_raw).lower().lstrip(".")
    target_kind = _detect_kind(target_fmt)
    if target_kind == "unknown":
        log(f"Unsupported target format: {target_fmt}", file=sys.stderr)
        return 1

    delete_src = bool(parsed.get("delete", False))

    inputs = normalize_result_input(result)
    path_arg = parsed.get("path")

    outdir_override: Optional[Path] = None
    if path_arg:
        try:
            p = Path(str(path_arg)).expanduser()
            if p.exists() and p.is_dir():
                outdir_override = p
            else:
                inputs.append({"path": p})
        except Exception:
            inputs.append({"path": path_arg})

    if not inputs:
        log("No input provided to convert-file", file=sys.stderr)
        return 1

    success = 0

    for item in inputs:
        input_path: Optional[Path] = None
        if isinstance(item, dict):
            p = item.get("path") or item.get("target")
        elif hasattr(item, "path"):
            p = getattr(item, "path")
        else:
            p = item

        try:
            input_path = Path(str(p)) if p else None
        except Exception:
            input_path = None

        if not input_path or not input_path.exists() or not input_path.is_file():
            log("convert-file: input path missing or not found", file=sys.stderr)
            continue

        source_ext = input_path.suffix.lower().lstrip(".")
        source_kind = _detect_kind(source_ext)

        if not _allowed(source_kind, target_kind):
            log(
                f"Conversion from {source_kind or 'unknown'} to {target_kind} is not allowed",
                file=sys.stderr,
            )
            continue

        output_path = _resolve_output_path(input_path, outdir_override, target_fmt)

        converted = False
        if target_kind in {"video", "audio", "image"}:
            converted = _ffmpeg_convert(input_path, output_path, target_kind, copy_metadata=True)
        elif target_kind == "doc":
            converted = _doc_convert(input_path, output_path)
        else:
            log(f"No converter for target kind {target_kind}", file=sys.stderr)

        if not converted:
            continue

        try:
            out_hash = sha256_file(output_path)
        except Exception:
            out_hash = None

        title = extract_title_from_result(item) or output_path.stem

        ctx.emit({
            "path": str(output_path),
            "title": title,
            "hash": out_hash,
            "media_kind": target_kind,
            "source_path": str(input_path),
        })

        if delete_src:
            try:
                input_path.unlink()
                log(f"Deleted source file: {input_path}", file=sys.stderr)
            except Exception as exc:
                log(f"Failed to delete source {input_path}: {exc}", file=sys.stderr)

        success += 1

    return 0 if success else 1


CMDLET.exec = _run
CMDLET.register()
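
# Illustrative sketch of how the cmdlet would be driven, based on the usage
# string above (flag parsing behaviour is assumed from parse_cmdlet_args):
#
#     convert-file -to mp3 -path talk.mkv -delete
#
# _run would resolve the target kind ("audio"), reject disallowed pairs such
# as video -> pdf, convert via ffmpeg, emit the new path (plus hash and
# media_kind) into the pipeline, and delete the source only after success.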
@@ -22,8 +22,8 @@ from Provider import internetarchive as ia_provider
from Provider import alldebrid as ad_provider
from Provider import openlibrary as ol_provider

-from SYS.download import DownloadError, _download_direct_file
-from SYS.models import DownloadOptions, DownloadMediaResult
+from API.HTTP import _download_direct_file
+from SYS.models import DownloadError, DownloadOptions, DownloadMediaResult
from SYS.logger import log, debug
from SYS.pipeline_progress import PipelineProgress
from SYS.result_table import ResultTable
@@ -890,7 +890,6 @@ class Download_File(Cmdlet):
        return expanded_items

    def _process_provider_items(
        self,
        *,
        piped_items: Sequence[Any],
        final_output_dir: Path,
@@ -900,8 +899,9 @@ class Download_File(Cmdlet):
        registry: Dict[str, Any],
        progress: PipelineProgress,
-   ) -> int:
+   ) -> tuple[int, int]:
        downloaded_count = 0
+       queued_magnet_submissions = 0
        get_search_provider = registry.get("get_search_provider")
        SearchResult = registry.get("SearchResult")

@@ -911,8 +911,17 @@ class Download_File(Cmdlet):
            config=config
        )

        total_items = len(expanded_items)
        processed_items = 0
        try:
            if total_items:
                progress.set_percent(0)
        except Exception:
            pass

        for item in expanded_items:
            try:
                label = "item"
                table = get_field(item, "table")
                title = get_field(item, "title")
                target = get_field(item, "path") or get_field(item, "url")
@@ -933,6 +942,25 @@ class Download_File(Cmdlet):
                if isinstance(extra_md, dict):
                    full_metadata = extra_md

+               try:
+                   label = title or target
+                   label = str(label or "item").strip()
+                   if total_items:
+                       pct = int(round((processed_items / max(1, total_items)) * 100))
+                       progress.set_percent(pct)
+                       progress.set_status(
+                           f"downloading {processed_items + 1}/{total_items}: {label}"
+                       )
+               except Exception:
+                   pass
+
+               transfer_label = label
+               if str(table or "").lower() == "hifi":
+                   try:
+                       progress.begin_transfer(label=transfer_label, total=None)
+                   except Exception:
+                       pass

                # If this looks like a provider item and providers are available, prefer provider.download()
                downloaded_path: Optional[Path] = None
                attempted_provider_download = False
@@ -1065,6 +1093,45 @@ class Download_File(Cmdlet):

                    continue

+               # Magnet targets (e.g., torrent provider results) -> submit/download via AllDebrid
+               if downloaded_path is None and isinstance(target, str) and is_magnet_link(str(target)):
+                   magnet_spec = ad_provider.resolve_magnet_spec(str(target))
+                   if magnet_spec:
+
+                       def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
+                           title_hint = metadata.get("name") or relpath or title
+                           self._emit_local_file(
+                               downloaded_path=path,
+                               source=file_url or target,
+                               title_hint=title_hint,
+                               tags_hint=None,
+                               media_kind_hint="file",
+                               full_metadata=metadata,
+                               progress=progress,
+                               config=config,
+                               provider_hint="alldebrid",
+                           )
+
+                       downloaded, magnet_id = ad_provider.download_magnet(
+                           magnet_spec,
+                           str(target),
+                           final_output_dir,
+                           config,
+                           progress,
+                           quiet_mode,
+                           self._path_from_download_result,
+                           _on_emit,
+                       )
+
+                       if downloaded > 0:
+                           downloaded_count += downloaded
+                           continue
+
+                       # If queued but not yet ready, skip the generic unsupported-target error.
+                       if magnet_id is not None:
+                           queued_magnet_submissions += 1
+                           continue

                # Fallback: if we have a direct HTTP URL, download it directly
                if (downloaded_path is None and isinstance(target, str)
@@ -1080,6 +1147,7 @@ class Download_File(Cmdlet):
                        file=sys.stderr,
                    )
                    continue

                debug(
                    f"[download-file] Provider item looks like direct URL, downloading: {target}"
                )
@@ -1150,8 +1218,22 @@ class Download_File(Cmdlet):
                log(f"Download failed: {e}", file=sys.stderr)
            except Exception as e:
                log(f"Error downloading item: {e}", file=sys.stderr)
+           finally:
+               if str(table or "").lower() == "hifi":
+                   try:
+                       progress.finish_transfer(label=transfer_label)
+                   except Exception:
+                       pass
+               processed_items += 1
+               try:
+                   pct = int(round((processed_items / max(1, total_items)) * 100))
+                   progress.set_percent(pct)
+                   if processed_items >= total_items:
+                       progress.clear_status()
+               except Exception:
+                   pass

-       return downloaded_count
+       return downloaded_count, queued_magnet_submissions

        # === Streaming helpers (yt-dlp) ===

@@ -2687,6 +2769,15 @@ class Download_File(Cmdlet):

        debug(f"Output directory: {final_output_dir}")

+       try:
+           PipelineProgress(pipeline_context).ensure_local_ui(
+               label="download-file",
+               total_items=len(supported_url),
+               items_preview=supported_url,
+           )
+       except Exception:
+           pass

        clip_spec = parsed.get("clip")
        query_spec = parsed.get("query")

@@ -3572,7 +3663,7 @@ class Download_File(Cmdlet):
        if early_exit is not None:
            return int(early_exit)

-       downloaded_count += self._process_provider_items(
+       provider_downloaded, magnet_submissions = self._process_provider_items(
            piped_items=piped_items,
            final_output_dir=final_output_dir,
            config=config,
@@ -3580,9 +3671,13 @@ class Download_File(Cmdlet):
            registry=registry,
            progress=progress,
        )
+       downloaded_count += provider_downloaded

-       if downloaded_count > 0 or streaming_downloaded > 0:
-           debug(f"✓ Successfully processed {downloaded_count} file(s)")
+       if downloaded_count > 0 or streaming_downloaded > 0 or magnet_submissions > 0:
+           msg = f"✓ Successfully processed {downloaded_count} file(s)"
+           if magnet_submissions:
+               msg += f" and queued {magnet_submissions} magnet(s)"
+           debug(msg)
            return 0

        if streaming_exit_code is not None:

@@ -255,7 +255,7 @@ def _pick_supported_ytdlp_url(urls: List[str]) -> Optional[str]:

    # Prefer a true support check when the Python module is available.
    try:
-       from SYS.download import is_url_supported_by_ytdlp
+       from tool.ytdlp import is_url_supported_by_ytdlp

        for text in candidates:
            try:

@@ -246,7 +246,7 @@ class search_file(Cmdlet):
        else:
            table_title = f"{provider_label}: {query}".strip().rstrip(":")

-       preserve_order = provider_lower in {"youtube", "openlibrary", "loc"}
+       preserve_order = provider_lower in {"youtube", "openlibrary", "loc", "torrent"}
        table_type = provider_name
        table_meta: Dict[str, Any] = {"provider": provider_name}
        if provider_lower == "hifi":