MPV helper/IPC rework: single MPVIPCClient transport, helper-only ops (store choices, yt-dlp formats), download progress bars, and filename handling fixes

This commit is contained in:
nose
2025-12-18 22:50:21 -08:00
parent 76691dbbf5
commit d637532237
16 changed files with 2587 additions and 299 deletions

2
.gitignore vendored
View File

@@ -220,3 +220,5 @@ luac.out
config.conf
config.d/
MPV/ffmpeg/*
MPV/portable_config/*

25
CLI.py
View File

@@ -549,6 +549,20 @@ def _get_cmdlet_args(cmd_name: str) -> List[str]:
        return []
def get_store_choices() -> List[str]:
"""Return configured store backend names.
This is the same list used for REPL/Typer autocomplete for `-store`.
"""
try:
from Store import Store
storage = Store(_load_cli_config(), suppress_debug=True)
backends = storage.list_backends()
return list(backends or [])
except Exception:
return []
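For orientation, a choice function like this usually plugs into Typer shell completion through the option's autocompletion callback. A minimal sketch assuming a Typer app; the command and option wiring here are illustrative, not part of this commit (the repo's own REPL parser spells the flag -store):

import typer

app = typer.Typer()

def _complete_store(incomplete: str) -> list[str]:
    # Reuse the same backend list the REPL completer uses.
    return [name for name in get_store_choices() if name.startswith(incomplete)]

@app.command()
def add_file(store: str = typer.Option("", "--store", autocompletion=_complete_store)):
    typer.echo(f"store={store}")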
def _get_arg_choices(cmd_name: str, arg_name: str) -> List[str]:
    """Get list of valid choices for a specific cmdlet argument."""
    try:
@@ -558,14 +572,9 @@ def _get_arg_choices(cmd_name: str, arg_name: str) -> List[str]:
        # Dynamic storage backends: use current config to enumerate available storages
        # Support both "storage" and "store" argument names
        if normalized_arg in ("storage", "store"):
-            try:
-                from Store import Store
-                storage = Store(_load_cli_config(), suppress_debug=True)
-                backends = storage.list_backends()
-                if backends:
-                    return backends
-            except Exception:
-                pass
+            backends = get_store_choices()
+            if backends:
+                return backends
        # Dynamic search providers
        if normalized_arg == "provider":

File diff suppressed because it is too large

View File

@@ -198,11 +198,13 @@ class MPV:
        ipc_path: Optional[str] = None,
        lua_script_path: Optional[str | Path] = None,
        timeout: float = 5.0,
+        check_mpv: bool = True,
    ) -> None:
-        ok, reason = _check_mpv_availability()
-        if not ok:
-            raise MPVIPCError(reason or "MPV unavailable")
+        if bool(check_mpv):
+            ok, reason = _check_mpv_availability()
+            if not ok:
+                raise MPVIPCError(reason or "MPV unavailable")
        self.timeout = timeout
        self.ipc_path = ipc_path or get_ipc_pipe_path()
@@ -246,6 +248,66 @@ class MPV:
        resp = self.send({"command": ["set_property", name, value]})
        return bool(resp and resp.get("error") == "success")
def download(
self,
*,
url: str,
fmt: str,
store: Optional[str] = None,
path: Optional[str] = None,
) -> Dict[str, Any]:
"""Download a URL using the same pipeline semantics as the MPV UI.
This is intended as a stable Python entrypoint for "button actions".
It does not require mpv.exe availability (set check_mpv=False if needed).
"""
url = str(url or "").strip()
fmt = str(fmt or "").strip()
store = str(store or "").strip() if store is not None else None
path = str(path or "").strip() if path is not None else None
if not url:
return {"success": False, "stdout": "", "stderr": "", "error": "Missing url"}
if not fmt:
return {"success": False, "stdout": "", "stderr": "", "error": "Missing fmt"}
if bool(store) == bool(path):
return {
"success": False,
"stdout": "",
"stderr": "",
"error": "Provide exactly one of store or path",
}
# Ensure any in-process cmdlets that talk to MPV pick up this IPC path.
try:
os.environ["MEDEIA_MPV_IPC"] = str(self.ipc_path)
except Exception:
pass
def _q(s: str) -> str:
return '"' + s.replace('\\', '\\\\').replace('"', '\\"') + '"'
pipeline = f"download-media -url {_q(url)} -format {_q(fmt)}"
if store:
pipeline += f" | add-file -store {_q(store)}"
else:
pipeline += f" | add-file -path {_q(path or '')}"
try:
from TUI.pipeline_runner import PipelineExecutor # noqa: WPS433
executor = PipelineExecutor()
result = executor.run_pipeline(pipeline)
return {
"success": bool(getattr(result, "success", False)),
"stdout": getattr(result, "stdout", "") or "",
"stderr": getattr(result, "stderr", "") or "",
"error": getattr(result, "error", None),
"pipeline": pipeline,
}
except Exception as exc:
return {"success": False, "stdout": "", "stderr": "", "error": f"{type(exc).__name__}: {exc}", "pipeline": pipeline}
    def get_playlist(self, silent: bool = False) -> Optional[List[Dict[str, Any]]]:
        resp = self.send({"command": ["get_property", "playlist"], "request_id": 100}, silent=silent)
        if resp is None:
@@ -427,13 +489,62 @@ class MPV:
        http_header_fields: Optional[str] = None,
        detached: bool = True,
    ) -> None:
# uosc reads its config from "~~/script-opts/uosc.conf".
# With --no-config, mpv makes ~~ expand to an empty string, so uosc can't load.
# Instead, point mpv at a repo-controlled config directory.
try:
repo_root = Path(__file__).resolve().parent.parent
except Exception:
repo_root = Path.cwd()
portable_config_dir = repo_root / "MPV" / "portable_config"
try:
(portable_config_dir / "script-opts").mkdir(parents=True, exist_ok=True)
except Exception:
pass
# Ensure uosc.conf is available at the location uosc expects.
try:
src_uosc_conf = repo_root / "MPV" / "LUA" / "uosc" / "uosc.conf"
dst_uosc_conf = portable_config_dir / "script-opts" / "uosc.conf"
if src_uosc_conf.exists():
# Only seed a default config if the user doesn't already have one.
if not dst_uosc_conf.exists():
dst_uosc_conf.write_bytes(src_uosc_conf.read_bytes())
except Exception:
pass
        cmd: List[str] = [
            "mpv",
f"--config-dir={str(portable_config_dir)}",
# Allow mpv to auto-load scripts from <config-dir>/scripts/ (e.g., thumbfast).
"--load-scripts=yes",
"--osc=no",
"--load-console=no",
"--load-commands=no",
"--load-select=no",
"--load-context-menu=no",
"--load-positioning=no",
"--load-stats-overlay=no",
"--load-auto-profiles=no",
"--ytdl=yes",
f"--input-ipc-server={self.ipc_path}", f"--input-ipc-server={self.ipc_path}",
"--idle=yes", "--idle=yes",
"--force-window=yes", "--force-window=yes",
] ]
# uosc and other scripts are expected to be auto-loaded from portable_config/scripts.
# We keep the back-compat fallback only if the user hasn't installed uosc.lua there.
try:
uosc_entry = portable_config_dir / "scripts" / "uosc.lua"
if not uosc_entry.exists() and self.lua_script_path:
lua_dir = Path(self.lua_script_path).resolve().parent
uosc_main = lua_dir / "uosc" / "scripts" / "uosc" / "main.lua"
if uosc_main.exists():
cmd.append(f"--script={str(uosc_main)}")
except Exception:
pass
        # Always load the bundled Lua script at startup.
        if self.lua_script_path and os.path.exists(self.lua_script_path):
            cmd.append(f"--script={self.lua_script_path}")
@@ -519,6 +630,112 @@ class MPVIPCClient:
        self.sock: socket.socket | BinaryIO | None = None
        self.is_windows = platform.system() == "Windows"
        self.silent = bool(silent)
self._recv_buffer: bytes = b""
def _write_payload(self, payload: str) -> None:
if not self.sock:
if not self.connect():
raise MPVIPCError("Not connected")
if self.is_windows:
pipe = cast(BinaryIO, self.sock)
pipe.write(payload.encode("utf-8"))
pipe.flush()
else:
sock_obj = cast(socket.socket, self.sock)
sock_obj.sendall(payload.encode("utf-8"))
def _readline(self, *, timeout: Optional[float] = None) -> Optional[bytes]:
if not self.sock:
if not self.connect():
return None
effective_timeout = self.timeout if timeout is None else float(timeout)
deadline = _time.time() + max(0.0, effective_timeout)
if self.is_windows:
try:
pipe = cast(BinaryIO, self.sock)
return pipe.readline()
except (OSError, IOError):
return None
# Unix: buffer until newline.
sock_obj = cast(socket.socket, self.sock)
while True:
nl = self._recv_buffer.find(b"\n")
if nl != -1:
line = self._recv_buffer[: nl + 1]
self._recv_buffer = self._recv_buffer[nl + 1 :]
return line
remaining = deadline - _time.time()
if remaining <= 0:
return None
try:
# Temporarily narrow timeout for this read.
old_timeout = sock_obj.gettimeout()
try:
sock_obj.settimeout(remaining)
chunk = sock_obj.recv(4096)
finally:
sock_obj.settimeout(old_timeout)
except socket.timeout:
return None
except Exception:
return None
if not chunk:
# EOF
return b""
self._recv_buffer += chunk
def read_message(self, *, timeout: Optional[float] = None) -> Optional[Dict[str, Any]]:
"""Read the next JSON message/event from MPV.
Returns:
- dict: parsed JSON message/event
- {"event": "__eof__"} if the stream ended
- None on timeout / no data
"""
raw = self._readline(timeout=timeout)
if raw is None:
return None
if raw == b"":
return {"event": "__eof__"}
try:
return json.loads(raw.decode("utf-8", errors="replace").strip())
except Exception:
return None
def send_command_no_wait(self, command_data: Dict[str, Any] | List[Any]) -> Optional[int]:
"""Send a command to mpv without waiting for its response.
This is important for long-running event loops (helpers) so we don't
consume/lose async events (like property-change) while waiting.
"""
try:
request: Dict[str, Any]
if isinstance(command_data, list):
request = {"command": command_data}
else:
request = dict(command_data)
if "request_id" not in request:
request["request_id"] = int(_time.time() * 1000) % 100000
payload = json.dumps(request) + "\n"
self._write_payload(payload)
return int(request["request_id"])
except Exception as exc:
if not self.silent:
debug(f"Error sending no-wait command to MPV: {exc}")
try:
self.disconnect()
except Exception:
pass
return None
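A sketch of the event-loop pattern these additions enable, assuming an mpv instance is listening on the pipe returned by get_ipc_pipe_path(); subscribing with the no-wait sender means async property-change events are never consumed by a blocking reply wait:

client = MPVIPCClient(socket_path=get_ipc_pipe_path(), timeout=0.5, silent=True)
if client.connect():
    client.send_command_no_wait(["observe_property", 1, "pause", "string"])
    while True:
        msg = client.read_message(timeout=0.25)
        if msg is None:
            continue  # timeout; poll again
        if msg.get("event") == "__eof__":
            break     # mpv closed the pipe
        if msg.get("event") == "property-change" and msg.get("id") == 1:
            print("pause is now", msg.get("data"))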
    def connect(self) -> bool:
        """Connect to mpv IPC socket.
@@ -592,44 +809,22 @@ class MPVIPCClient:
_debug(f"[IPC] Sending: {payload.strip()}") _debug(f"[IPC] Sending: {payload.strip()}")
# Send command # Send command
if self.is_windows: self._write_payload(payload)
pipe = cast(BinaryIO, self.sock)
pipe.write(payload.encode("utf-8"))
pipe.flush()
else:
sock_obj = cast(socket.socket, self.sock)
sock_obj.sendall(payload.encode("utf-8"))
# Receive response # Receive response
# We need to read lines until we find the one with matching request_id # We need to read lines until we find the one with matching request_id
# or until timeout/error. MPV might send events in between. # or until timeout/error. MPV might send events in between.
start_time = _time.time() start_time = _time.time()
while _time.time() - start_time < self.timeout: while _time.time() - start_time < self.timeout:
response_data = b"" response_data = self._readline(timeout=self.timeout)
if self.is_windows: if response_data is None:
try: return None
pipe = cast(BinaryIO, self.sock)
response_data = pipe.readline()
except (OSError, IOError):
return None
else:
try:
# This is simplistic for Unix socket (might not get full line)
# But for now assuming MPV sends line-buffered JSON
sock_obj = cast(socket.socket, self.sock)
chunk = sock_obj.recv(4096)
if not chunk:
break
response_data = chunk
# TODO: Handle partial lines if needed
except socket.timeout:
return None
if not response_data: if not response_data:
break break
try: try:
lines = response_data.decode('utf-8').strip().split('\n') lines = response_data.decode('utf-8', errors='replace').strip().split('\n')
for line in lines: for line in lines:
if not line: continue if not line: continue
resp = json.loads(line) resp = json.loads(line)

View File

@@ -17,15 +17,18 @@ Protocol (user-data properties):
This helper is intentionally minimal: one request at a time, last-write-wins.
"""

from __future__ import annotations
MEDEIA_MPV_HELPER_VERSION = "2025-12-19"
import argparse
import json
import os
import sys
import tempfile
import time
import logging
import re
from pathlib import Path
from typing import Any, Dict, Optional
@@ -40,7 +43,9 @@ if _ROOT not in sys.path:
    sys.path.insert(0, _ROOT)

-from SYS.tasks import connect_ipc  # noqa: E402
+from MPV.mpv_ipc import MPVIPCClient  # noqa: E402
from config import load_config # noqa: E402
from SYS.logger import set_debug, debug, set_thread_stream # noqa: E402
REQUEST_PROP = "user-data/medeia-pipeline-request"
@@ -49,68 +54,361 @@ READY_PROP = "user-data/medeia-pipeline-ready"
OBS_ID_REQUEST = 1001
-def _json_line(payload: Dict[str, Any]) -> bytes:
-    return (json.dumps(payload, ensure_ascii=False) + "\n").encode("utf-8")
-
-
-class MPVWire:
-    def __init__(self, ipc_path: str, *, timeout: float = 5.0) -> None:
-        self.ipc_path = ipc_path
-        self.timeout = timeout
-        self._fh: Optional[Any] = None
-        self._req_id = 1
-
-    def connect(self) -> bool:
-        self._fh = connect_ipc(self.ipc_path, timeout=self.timeout)
-        return self._fh is not None
-
-    @property
-    def fh(self):
-        if self._fh is None:
-            raise RuntimeError("Not connected")
-        return self._fh
-
-    def send(self, command: list[Any]) -> int:
-        self._req_id = (self._req_id + 1) % 1000000
-        req_id = self._req_id
-        self.fh.write(_json_line({"command": command, "request_id": req_id}))
-        self.fh.flush()
-        return req_id
-
-    def set_property(self, name: str, value: Any) -> int:
-        return self.send(["set_property", name, value])
-
-    def observe_property(self, obs_id: int, name: str, fmt: str = "string") -> int:
-        # mpv requires an explicit format argument.
-        return self.send(["observe_property", obs_id, name, fmt])
-
-    def read_message(self) -> Optional[Dict[str, Any]]:
-        raw = self.fh.readline()
-        if raw == b"":
-            return {"event": "__eof__"}
-        if not raw:
-            return None
-        try:
-            return json.loads(raw.decode("utf-8", errors="replace"))
-        except Exception:
-            return None
def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
    # Import after sys.path fix.
    from TUI.pipeline_runner import PipelineExecutor  # noqa: WPS433
def _table_to_payload(table: Any) -> Optional[Dict[str, Any]]:
if table is None:
return None
try:
title = getattr(table, "title", "")
except Exception:
title = ""
rows_payload = []
try:
rows = getattr(table, "rows", None)
except Exception:
rows = None
if isinstance(rows, list):
for r in rows:
cols_payload = []
try:
cols = getattr(r, "columns", None)
except Exception:
cols = None
if isinstance(cols, list):
for c in cols:
try:
cols_payload.append(
{
"name": getattr(c, "name", ""),
"value": getattr(c, "value", ""),
}
)
except Exception:
continue
sel_args = None
try:
sel = getattr(r, "selection_args", None)
if isinstance(sel, list):
sel_args = [str(x) for x in sel]
except Exception:
sel_args = None
rows_payload.append({"columns": cols_payload, "selection_args": sel_args})
# Only return JSON-serializable data (Lua only needs title + rows).
return {"title": str(title or ""), "rows": rows_payload}
    executor = PipelineExecutor()
    result = executor.run_pipeline(pipeline_text, seeds=seeds)
table_payload = None
try:
table_payload = _table_to_payload(getattr(result, "result_table", None))
except Exception:
table_payload = None
    return {
        "success": bool(result.success),
        "stdout": result.stdout or "",
        "stderr": result.stderr or "",
        "error": result.error,
        "table": table_payload,
    }
def _run_op(op: str, data: Any) -> Dict[str, Any]:
"""Run a helper-only operation.
These are NOT cmdlets and are not available via CLI pipelines. They exist
solely so MPV Lua can query lightweight metadata (e.g., autocomplete lists)
without inventing user-facing commands.
"""
op_name = str(op or "").strip().lower()
# Provide store backend choices using the same source as CLI/Typer autocomplete.
if op_name in {"store-choices", "store_choices", "get-store-choices", "get_store_choices"}:
# Preferred: call the same choice function used by the CLI completer.
try:
from CLI import get_store_choices # noqa: WPS433
backends = get_store_choices()
choices = sorted({str(n) for n in (backends or []) if str(n).strip()})
except Exception:
# Fallback: direct Store registry enumeration using loaded config.
try:
cfg = load_config() or {}
except Exception:
cfg = {}
try:
from Store import Store # noqa: WPS433
storage = Store(cfg, suppress_debug=True)
backends = storage.list_backends() or []
choices = sorted({str(n) for n in backends if str(n).strip()})
except Exception as exc:
return {
"success": False,
"stdout": "",
"stderr": "",
"error": f"{type(exc).__name__}: {exc}",
"table": None,
"choices": [],
}
return {
"success": True,
"stdout": "",
"stderr": "",
"error": None,
"table": None,
"choices": choices,
}
# Provide yt-dlp format list for a URL (for MPV "Change format" menu).
# Returns a ResultTable-like payload so the Lua UI can render without running cmdlets.
if op_name in {"ytdlp-formats", "ytdlp_formats", "ytdl-formats", "ytdl_formats"}:
try:
url = None
if isinstance(data, dict):
url = data.get("url")
url = str(url or "").strip()
if not url:
return {
"success": False,
"stdout": "",
"stderr": "",
"error": "Missing url",
"table": None,
}
# Fast gate: only for streaming URLs yt-dlp knows about.
try:
from SYS.download import is_url_supported_by_ytdlp # noqa: WPS433
if not is_url_supported_by_ytdlp(url):
return {
"success": False,
"stdout": "",
"stderr": "",
"error": "URL not supported by yt-dlp",
"table": None,
}
except Exception:
# If probing support fails, still attempt extraction and let yt-dlp decide.
pass
try:
import yt_dlp # type: ignore
except Exception as exc:
return {
"success": False,
"stdout": "",
"stderr": "",
"error": f"yt-dlp module not available: {type(exc).__name__}: {exc}",
"table": None,
}
cookiefile = None
try:
from tool.ytdlp import YtDlpTool # noqa: WPS433
cfg = load_config() or {}
cookie_path = YtDlpTool(cfg).resolve_cookiefile()
if cookie_path is not None:
cookiefile = str(cookie_path)
except Exception:
cookiefile = None
ydl_opts: Dict[str, Any] = {
"quiet": True,
"no_warnings": True,
"socket_timeout": 20,
"retries": 2,
"skip_download": True,
# Avoid accidentally expanding huge playlists on load.
"noplaylist": True,
"noprogress": True,
}
if cookiefile:
ydl_opts["cookiefile"] = cookiefile
def _format_bytes(n: Any) -> str:
try:
v = float(n)
except Exception:
return ""
if v <= 0:
return ""
units = ["B", "KB", "MB", "GB", "TB"]
i = 0
while v >= 1024 and i < len(units) - 1:
v /= 1024.0
i += 1
if i == 0:
return f"{int(v)} {units[i]}"
return f"{v:.1f} {units[i]}"
with yt_dlp.YoutubeDL(ydl_opts) as ydl: # type: ignore[attr-defined]
info = ydl.extract_info(url, download=False)
if not isinstance(info, dict):
return {
"success": False,
"stdout": "",
"stderr": "",
"error": "yt-dlp returned non-dict info",
"table": None,
}
formats = info.get("formats")
if not isinstance(formats, list) or not formats:
return {
"success": True,
"stdout": "",
"stderr": "",
"error": None,
"table": {"title": "Formats", "rows": []},
}
rows = []
for fmt in formats:
if not isinstance(fmt, dict):
continue
format_id = str(fmt.get("format_id") or "").strip()
if not format_id:
continue
# Prefer human-ish resolution.
resolution = str(fmt.get("resolution") or "").strip()
if not resolution:
w = fmt.get("width")
h = fmt.get("height")
try:
if w and h:
resolution = f"{int(w)}x{int(h)}"
elif h:
resolution = f"{int(h)}p"
except Exception:
resolution = ""
ext = str(fmt.get("ext") or "").strip()
size = _format_bytes(fmt.get("filesize") or fmt.get("filesize_approx"))
# Build selection args compatible with MPV Lua picker.
selection_args = ["-format", format_id]
rows.append(
{
"columns": [
{"name": "ID", "value": format_id},
{"name": "Resolution", "value": resolution or ""},
{"name": "Ext", "value": ext or ""},
{"name": "Size", "value": size or ""},
],
"selection_args": selection_args,
}
)
return {
"success": True,
"stdout": "",
"stderr": "",
"error": None,
"table": {"title": "Formats", "rows": rows},
}
except Exception as exc:
return {
"success": False,
"stdout": "",
"stderr": "",
"error": f"{type(exc).__name__}: {exc}",
"table": None,
}
return {
"success": False,
"stdout": "",
"stderr": "",
"error": f"Unknown op: {op_name}",
"table": None,
}
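For reference, the two helper-only ops correspond to request payloads shaped like these (inferred from the parsing above; the id values are arbitrary), and _run_op can also be exercised directly in-process:

store_req = {"id": "req-1", "op": "store-choices"}
formats_req = {"id": "req-2", "op": "ytdlp-formats", "data": {"url": "https://example.com/watch"}}

# Direct call, e.g. in a test:
print(_run_op("store-choices", None).get("choices"))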
def _helper_log_path() -> Path:
try:
d = _repo_root() / "Log"
d.mkdir(parents=True, exist_ok=True)
return d / "medeia-mpv-helper.log"
except Exception:
return Path(tempfile.gettempdir()) / "medeia-mpv-helper.log"
def _append_helper_log(text: str) -> None:
payload = (text or "").rstrip()
if not payload:
return
try:
path = _helper_log_path()
path.parent.mkdir(parents=True, exist_ok=True)
with open(path, "a", encoding="utf-8", errors="replace") as fh:
fh.write(payload + "\n")
except Exception:
return
def _acquire_ipc_lock(ipc_path: str) -> Optional[Any]:
"""Best-effort singleton lock per IPC path.
Multiple helpers subscribing to mpv log-message events causes duplicated
log output. Use a tiny file lock to ensure one helper per mpv instance.
"""
try:
safe = re.sub(r"[^a-zA-Z0-9_.-]+", "_", str(ipc_path or ""))
if not safe:
safe = "mpv"
# Keep lock files out of the repo's Log/ directory to avoid clutter.
lock_dir = Path(tempfile.gettempdir()) / "medeia-mpv-helper"
lock_dir.mkdir(parents=True, exist_ok=True)
lock_path = lock_dir / f"medeia-mpv-helper-{safe}.lock"
fh = open(lock_path, "a+", encoding="utf-8", errors="replace")
if os.name == "nt":
try:
import msvcrt # type: ignore
fh.seek(0)
msvcrt.locking(fh.fileno(), msvcrt.LK_NBLCK, 1)
except Exception:
try:
fh.close()
except Exception:
pass
return None
else:
try:
import fcntl # type: ignore
fcntl.flock(fh.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
except Exception:
try:
fh.close()
except Exception:
pass
return None
return fh
except Exception:
return None
def _parse_request(data: Any) -> Optional[Dict[str, Any]]:
    if data is None:
        return None
@@ -131,14 +429,87 @@ def _parse_request(data: Any) -> Optional[Dict[str, Any]]:
def main(argv: Optional[list[str]] = None) -> int:
    parser = argparse.ArgumentParser(prog="mpv-pipeline-helper")
    parser.add_argument("--ipc", required=True, help="mpv --input-ipc-server path")
-    parser.add_argument("--timeout", type=float, default=5.0)
+    parser.add_argument("--timeout", type=float, default=15.0)
    args = parser.parse_args(argv)
# Load config once and configure logging similar to CLI.pipeline.
try:
cfg = load_config() or {}
except Exception:
cfg = {}
try:
debug_enabled = bool(isinstance(cfg, dict) and cfg.get("debug", False))
set_debug(debug_enabled)
if debug_enabled:
logging.basicConfig(
level=logging.DEBUG,
format='[%(name)s] %(levelname)s: %(message)s',
stream=sys.stderr,
)
for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"):
try:
logging.getLogger(noisy).setLevel(logging.WARNING)
except Exception:
pass
except Exception:
pass
    # Ensure all in-process cmdlets that talk to MPV pick up the exact IPC server
    # path used by this helper (which comes from the running MPV instance).
    os.environ["MEDEIA_MPV_IPC"] = str(args.ipc)

-    error_log_dir = Path(tempfile.gettempdir())
+    # Ensure single helper instance per ipc.
_lock_fh = _acquire_ipc_lock(str(args.ipc))
if _lock_fh is None:
_append_helper_log(f"[helper] another instance already holds lock for ipc={args.ipc}; exiting")
return 0
try:
_append_helper_log(f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}")
debug(f"[mpv-helper] logging to: {_helper_log_path()}")
except Exception:
pass
# Route SYS.logger output into the helper log file so diagnostics are not
# lost in mpv's console/terminal output.
try:
class _HelperLogStream:
def __init__(self) -> None:
self._pending = ""
def write(self, s: str) -> int:
if not s:
return 0
text = self._pending + str(s)
lines = text.splitlines(keepends=True)
if lines and not lines[-1].endswith(("\n", "\r")):
self._pending = lines[-1]
lines = lines[:-1]
else:
self._pending = ""
for line in lines:
payload = line.rstrip("\r\n")
if payload:
_append_helper_log("[py] " + payload)
return len(s)
def flush(self) -> None:
if self._pending:
_append_helper_log("[py] " + self._pending.rstrip("\r\n"))
self._pending = ""
set_thread_stream(_HelperLogStream())
except Exception:
pass
# Prefer a stable repo-local log folder for discoverability.
error_log_dir = _repo_root() / "Log"
try:
error_log_dir.mkdir(parents=True, exist_ok=True)
except Exception:
error_log_dir = Path(tempfile.gettempdir())
    last_error_log = error_log_dir / "medeia-mpv-pipeline-last-error.log"

    def _write_error_log(text: str, *, req_id: str) -> Optional[str]:
@@ -164,33 +535,131 @@ def main(argv: Optional[list[str]] = None) -> int:
        return str(stamped) if stamped else str(last_error_log)

-    wire = MPVWire(args.ipc, timeout=float(args.timeout))
-    if not wire.connect():
-        return 2
+    # Connect to mpv's JSON IPC. On Windows, the pipe can exist but reject opens
+    # briefly during startup; also mpv may create the IPC server slightly after
+    # the Lua script launches us. Retry until timeout.
connect_deadline = time.time() + max(0.5, float(args.timeout))
last_connect_error: Optional[str] = None
-    # Mark ready ASAP.
+    client = MPVIPCClient(socket_path=args.ipc, timeout=0.5, silent=True)
while True:
try:
if client.connect():
break
except Exception as exc:
last_connect_error = f"{type(exc).__name__}: {exc}"
if time.time() > connect_deadline:
_append_helper_log(f"[helper] failed to connect ipc={args.ipc} error={last_connect_error or 'timeout'}")
return 2
# Keep trying.
time.sleep(0.10)
# Mark ready ASAP and keep it fresh.
# Use a unix timestamp so the Lua side can treat it as a heartbeat.
last_ready_ts: float = 0.0
def _touch_ready() -> None:
nonlocal last_ready_ts
now = time.time()
# Throttle updates to reduce IPC chatter.
if (now - last_ready_ts) < 0.75:
return
try:
client.send_command_no_wait(["set_property", READY_PROP, str(int(now))])
last_ready_ts = now
except Exception:
return
_touch_ready()
# Mirror mpv's own log messages into our helper log file so debugging does
# not depend on the mpv on-screen console or mpv's log-file.
    try:
-        wire.set_property(READY_PROP, "1")
+        level = "debug" if debug_enabled else "warn"
+        client.send_command_no_wait(["request_log_messages", level])
+        _append_helper_log(f"[helper] requested mpv log messages level={level}")
    except Exception:
        pass
# De-dup/throttle mpv log-message lines (mpv and yt-dlp can be very chatty).
last_mpv_line: Optional[str] = None
last_mpv_count: int = 0
last_mpv_ts: float = 0.0
def _flush_mpv_repeat() -> None:
nonlocal last_mpv_line, last_mpv_count
if last_mpv_line and last_mpv_count > 1:
_append_helper_log(f"[mpv] (previous line repeated {last_mpv_count}x)")
last_mpv_line = None
last_mpv_count = 0
    # Observe request property changes.
    try:
-        wire.observe_property(OBS_ID_REQUEST, REQUEST_PROP, "string")
+        client.send_command_no_wait(["observe_property", OBS_ID_REQUEST, REQUEST_PROP, "string"])
    except Exception:
        return 3

    last_seen_id: Optional[str] = None
try:
_append_helper_log(f"[helper] connected to ipc={args.ipc}")
except Exception:
pass
    while True:
-        msg = wire.read_message()
+        msg = client.read_message(timeout=0.25)
        if msg is None:
-            time.sleep(0.05)
+            # Keep READY fresh even when idle (Lua may clear it on timeouts).
+            _touch_ready()
+            time.sleep(0.02)
            continue

        if msg.get("event") == "__eof__":
try:
_flush_mpv_repeat()
except Exception:
pass
            return 0
if msg.get("event") == "log-message":
try:
level = str(msg.get("level") or "")
prefix = str(msg.get("prefix") or "")
text = str(msg.get("text") or "").rstrip()
if not text:
continue
# Filter excessive noise unless debug is enabled.
if not debug_enabled:
lower_prefix = prefix.lower()
if "quic" in lower_prefix and "DEBUG:" in text:
continue
# Suppress progress-bar style lines (keep true errors).
if ("ETA" in text or "%" in text) and ("ERROR:" not in text and "WARNING:" not in text):
# Typical yt-dlp progress bar line.
if text.lstrip().startswith("["):
continue
line = f"[mpv {level}] {prefix} {text}".strip()
now = time.time()
if last_mpv_line == line and (now - last_mpv_ts) < 2.0:
last_mpv_count += 1
last_mpv_ts = now
continue
_flush_mpv_repeat()
last_mpv_line = line
last_mpv_count = 1
last_mpv_ts = now
_append_helper_log(line)
except Exception:
pass
continue
if msg.get("event") != "property-change": if msg.get("event") != "property-change":
continue continue
@@ -202,10 +671,12 @@ def main(argv: Optional[list[str]] = None) -> int:
            continue

        req_id = str(req.get("id") or "")
op = str(req.get("op") or "").strip()
data = req.get("data")
        pipeline_text = str(req.get("pipeline") or "").strip()
        seeds = req.get("seeds")

-        if not req_id or not pipeline_text:
+        if not req_id:
            continue

        if last_seen_id == req_id:
@@ -213,14 +684,29 @@ def main(argv: Optional[list[str]] = None) -> int:
        last_seen_id = req_id

        try:
-            run = _run_pipeline(pipeline_text, seeds=seeds)
+            label = pipeline_text if pipeline_text else (op and ("op=" + op) or "(empty)")
_append_helper_log(f"\n[request {req_id}] {label}")
except Exception:
pass
try:
if op:
run = _run_op(op, data)
else:
if not pipeline_text:
continue
run = _run_pipeline(pipeline_text, seeds=seeds)
            resp = {
                "id": req_id,
                "success": bool(run.get("success")),
                "stdout": run.get("stdout", ""),
                "stderr": run.get("stderr", ""),
                "error": run.get("error"),
                "table": run.get("table"),
            }
if "choices" in run:
resp["choices"] = run.get("choices")
        except Exception as exc:
            resp = {
                "id": req_id,
@@ -228,8 +714,20 @@ def main(argv: Optional[list[str]] = None) -> int:
"stdout": "", "stdout": "",
"stderr": "", "stderr": "",
"error": f"{type(exc).__name__}: {exc}", "error": f"{type(exc).__name__}: {exc}",
"table": None,
} }
# Persist helper output for debugging MPV menu interactions.
try:
if resp.get("stdout"):
_append_helper_log("[stdout]\n" + str(resp.get("stdout")))
if resp.get("stderr"):
_append_helper_log("[stderr]\n" + str(resp.get("stderr")))
if resp.get("error"):
_append_helper_log("[error]\n" + str(resp.get("error")))
except Exception:
pass
if not resp.get("success"): if not resp.get("success"):
details = "" details = ""
if resp.get("error"): if resp.get("error"):
@@ -241,7 +739,9 @@ def main(argv: Optional[list[str]] = None) -> int:
resp["log_path"] = log_path resp["log_path"] = log_path
try: try:
wire.set_property(RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)) # IMPORTANT: don't wait for a response here; waiting would consume
# async events and can drop/skip property-change notifications.
client.send_command_no_wait(["set_property", RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)])
except Exception: except Exception:
# If posting results fails, there's nothing more useful to do. # If posting results fails, there's nothing more useful to do.
pass pass
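End to end, the property protocol can be smoke-tested without any Lua, using only MPVIPCClient methods shown earlier. A sketch assuming mpv and this helper are both running against the same IPC path; read_message may hand back unrelated events, so the loop simply retries:

import json, os, time

probe = MPVIPCClient(socket_path=os.environ["MEDEIA_MPV_IPC"], timeout=1.0, silent=True)
if probe.connect():
    request = {"id": "smoke-1", "pipeline": "download-media -url https://example.com/v -format best"}
    probe.send_command_no_wait(["set_property", REQUEST_PROP, json.dumps(request)])
    deadline = time.time() + 60
    while time.time() < deadline:
        req_id = probe.send_command_no_wait(["get_property", RESPONSE_PROP])
        msg = probe.read_message(timeout=0.5)
        if msg and msg.get("request_id") == req_id and msg.get("data"):
            payload = json.loads(msg["data"])
            if payload.get("id") == request["id"]:
                print(payload.get("success"), payload.get("error"))
                break
        time.sleep(0.25)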

View File

@@ -1,9 +1,11 @@
from __future__ import annotations
from pathlib import Path
import sys
from typing import Any, Dict, Iterable, List, Optional

from ProviderCore.base import SearchProvider, SearchResult
from ProviderCore.download import sanitize_filename
from SYS.logger import log
@@ -66,6 +68,89 @@ class AllDebrid(SearchProvider):
# Consider "available" when configured; actual API connectivity can vary. # Consider "available" when configured; actual API connectivity can vary.
return bool(_get_debrid_api_key(self.config or {})) return bool(_get_debrid_api_key(self.config or {}))
def download(self, result: SearchResult, output_dir: Path) -> Optional[Path]:
"""Download an AllDebrid SearchResult into output_dir.
AllDebrid magnet file listings often provide links that require an API
"unlock" step to produce a true direct-download URL. Without unlocking,
callers may download a small HTML/redirect page instead of file bytes.
This is used by the download-file cmdlet when a provider item is piped.
"""
try:
api_key = _get_debrid_api_key(self.config or {})
if not api_key:
return None
target = str(getattr(result, "path", "") or "").strip()
if not target.startswith(("http://", "https://")):
return None
try:
from API.alldebrid import AllDebridClient
client = AllDebridClient(api_key)
except Exception as exc:
log(f"[alldebrid] Failed to init client: {exc}", file=sys.stderr)
return None
# Quiet mode when download-file is mid-pipeline.
quiet = bool(self.config.get("_quiet_background_output")) if isinstance(self.config, dict) else False
unlocked_url = target
try:
unlocked = client.unlock_link(target)
if isinstance(unlocked, str) and unlocked.strip().startswith(("http://", "https://")):
unlocked_url = unlocked.strip()
except Exception as exc:
# Fall back to the raw link, but warn.
log(f"[alldebrid] Failed to unlock link: {exc}", file=sys.stderr)
# Prefer provider title as the output filename.
suggested = sanitize_filename(str(getattr(result, "title", "") or "").strip())
suggested_name = suggested if suggested else None
try:
from SYS.download import _download_direct_file
dl_res = _download_direct_file(
unlocked_url,
Path(output_dir),
quiet=quiet,
suggested_filename=suggested_name,
)
downloaded_path = getattr(dl_res, "path", None)
if downloaded_path is None:
return None
downloaded_path = Path(str(downloaded_path))
# Guard: if we got an HTML error/redirect page, treat as failure.
try:
if downloaded_path.exists():
size = downloaded_path.stat().st_size
if size > 0 and size <= 250_000 and downloaded_path.suffix.lower() not in (".html", ".htm"):
head = downloaded_path.read_bytes()[:512]
try:
text = head.decode("utf-8", errors="ignore").lower()
except Exception:
text = ""
if "<html" in text or "<!doctype html" in text:
try:
downloaded_path.unlink()
except Exception:
pass
log("[alldebrid] Download returned HTML page (not file bytes). Try again or check AllDebrid link status.", file=sys.stderr)
return None
except Exception:
pass
return downloaded_path if downloaded_path.exists() else None
except Exception as exc:
log(f"[alldebrid] Download failed: {exc}", file=sys.stderr)
return None
except Exception:
return None
    @staticmethod
    def _flatten_files(items: Any) -> Iterable[Dict[str, Any]]:
        """Flatten AllDebrid magnet file tree into file dicts.

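A usage sketch for the new handler, mirroring how download-file drives provider items; the constructor call and the SearchResult fields here are assumptions for illustration:

from pathlib import Path

provider = AllDebrid(config)  # assumes a configured AllDebrid API key in config
result = SearchResult(title="Some.Release.mkv", path="https://alldebrid.example/link")  # hypothetical fields
saved = provider.download(result, Path("./downloads"))
if saved is None:
    print("unlock failed, download failed, or an HTML page came back")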
View File

@@ -13,6 +13,7 @@ from typing import Any, Dict, List, Optional
from ProviderCore.base import SearchProvider, SearchResult
from SYS.logger import log, debug
from models import ProgressBar
_SOULSEEK_NOISE_SUBSTRINGS = (
@@ -502,58 +503,145 @@ async def download_soulseek_file(
raise RuntimeError("Soulseek credentials not configured (set provider=soulseek username/password)") raise RuntimeError("Soulseek credentials not configured (set provider=soulseek username/password)")
settings = Settings(credentials=CredentialsSettings(username=login_user, password=login_pass)) settings = Settings(credentials=CredentialsSettings(username=login_user, password=login_pass))
-        client = SoulSeekClient(settings)
-        with _suppress_aioslsk_noise():
-            try:
-                await client.start()
-                await client.login()
-                debug(f"[soulseek] Logged in as {login_user}")
-                debug(f"[soulseek] Requesting download from {username}: {filename}")
-                transfer = await client.transfers.add(Transfer(username, filename, TransferDirection.DOWNLOAD))
-                transfer.local_path = str(output_path)
-                await client.transfers.queue(transfer)
-                start_time = time.time()
-                last_log_time = 0.0
-                while not transfer.is_finalized():
-                    if time.time() - start_time > timeout:
-                        log(f"[soulseek] Download timeout after {timeout}s", file=sys.stderr)
-                        return None
-                    if time.time() - last_log_time >= 5.0 and transfer.bytes_transfered > 0:
-                        progress = (transfer.bytes_transfered / transfer.filesize * 100) if transfer.filesize else 0
-                        debug(
-                            f"[soulseek] Progress: {progress:.1f}% "
-                            f"({transfer.bytes_transfered}/{transfer.filesize})"
-                        )
-                        last_log_time = time.time()
-                    await asyncio.sleep(1)
-                if transfer.state.VALUE == TransferState.COMPLETE and transfer.local_path:
-                    downloaded_path = Path(transfer.local_path)
-                    if downloaded_path.exists():
-                        debug(f"[soulseek] Download complete: {downloaded_path}")
-                        return downloaded_path
-                    log(f"[soulseek] Transfer completed but file missing: {downloaded_path}", file=sys.stderr)
-                    return None
-                log(
-                    f"[soulseek] Download failed: state={transfer.state.VALUE} "
-                    f"bytes={transfer.bytes_transfered}/{transfer.filesize}",
-                    file=sys.stderr,
-                )
-                return None
-            finally:
-                try:
-                    await client.stop()
-                except Exception:
-                    pass
+        async def _attempt_once(attempt_num: int) -> tuple[Optional[Path], Any, int, float]:
+            client = SoulSeekClient(settings)
+            with _suppress_aioslsk_noise():
+                try:
+                    await client.start()
+                    await client.login()
+                    debug(f"[soulseek] Logged in as {login_user}")
log(
f"[soulseek] Download attempt {attempt_num}: {username} :: {local_filename}",
file=sys.stderr,
)
debug(f"[soulseek] Requesting download from {username}: {filename}")
transfer = await client.transfers.add(Transfer(username, filename, TransferDirection.DOWNLOAD))
transfer.local_path = str(output_path)
await client.transfers.queue(transfer)
start_time = time.time()
last_progress_time = start_time
progress_bar = ProgressBar()
while not transfer.is_finalized():
elapsed = time.time() - start_time
if elapsed > timeout:
log(f"[soulseek] Download timeout after {timeout}s", file=sys.stderr)
bytes_done = int(getattr(transfer, "bytes_transfered", 0) or 0)
state_val = getattr(getattr(transfer, "state", None), "VALUE", None)
try:
if getattr(sys.stderr, "isatty", lambda: False)():
sys.stderr.write("\r" + (" " * 140) + "\r")
sys.stderr.flush()
except Exception:
pass
return None, state_val, bytes_done, elapsed
bytes_done = int(getattr(transfer, "bytes_transfered", 0) or 0)
total_bytes = int(getattr(transfer, "filesize", 0) or 0)
now = time.time()
if now - last_progress_time >= 0.5:
percent = (bytes_done / total_bytes) * 100.0 if total_bytes > 0 else 0.0
speed = bytes_done / elapsed if elapsed > 0 else 0.0
eta_str: Optional[str] = None
if total_bytes > 0 and speed > 0:
try:
eta_seconds = max(0.0, float(total_bytes - bytes_done) / float(speed))
minutes, seconds = divmod(int(eta_seconds), 60)
hours, minutes = divmod(minutes, 60)
eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}"
except Exception:
eta_str = None
speed_str = progress_bar.format_bytes(speed) + "/s"
progress_line = progress_bar.format_progress(
percent_str=f"{percent:.1f}%",
downloaded=bytes_done,
total=total_bytes if total_bytes > 0 else None,
speed_str=speed_str,
eta_str=eta_str,
)
try:
if getattr(sys.stderr, "isatty", lambda: False)():
sys.stderr.write("\r" + progress_line + " ")
sys.stderr.flush()
else:
log(progress_line, file=sys.stderr)
except Exception:
pass
last_progress_time = now
await asyncio.sleep(1)
final_state = getattr(getattr(transfer, "state", None), "VALUE", None)
downloaded_path = Path(transfer.local_path) if getattr(transfer, "local_path", None) else output_path
final_elapsed = time.time() - start_time
# Clear in-place progress bar.
try:
if getattr(sys.stderr, "isatty", lambda: False)():
sys.stderr.write("\r" + (" " * 140) + "\r")
sys.stderr.flush()
except Exception:
pass
# If a file was written, treat it as success even if state is odd.
try:
if downloaded_path.exists() and downloaded_path.stat().st_size > 0:
if final_state != TransferState.COMPLETE:
log(
f"[soulseek] Transfer finalized as {final_state}, but file exists ({downloaded_path.stat().st_size} bytes). Keeping file.",
file=sys.stderr,
)
return downloaded_path, final_state, int(downloaded_path.stat().st_size), final_elapsed
except Exception:
pass
if final_state == TransferState.COMPLETE and downloaded_path.exists():
debug(f"[soulseek] Download complete: {downloaded_path}")
return downloaded_path, final_state, int(downloaded_path.stat().st_size), final_elapsed
fail_bytes = int(getattr(transfer, "bytes_transfered", 0) or 0)
fail_total = int(getattr(transfer, "filesize", 0) or 0)
reason = getattr(transfer, "reason", None)
log(
f"[soulseek] Download failed: state={final_state} bytes={fail_bytes}/{fail_total} reason={reason}",
file=sys.stderr,
)
# Clean up 0-byte placeholder.
try:
if downloaded_path.exists() and downloaded_path.stat().st_size == 0:
downloaded_path.unlink(missing_ok=True)
except Exception:
pass
return None, final_state, fail_bytes, final_elapsed
finally:
try:
await client.stop()
except Exception:
pass
# Retry a couple times only for fast 0-byte failures (common transient case).
max_attempts = 3
for attempt in range(1, max_attempts + 1):
result_path, final_state, bytes_done, elapsed = await _attempt_once(attempt)
if result_path:
return result_path
should_retry = (bytes_done == 0) and (elapsed < 15.0)
if attempt < max_attempts and should_retry:
log(f"[soulseek] Retrying after fast failure (state={final_state})", file=sys.stderr)
await asyncio.sleep(2)
continue
break
return None
    except ImportError:
        log("[soulseek] aioslsk not installed. Install with: pip install aioslsk", file=sys.stderr)

View File

@@ -510,6 +510,7 @@ def _download_direct_file(
    output_dir: Path,
    debug_logger: Optional[DebugLogger] = None,
    quiet: bool = False,
    suggested_filename: Optional[str] = None,
) -> DownloadMediaResult:
    """Download a direct file (PDF, image, document, etc.) without yt-dlp."""
    ensure_directory(output_dir)
@@ -517,6 +518,44 @@ def _download_direct_file(
    from urllib.parse import unquote, urlparse, parse_qs
    import re
def _sanitize_filename(name: str) -> str:
# Windows-safe filename sanitization.
# Keep it simple: strip path parts, drop invalid chars, collapse whitespace.
text = str(name or "").strip()
if not text:
return ""
# Remove any path components
text = text.replace("/", "\\")
text = text.split("\\")[-1]
invalid = set('<>:"/\\|?*')
cleaned_chars: List[str] = []
for ch in text:
o = ord(ch)
if o < 32:
cleaned_chars.append(" ")
continue
if ch in invalid:
cleaned_chars.append(" ")
continue
cleaned_chars.append(ch)
cleaned = " ".join("".join(cleaned_chars).split()).strip()
# Avoid trailing dots/spaces on Windows
cleaned = cleaned.rstrip(" .")
return cleaned
def _unique_path(path: Path) -> Path:
if not path.exists():
return path
stem = path.stem
suffix = path.suffix
parent = path.parent
for i in range(1, 10_000):
candidate = parent / f"{stem} ({i}){suffix}"
if not candidate.exists():
return candidate
return parent / f"{stem} ({int(time.time())}){suffix}"
    # Extract filename from URL
    parsed_url = urlparse(url)
    url_path = parsed_url.path
@@ -560,11 +599,29 @@ def _download_direct_file(
        if not quiet:
            log(f"Could not get filename from headers: {e}", file=sys.stderr)

-    # Fallback if we still don't have a good filename
+    # Apply suggested filename (from provider title) if given.
suggested = _sanitize_filename(suggested_filename) if suggested_filename else ""
if suggested:
# Preserve extension from suggested name if present; otherwise borrow from detected filename.
suggested_path = Path(suggested)
if suggested_path.suffix:
filename = suggested
else:
detected_ext = ""
try:
detected_ext = Path(str(filename)).suffix
except Exception:
detected_ext = ""
if detected_ext:
filename = suggested + detected_ext
else:
filename = suggested
# Final fallback if we still don't have a good filename
    if not filename or "." not in filename:
        filename = "downloaded_file.bin"

-    file_path = output_dir / filename
+    file_path = _unique_path(output_dir / filename)

    progress_bar = ProgressBar()
    if not quiet:
@@ -581,32 +638,57 @@ def _download_direct_file(
            total_bytes[0] = content_length
        now = time.time()
-        if now - last_progress_time[0] >= 0.5 and total_bytes[0] > 0:
-            elapsed = now - start_time
-            percent = (bytes_downloaded / content_length) * 100 if content_length > 0 else 0
-            speed = bytes_downloaded / elapsed if elapsed > 0 else 0
-            eta_seconds = (content_length - bytes_downloaded) / speed if speed > 0 else 0
-            speed_str = progress_bar.format_bytes(speed) + "/s"
-            minutes, seconds = divmod(int(eta_seconds), 60)
-            hours, minutes = divmod(minutes, 60)
-            eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}"
-            progress_line = progress_bar.format_progress(
-                percent_str=f"{percent:.1f}%",
-                downloaded=bytes_downloaded,
-                total=content_length,
-                speed_str=speed_str,
-                eta_str=eta_str,
-            )
-            if not quiet:
-                debug(progress_line)
-            last_progress_time[0] = now
+        if now - last_progress_time[0] < 0.5:
+            return
+        elapsed = now - start_time
+        percent = (bytes_downloaded / content_length) * 100 if content_length > 0 else 0
+        speed = bytes_downloaded / elapsed if elapsed > 0 else 0
+        eta_str: Optional[str] = None
+        if content_length > 0 and speed > 0:
+            try:
+                eta_seconds = max(0.0, float(content_length - bytes_downloaded) / float(speed))
+                minutes, seconds = divmod(int(eta_seconds), 60)
+                hours, minutes = divmod(minutes, 60)
+                eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}"
+            except Exception:
+                eta_str = None
+        speed_str = progress_bar.format_bytes(speed) + "/s"
+        progress_line = progress_bar.format_progress(
+            percent_str=f"{percent:.1f}%",
+            downloaded=bytes_downloaded,
+            total=content_length if content_length > 0 else None,
+            speed_str=speed_str,
+            eta_str=eta_str,
+        )
+        if not quiet:
+            try:
+                if getattr(sys.stderr, "isatty", lambda: False)():
+                    sys.stderr.write("\r" + progress_line + " ")
+                    sys.stderr.flush()
+                else:
+                    # Non-interactive: print occasional progress lines.
+                    log(progress_line, file=sys.stderr)
+            except Exception:
+                pass
+        last_progress_time[0] = now
    with HTTPClient(timeout=30.0) as client:
        client.download(url, str(file_path), progress_callback=progress_callback)

    elapsed = time.time() - start_time
# Clear in-place progress bar.
if not quiet:
try:
if getattr(sys.stderr, "isatty", lambda: False)():
sys.stderr.write("\r" + (" " * 140) + "\r")
sys.stderr.flush()
except Exception:
pass
    avg_speed_str = progress_bar.format_bytes(downloaded_bytes[0] / elapsed if elapsed > 0 else 0) + "/s"
    if not quiet:
        debug(f"✓ Downloaded in {elapsed:.1f}s at {avg_speed_str}")

View File

@@ -26,7 +26,9 @@ def connect_ipc(path: str, timeout: float = 5.0) -> IO[bytes] | None:
                return None
            time.sleep(0.05)
        except OSError as exc:  # Pipe busy
-            if exc.errno not in (errno.ENOENT, errno.EPIPE, errno.EBUSY):
+            # Windows named pipes can intermittently raise EINVAL while the pipe exists
+            # but is not ready/accepting connections yet.
+            if exc.errno not in (errno.ENOENT, errno.EPIPE, errno.EBUSY, errno.EINVAL):
                raise
            if time.time() > deadline:
                return None

View File

@@ -94,15 +94,22 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log(f"Cmdlet: {CMDLET.name}\nSummary: {CMDLET.summary}\nUsage: {CMDLET.usage}") log(f"Cmdlet: {CMDLET.name}\nSummary: {CMDLET.summary}\nUsage: {CMDLET.usage}")
return 0 return 0
# Check if we have a piped TagItem with no args (i.e., from @1 | delete-tag) def _looks_like_tag_row(obj: Any) -> bool:
has_piped_tag = (result and hasattr(result, '__class__') and if obj is None:
result.__class__.__name__ == 'TagItem' and return False
hasattr(result, 'tag_name')) # TagItem (direct) or PipeObject/dict emitted from get-tag table rows.
try:
# Check if we have a piped list of TagItems (from @N selection) if hasattr(obj, '__class__') and obj.__class__.__name__ == 'TagItem' and hasattr(obj, 'tag_name'):
has_piped_tag_list = (isinstance(result, list) and result and return True
hasattr(result[0], '__class__') and except Exception:
result[0].__class__.__name__ == 'TagItem') pass
try:
return bool(get_field(obj, 'tag_name'))
except Exception:
return False
has_piped_tag = _looks_like_tag_row(result)
has_piped_tag_list = isinstance(result, list) and bool(result) and _looks_like_tag_row(result[0])
if not args and not has_piped_tag and not has_piped_tag_list: if not args and not has_piped_tag and not has_piped_tag_list:
log("Requires at least one tag argument") log("Requires at least one tag argument")
@@ -195,9 +202,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # If we have TagItems and no args, we are deleting the tags themselves
    # If we have Files (or other objects) and args, we are deleting tags FROM those files

-    # Check if we are in "delete selected tags" mode (TagItems)
-    is_tag_item_mode = (items_to_process and hasattr(items_to_process[0], '__class__') and
-                        items_to_process[0].__class__.__name__ == 'TagItem')
+    # Check if we are in "delete selected tags" mode (tag rows)
+    is_tag_item_mode = bool(items_to_process) and _looks_like_tag_row(items_to_process[0])

    if is_tag_item_mode:
        # Collect all tags to delete from the TagItems
@@ -248,8 +254,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            )
            item_store = override_store or get_field(item, "store")

-            if hasattr(item, '__class__') and item.__class__.__name__ == 'TagItem':
-                # It's a TagItem
+            if _looks_like_tag_row(item):
+                # It's a tag row (TagItem or PipeObject/dict with tag_name)
                if tags_arg:
                    # User provided tags to delete FROM this file (ignoring the tag name in the item?)
                    # Or maybe they want to delete the tag in the item AND the args?
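To illustrate the duck-typing above, these are the row shapes _looks_like_tag_row now accepts (hypothetical values), assuming get_field reads attributes on objects and keys on dicts:

class FakeTagItem:  # stand-in for the real TagItem class
    def __init__(self, tag_name: str):
        self.tag_name = tag_name

assert _looks_like_tag_row(FakeTagItem("artist:miles davis"))  # object exposing tag_name
assert _looks_like_tag_row({"tag_name": "genre:jazz"})         # dict row from a get-tag table
assert not _looks_like_tag_row(None)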

View File

@@ -36,11 +36,13 @@ class Download_File(Cmdlet):
        super().__init__(
            name="download-file",
            summary="Download files via HTTP or provider handlers",
-            usage="download-file <url> [options] OR @N | download-file [options]",
+            usage="download-file <url> [-path DIR] [options] OR @N | download-file [-path DIR] [options]",
            alias=["dl-file", "download-http"],
            arg=[
-                CmdletArg(name="output", type="string", alias="o", description="Output directory (overrides defaults)"),
                SharedArgs.URL,
+                SharedArgs.PATH,
+                # Prefer -path for output directory to match other cmdlets; keep -output for backwards compatibility.
+                CmdletArg(name="-output", type="string", alias="o", description="(deprecated) Output directory (use -path instead)"),
            ],
detail=["Download files directly via HTTP without yt-dlp processing.", "For streaming sites, use download-media."], detail=["Download files directly via HTTP without yt-dlp processing.", "For streaming sites, use download-media."],
@@ -50,10 +52,6 @@ class Download_File(Cmdlet):
    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        """Main execution method."""
-        stage_ctx = pipeline_context.get_stage_context()
-        in_pipeline = stage_ctx is not None and getattr(stage_ctx, "total_stages", 1) > 1
-        if in_pipeline and isinstance(config, dict):
-            config["_quiet_background_output"] = True
        return self._run_impl(result, args, config)
    def _run_impl(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
@@ -169,7 +167,47 @@ class Download_File(Cmdlet):
log(f"Error processing {url}: {e}", file=sys.stderr) log(f"Error processing {url}: {e}", file=sys.stderr)
# 2) Provider item downloads (piped results) # 2) Provider item downloads (piped results)
# Expand provider "folder" rows into their contained files when possible (e.g., AllDebrid magnets).
expanded_items: List[Any] = []
        for item in piped_items:
try:
table = get_field(item, "table")
media_kind = get_field(item, "media_kind")
full_metadata = get_field(item, "full_metadata")
target = get_field(item, "path") or get_field(item, "url")
if str(table or "").lower() == "alldebrid" and str(media_kind or "").lower() == "folder":
magnet_id = None
if isinstance(full_metadata, dict):
magnet_id = full_metadata.get("magnet_id")
if magnet_id is None and isinstance(target, str) and target.lower().startswith("alldebrid:magnet:"):
try:
magnet_id = int(target.split(":")[-1])
except Exception:
magnet_id = None
if magnet_id is not None and get_search_provider is not None:
provider = get_search_provider("alldebrid", config)
if provider is not None:
try:
files = provider.search("*", limit=10_000, filters={"view": "files", "magnet_id": int(magnet_id)})
except Exception:
files = []
# If the magnet isn't ready, provider.search returns a single not-ready folder row.
if files and len(files) == 1 and getattr(files[0], "media_kind", "") == "folder":
detail = getattr(files[0], "detail", "")
log(f"[download-file] AllDebrid magnet {magnet_id} not ready ({detail or 'unknown'})", file=sys.stderr)
else:
for sr in files:
expanded_items.append(sr.to_dict() if hasattr(sr, "to_dict") else sr)
continue
expanded_items.append(item)
except Exception:
expanded_items.append(item)
for item in expanded_items:
            try:
                table = get_field(item, "table")
                title = get_field(item, "title")
@@ -226,8 +264,12 @@ class Download_File(Cmdlet):
                        from cmdlet.search_provider import CMDLET as _SEARCH_PROVIDER_CMDLET
                        # Use plain title text (LibGen mirrors can be finicky with fielded query prefixes).
                        fallback_query = title_text
exec_fn = getattr(_SEARCH_PROVIDER_CMDLET, "exec", None)
if not callable(exec_fn):
log("[download-file] search-provider cmdlet unavailable; cannot run LibGen fallback search", file=sys.stderr)
continue
-                        ret = _SEARCH_PROVIDER_CMDLET.exec(
+                        ret = exec_fn(
                            None,
                            ["-provider", "libgen", "-query", fallback_query],
                            config,
@@ -243,7 +285,10 @@ class Download_File(Cmdlet):
                    except Exception:
                        pass
-                    return int(ret)
                    try:
                        return int(ret)  # type: ignore[arg-type]
                    except Exception:
                        return 1
                except Exception:
                    pass
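The guard above is a generic duck-typing pattern: never assume a cmdlet object exposes a callable `exec`, and never assume its return value coerces to an exit code. A hedged sketch of the same pattern in isolation (`cmdlet_obj` and `query` are placeholder names):

```python
exec_fn = getattr(cmdlet_obj, "exec", None)  # cmdlet_obj: any cmdlet-like object
if callable(exec_fn):
    ret = exec_fn(None, ["-provider", "libgen", "-query", query], config)
    try:
        exit_code = int(ret)  # cmdlets should return ints, but don't trust it
    except (TypeError, ValueError):
        exit_code = 1
else:
    exit_code = 1
```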
@@ -259,7 +304,14 @@ class Download_File(Cmdlet):
log("[download-file] Refusing to download LibGen landing page (expected provider to resolve file link)", file=sys.stderr) log("[download-file] Refusing to download LibGen landing page (expected provider to resolve file link)", file=sys.stderr)
continue continue
debug(f"[download-file] Provider item looks like direct URL, downloading: {target}") debug(f"[download-file] Provider item looks like direct URL, downloading: {target}")
result_obj = _download_direct_file(target, final_output_dir, quiet=quiet_mode) # Use provider title as filename hint so multiple items don't overwrite as downloaded_file.bin
suggested_name = str(title).strip() if title is not None else None
result_obj = _download_direct_file(
target,
final_output_dir,
quiet=quiet_mode,
suggested_filename=suggested_name,
)
file_path = None file_path = None
if hasattr(result_obj, "path"): if hasattr(result_obj, "path"):
file_path = getattr(result_obj, "path") file_path = getattr(result_obj, "path")
@@ -301,7 +353,7 @@ class Download_File(Cmdlet):
    def _resolve_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]:
        """Resolve the output directory from storage location or config."""
-        output_dir_arg = parsed.get("output")
        output_dir_arg = parsed.get("path") or parsed.get("output")
        if output_dir_arg:
            try:
                out_path = Path(str(output_dir_arg)).expanduser()

View File

@@ -12,6 +12,7 @@ Focused cmdlet for video/audio downloads from yt-dlp-supported sites:
from __future__ import annotations

import sys
import os
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence
@@ -430,10 +431,29 @@ def _build_ytdlp_options(opts: DownloadOptions) -> Dict[str, Any]:
"fragment_retries": 10, "fragment_retries": 10,
"http_chunk_size": 10_485_760, "http_chunk_size": 10_485_760,
"restrictfilenames": True, "restrictfilenames": True,
# Always show a progress indicator; do not tie it to debug logging.
"progress_hooks": [_progress_callback],
} }
# Prefer the bundled ffmpeg shipped with the repo (used for merges/remux/postproc).
try:
repo_root = Path(__file__).resolve().parents[1]
bundled_ffmpeg_dir = repo_root / "MPV" / "ffmpeg" / "bin"
if bundled_ffmpeg_dir.exists():
base_options.setdefault("ffmpeg_location", str(bundled_ffmpeg_dir))
except Exception:
pass
# On Windows, AV/indexers can transiently lock files at the end of a download.
# yt-dlp uses file_access_retries for renames (e.g. .part -> final). Default is low.
try:
if os.name == "nt":
base_options.setdefault("file_access_retries", 40)
except Exception:
pass
# Avoid writing progress bars when running in quiet/background mode (e.g. mpv detached pipelines).
if not getattr(opts, "quiet", False):
base_options["progress_hooks"] = [_progress_callback]
if opts.cookies_path and opts.cookies_path.is_file(): if opts.cookies_path and opts.cookies_path.is_file():
base_options["cookiefile"] = str(opts.cookies_path) base_options["cookiefile"] = str(opts.cookies_path)

View File

@@ -12,6 +12,8 @@ from __future__ import annotations
import sys

from SYS.logger import log, debug

try:
    from Provider.openlibrary import OpenLibrary
    _ol_scrape_isbn_metadata = OpenLibrary.scrape_isbn_metadata
@@ -94,7 +96,7 @@ def _emit_tags_as_table(
    file_hash: Optional[str],
    store: str = "hydrus",
    service_name: Optional[str] = None,
-    config: Dict[str, Any] = None,
    config: Optional[Dict[str, Any]] = None,
    item_title: Optional[str] = None,
    path: Optional[str] = None,
    subject: Optional[Any] = None,
@@ -107,11 +109,10 @@ def _emit_tags_as_table(
    from result_table import ResultTable

    # Create ResultTable with just tag column (no title)
-    table_title = "Tag"
    # Keep the title stable and avoid including hash fragments.
    table_title = "tag"
    if item_title:
-        table_title = f"Tag: {item_title}"
        table_title = f"tag: {item_title}"
-    if file_hash:
-        table_title += f" [{file_hash[:8]}]"
    table = ResultTable(table_title, max_columns=1)
    table.set_source_command("get-tag", [])
@@ -140,6 +141,28 @@ def _emit_tags_as_table(
    except AttributeError:
        ctx.set_last_result_table(table, tag_items, subject)
    # Note: CLI will handle displaying the table via ResultTable formatting


def _filter_scraped_tags(tags: List[str]) -> List[str]:
    """Filter out tags we don't want to import from scraping."""
    blocked = {"title", "artist", "source"}
    out: List[str] = []
    seen: set[str] = set()
    for t in tags:
        if not t:
            continue
        s = str(t).strip()
        if not s:
            continue
        ns = s.split(":", 1)[0].strip().lower() if ":" in s else ""
        if ns in blocked:
            continue
        key = s.lower()
        if key in seen:
            continue
        seen.add(key)
        out.append(s)
    return out


def _summarize_tags(tags_list: List[str], limit: int = 8) -> str:
    """Create a summary of tags for display."""
    shown = [t for t in tags_list[:limit] if t]
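Expected behavior of `_filter_scraped_tags`, per the code above: blocked namespaces are dropped, duplicates are removed case-insensitively, and un-namespaced tags pass through:

```python
raw = ["title:Abbey Road", "artist:The Beatles", "genre:rock", "GENRE:ROCK", "source:itunes", "", "1969"]
print(_filter_scraped_tags(raw))
# ['genre:rock', '1969']
```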
@@ -865,14 +888,32 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log(f"Unknown metadata provider: {scrape_url}", file=sys.stderr) log(f"Unknown metadata provider: {scrape_url}", file=sys.stderr)
return 1 return 1
# Prefer identifier tags (ISBN/OLID/etc.) when available; fallback to title/filename # Prefer identifier tags (ISBN/OLID/etc.) when available; fallback to title/filename.
# IMPORTANT: do not rely on `result.tag` for this because it can be stale (cached on
# the piped PipeObject). Always prefer the current store-backed tags when possible.
identifier_tags: List[str] = [] identifier_tags: List[str] = []
result_tags = get_field(result, "tag", None) file_hash_for_scrape = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None))
if isinstance(result_tags, list): store_for_scrape = get_field(result, "store", None)
identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))] if file_hash_for_scrape and store_for_scrape:
try:
# Try local sidecar if no tags present on result from Store import Store
storage = Store(config)
backend = storage[str(store_for_scrape)]
current_tags, _src = backend.get_tag(file_hash_for_scrape, config=config)
if isinstance(current_tags, (list, tuple, set)) and current_tags:
identifier_tags = [str(t) for t in current_tags if isinstance(t, (str, bytes))]
except Exception:
# Fall back to whatever is present on the piped result if store lookup fails.
pass
# Fall back to tags carried on the result (may be stale).
if not identifier_tags: if not identifier_tags:
result_tags = get_field(result, "tag", None)
if isinstance(result_tags, list):
identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))]
# As a last resort, try local sidecar only when the item is not store-backed.
if not identifier_tags and (not file_hash_for_scrape or not store_for_scrape):
file_path = get_field(result, "target", None) or get_field(result, "path", None) or get_field(result, "filename", None) file_path = get_field(result, "target", None) or get_field(result, "path", None) or get_field(result, "filename", None)
if isinstance(file_path, str) and file_path and not file_path.lower().startswith(("http://", "https://")): if isinstance(file_path, str) and file_path and not file_path.lower().startswith(("http://", "https://")):
try: try:
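The store-first lookup above can be read as a small helper. A sketch using the same `Store` indexing and `get_tag` call seen in this hunk; the function name is illustrative:

```python
from Store import Store

def live_tags(config: dict, store_name: str, file_hash: str) -> list:
    """Prefer current store-backed tags over tags cached on a piped result (sketch)."""
    try:
        backend = Store(config)[str(store_name)]
        tags, _source = backend.get_tag(file_hash, config=config)
        return [str(t) for t in (tags or []) if isinstance(t, (str, bytes))]
    except Exception:
        return []  # caller falls back to result-carried (possibly stale) tags
```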
@@ -939,8 +980,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    selection_payload = []
    hash_for_payload = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None))
    store_for_payload = get_field(result, "store", None)
    # Preserve a consistent path field when present so selecting a metadata row
    # keeps referring to the original file.
    path_for_payload = get_field(result, "path", None) or get_field(result, "target", None) or get_field(result, "filename", None)
    for idx, item in enumerate(items):
-        tags = provider.to_tags(item)
        tags = _filter_scraped_tags(provider.to_tags(item))
        row = table.add_row()
        row.add_column("Title", item.get("title", ""))
        row.add_column("Artist", item.get("artist", ""))
@@ -955,6 +999,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"year": item.get("year"), "year": item.get("year"),
"hash": hash_for_payload, "hash": hash_for_payload,
"store": store_for_payload, "store": store_for_payload,
"path": path_for_payload,
"extra": { "extra": {
"tag": tags, "tag": tags,
"provider": provider.name, "provider": provider.name,
@@ -967,7 +1012,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    ctx.set_current_stage_table(table)
    # Preserve items for @ selection and downstream pipes without emitting duplicates
    ctx.set_last_result_items_only(selection_payload)
-    print(table)
    return 0

    # If -scrape was requested but no URL, that's an error
@@ -978,6 +1022,70 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # Handle @N selection which creates a list - extract the first item
    if isinstance(result, list) and len(result) > 0:
        result = result[0]

    # If the current result already carries a tag list (e.g. a selected metadata
    # row from get-tag -scrape itunes), APPLY those tags to the file in the store.
    result_provider = get_field(result, "provider", None)
    result_tags = get_field(result, "tag", None)
    if result_provider and isinstance(result_tags, list) and result_tags:
        file_hash = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None))
        store_name = get_field(result, "store", None)
        subject_path = get_field(result, "path", None) or get_field(result, "target", None) or get_field(result, "filename", None)
        if not file_hash or not store_name:
            log("Selected metadata row is missing hash/store; cannot apply tags", file=sys.stderr)
            _emit_tags_as_table(
                tags_list=[str(t) for t in result_tags if t is not None],
                file_hash=file_hash,
                store=str(store_name or "local"),
                service_name=None,
                config=config,
                item_title=str(get_field(result, "title", None) or result_provider),
                path=str(subject_path) if subject_path else None,
                subject=result,
            )
            return 0

        # Apply tags to the store backend (no sidecar writing here).
        apply_tags = _filter_scraped_tags([str(t) for t in result_tags if t is not None])
        if not apply_tags:
            log("No applicable scraped tags to apply (title:/artist:/source: are skipped)", file=sys.stderr)
            return 0
        try:
            from Store import Store
            storage = Store(config)
            backend = storage[str(store_name)]
            ok = bool(backend.add_tag(file_hash, apply_tags, config=config))
            if not ok:
                log(f"Failed to apply tags to store '{store_name}'", file=sys.stderr)
        except Exception as exc:
            log(f"Failed to apply tags: {exc}", file=sys.stderr)
            return 1

        # Show updated tags after applying.
        try:
            updated_tags, _src = backend.get_tag(file_hash, config=config)
        except Exception:
            updated_tags = apply_tags
        if not updated_tags:
            updated_tags = apply_tags
        _emit_tags_as_table(
            tags_list=list(updated_tags),
            file_hash=file_hash,
            store=str(store_name),
            service_name=None,
            config=config,
            item_title=str(get_field(result, "title", None) or get_field(result, "name", None) or str(result_provider)),
            path=str(subject_path) if subject_path else None,
            subject={
                "hash": file_hash,
                "store": str(store_name),
                "path": str(subject_path) if subject_path else None,
                "title": get_field(result, "title", None) or get_field(result, "name", None),
                "extra": {"applied_provider": str(result_provider)},
            },
        )
        return 0

    hash_from_result = normalize_hash(get_field(result, "hash", None))
    file_hash = hash_override or hash_from_result
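The apply path above boils down to: filter, write, re-read. A condensed sketch with the same backend calls (`add_tag`/`get_tag`); the wrapper name is illustrative:

```python
from Store import Store

def apply_and_confirm(config: dict, store_name: str, file_hash: str, raw_tags: list) -> list:
    tags = _filter_scraped_tags([str(t) for t in raw_tags if t is not None])
    if not tags:
        return []
    backend = Store(config)[str(store_name)]
    if not backend.add_tag(file_hash, tags, config=config):
        raise RuntimeError(f"store '{store_name}' rejected tags")
    updated, _src = backend.get_tag(file_hash, config=config)
    return list(updated or tags)  # display the authoritative post-write state
```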
@@ -1022,6 +1130,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # Build a subject payload representing the file whose tags are being shown
    subject_store = get_field(result, "store", None) or store_name
    subject_path = get_field(result, "path", None) or get_field(result, "target", None) or get_field(result, "filename", None)
    subject_payload: Dict[str, Any] = {
        "tag": list(current),
        "title": item_title,
@@ -1034,12 +1143,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    }
    if file_hash:
        subject_payload["hash"] = file_hash
-    if local_path:
    if subject_path:
        try:
-            path_text = str(local_path)
-            subject_payload.update({
-                "path": path_text,
-            })
            subject_payload["path"] = str(subject_path)
        except Exception:
            pass
@@ -1050,7 +1156,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        service_name=service_name if source == "hydrus" else None,
        config=config,
        item_title=item_title,
-        path=str(local_path) if local_path else None,
        path=str(subject_path) if subject_path else None,
        subject=subject_payload,
    )
@@ -1116,55 +1222,7 @@ class Get_Tag(Cmdlet):
    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        """Execute get-tag cmdlet."""
-        # Parse arguments
-        parsed = parse_cmdlet_args(args, self)
-        # Get hash and store from parsed args or result
-        hash_override = parsed.get("hash")
-        file_hash = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash"))
-        store_name = parsed.get("store") or get_field(result, "store")
-        if not file_hash:
-            log("No hash available in result", file=sys.stderr)
-            return 1
-        if not store_name:
-            log("No store specified in result", file=sys.stderr)
-            return 1
-        # Get tags using storage backend
-        try:
-            from Store import Store
-            storage_obj = Store(config)
-            backend = storage_obj[store_name]
-            current, source = backend.get_tag(file_hash, config=config)
-            if not current:
-                log("No tags found", file=sys.stderr)
-                return 1
-            # Build table and emit
-            item_title = get_field(result, "title") or file_hash[:16]
-            _emit_tags_as_table(
-                tags_list=current,
-                file_hash=file_hash,
-                store=store_name,
-                service_name="",
-                config=config,
-                item_title=item_title,
-                path=None,
-                subject=result,
-            )
-            return 0
-        except KeyError:
-            log(f"Store '{store_name}' not found", file=sys.stderr)
-            return 1
-        except Exception as exc:
-            log(f"Failed to get tags: {exc}", file=sys.stderr)
-            import traceback
-            traceback.print_exc(file=sys.stderr)
-            return 1
        return _run(result, args, config)


# Create and register the cmdlet

View File

@@ -22,6 +22,77 @@ from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url
_ALLDEBRID_UNLOCK_CACHE: Dict[str, str] = {}


def _repo_root() -> Path:
    try:
        return Path(__file__).resolve().parent.parent
    except Exception:
        return Path(os.getcwd())


def _repo_log_dir() -> Path:
    d = _repo_root() / "Log"
    try:
        d.mkdir(parents=True, exist_ok=True)
    except Exception:
        pass
    return d


def _helper_log_file() -> Path:
    return _repo_log_dir() / "medeia-mpv-helper.log"


def _lua_log_file() -> Path:
    return _repo_log_dir() / "medeia-mpv-lua.log"


def _try_enable_mpv_file_logging(mpv_log_path: str, *, attempts: int = 3) -> bool:
    """Best-effort enable mpv log-file + verbose level on a running instance.

    Note: mpv may not honor changing log-file at runtime on all builds/platforms.
    We still try; if it fails, callers can fall back to restart-on-demand.
    """
    if not isinstance(mpv_log_path, str) or not mpv_log_path.strip():
        return False
    mpv_log_path = mpv_log_path.strip()
    try:
        Path(mpv_log_path).parent.mkdir(parents=True, exist_ok=True)
        with open(mpv_log_path, "a", encoding="utf-8", errors="replace"):
            pass
    except Exception:
        pass
    ok = False
    for _ in range(max(1, int(attempts))):
        try:
            # Try to set log-file and verbose level.
            r1 = _send_ipc_command({"command": ["set_property", "options/log-file", mpv_log_path]})
            r2 = _send_ipc_command({"command": ["set_property", "options/msg-level", "all=v"]})
            ok = bool((r1 and r1.get("error") == "success") or (r2 and r2.get("error") == "success"))
            # Emit a predictable line so the file isn't empty if logging is active.
            _send_ipc_command({"command": ["print-text", f"medeia: log enabled -> {mpv_log_path}"]}, silent=True)
        except Exception:
            ok = False
        # If mpv has opened the log file, it should have content shortly.
        try:
            p = Path(mpv_log_path)
            if p.exists() and p.is_file() and p.stat().st_size > 0:
                return True
        except Exception:
            pass
        try:
            import time
            time.sleep(0.15)
        except Exception:
            break
    return bool(ok)


def _get_alldebrid_api_key(config: Optional[Dict[str, Any]]) -> Optional[str]:
    try:
        if not isinstance(config, dict):
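`_send_ipc_command` wraps mpv's JSON IPC, which is a plain line-delimited protocol, so the `set_property` requests above can be reproduced with nothing but the pipe. A Windows-flavored sketch (pipe path illustrative; a robust client would also skip interleaved event lines rather than assume the first line read is the reply):

```python
import json

def send_mpv_command(pipe_path: str, command: list) -> dict:
    """One-shot mpv JSON IPC request/response over a named pipe (sketch)."""
    with open(pipe_path, "r+b", buffering=0) as pipe:
        pipe.write(json.dumps({"command": command}).encode("utf-8") + b"\n")
        return json.loads(pipe.readline().decode("utf-8", "replace"))

resp = send_mpv_command(r"\\.\pipe\mpvsocket", ["set_property", "options/log-file", r"C:\temp\medeia-mpv.log"])
print(resp.get("error"))  # "success" if mpv accepted the property change
```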
@@ -838,10 +909,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    set_debug(True)
    set_thread_stream(sys.stdout)
    try:
-        tmp_dir = Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".")
        log_dir = _repo_log_dir()
        mpv_log_path = str((log_dir / "medeia-mpv.log").resolve())
    except Exception:
-        tmp_dir = Path(".")
-    mpv_log_path = str((tmp_dir / "medeia-mpv.log").resolve())
        mpv_log_path = str((Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".") / "medeia-mpv.log").resolve())
    # Ensure file exists early so we can tail it even if mpv writes later.
    try:
        Path(mpv_log_path).parent.mkdir(parents=True, exist_ok=True)
@@ -851,6 +922,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        pass
    debug(f"MPV log file: {mpv_log_path}")

    # Try to enable mpv file logging on the currently running instance.
    # (If mpv wasn't started with --log-file, this may not work everywhere.)
    try:
        _try_enable_mpv_file_logging(mpv_log_path, attempts=3)
    except Exception:
        pass

    # If mpv is already running, set log options live via IPC.
    try:
        mpv_live = MPV()
@@ -899,6 +977,24 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
load_mode = parsed.get("load") load_mode = parsed.get("load")
current_mode = parsed.get("current") current_mode = parsed.get("current")
# Pure log mode: `.pipe -log` should not run any playlist actions and
# should not print the playlist table. It should only enable/tail logs
# (handled in the `finally` block).
only_log = bool(
log_requested
and not url_arg
and index_arg is None
and not clear_mode
and not list_mode
and not play_mode
and not pause_mode
and not save_mode
and not load_mode
and not current_mode
)
if only_log:
return 0
# Handle --current flag: emit currently playing item to pipeline # Handle --current flag: emit currently playing item to pipeline
if current_mode: if current_mode:
items = _get_playlist() items = _get_playlist()
@@ -959,6 +1055,19 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            # Fallback: just list the playlist if we can't determine index
            list_mode = True

    # If the user explicitly requested -play while queueing a URL, interpret that
    # as "play the URL I just queued" (not merely "unpause whatever is currently playing").
    if play_mode and index_arg is None:
        if mpv_started:
            # MPV was just started; give it a moment, then play first item.
            import time
            time.sleep(0.5)
            index_arg = "1"
        else:
            playlist = _get_playlist(silent=True)
            if playlist and len(playlist) > 0:
                index_arg = str(len(playlist))

    # Ensure lyric overlay is running (auto-discovery handled by MPV.lyric).
    try:
        mpv = MPV()
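Translating the 1-based `index_arg` into playback ultimately means moving mpv's 0-based `playlist-pos` property. A sketch of "play the last queued entry", assuming the `MPV` wrapper exposes `get_property`/`set_property` over IPC:

```python
mpv = MPV()
count = int(mpv.get_property("playlist-count") or 0)
if count > 0:
    mpv.set_property("playlist-pos", count - 1)  # jump to the most recently queued item
    mpv.set_property("pause", False)             # and make sure playback is running
```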
@@ -1411,11 +1520,64 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    finally:
        if log_requested and isinstance(mpv_log_path, str) and mpv_log_path.strip():
            try:
-                tail_lines = _tail_text_file(mpv_log_path, max_lines=160)
                # Give mpv a short moment to flush logs, then print a tail that is easy to copy.
                print(f"MPV log file: {mpv_log_path}")
                # Best-effort: re-try enabling file logging at the end too (mpv may have
                # been unreachable at the start).
                try:
                    _try_enable_mpv_file_logging(mpv_log_path, attempts=2)
                except Exception:
                    pass
                tail_lines: List[str] = []
                for _ in range(8):
                    tail_lines = _tail_text_file(mpv_log_path, max_lines=200)
                    if tail_lines:
                        break
                    try:
                        import time
                        time.sleep(0.25)
                    except Exception:
                        break
                if tail_lines:
                    print("MPV log (tail):")
                    for ln in tail_lines:
                        print(ln)
                else:
                    print("MPV log (tail): <empty>")
                    print("Note: On some Windows builds, mpv cannot start writing to --log-file after launch.")
                    print("If you need full [main2] logs, restart mpv so it starts with --log-file.")

                # Also print the helper log tail (this captures Python helper output that won't
                # necessarily show up in MPV's own log-file).
                try:
                    helper_path = _helper_log_file()
                    helper_tail = _tail_text_file(str(helper_path), max_lines=200)
                    print(f"Helper log file: {str(helper_path)}")
                    if helper_tail:
                        print("Helper log (tail):")
                        for ln in helper_tail:
                            print(ln)
                    else:
                        print("Helper log (tail): <empty>")
                except Exception:
                    pass

                # Also print the Lua-side log tail (mp.msg output isn't always written to mpv's log-file).
                try:
                    lua_path = _lua_log_file()
                    lua_tail = _tail_text_file(str(lua_path), max_lines=200)
                    print(f"Lua log file: {str(lua_path)}")
                    if lua_tail:
                        print("Lua log (tail):")
                        for ln in lua_tail:
                            print(ln)
                    else:
                        print("Lua log (tail): <empty>")
                except Exception:
                    pass
            except Exception:
                pass
        try:
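`_tail_text_file` itself is not part of this diff; a minimal stand-in plus the flush-retry loop used above (path illustrative):

```python
import time
from pathlib import Path
from typing import List

def tail_text_file(path: str, max_lines: int = 200) -> List[str]:
    """Return up to the last max_lines lines of a text file (sketch)."""
    try:
        text = Path(path).read_text(encoding="utf-8", errors="replace")
    except OSError:
        return []
    return text.splitlines()[-max_lines:]

tail: List[str] = []
for _ in range(8):  # mpv may still be flushing; poll briefly before giving up
    tail = tail_text_file(r"C:\temp\medeia-mpv.log")
    if tail:
        break
    time.sleep(0.25)
```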
@@ -1486,8 +1648,7 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_
debug("Timed out waiting for MPV IPC connection", file=sys.stderr) debug("Timed out waiting for MPV IPC connection", file=sys.stderr)
return return
# Ensure Lua script is loaded (redundant when started with --script, but safe) # main.lua is loaded at startup via --script; don't reload it here.
mpv.ensure_lua_loaded()
# Ensure lyric overlay is running (auto-discovery handled by MPV.lyric). # Ensure lyric overlay is running (auto-discovery handled by MPV.lyric).
_ensure_lyric_overlay(mpv) _ensure_lyric_overlay(mpv)