"""Persistent MPV pipeline helper.
This process connects to MPV's IPC server, observes a user-data property for
pipeline execution requests, runs the pipeline in-process, and posts results
back to MPV via user-data properties.
Why:
- Avoid spawning a new Python process for every MPV action.
- Enable MPV Lua scripts to trigger any cmdlet pipeline cheaply.
Protocol (user-data properties):
- Request: user-data/medeia-pipeline-request (JSON string)
{"id": "...", "pipeline": "...", "seeds": [...]} (seeds optional)
- Response: user-data/medeia-pipeline-response (JSON string)
{"id": "...", "success": bool, "stdout": "...", "stderr": "...", "error": "..."}
- Ready: user-data/medeia-pipeline-ready ("1")
This helper is intentionally minimal: one request at a time, last-write-wins.
"""
from __future__ import annotations
MEDEIA_MPV_HELPER_VERSION = "2025-12-19"
import argparse
import json
import os
import sys
import tempfile
import time
import logging
import re
import hashlib
import subprocess
import platform
from pathlib import Path
from typing import Any, Dict, Optional
def _repo_root() -> Path:
return Path(__file__).resolve().parent.parent
2025-12-27 03:13:16 -08:00
def _runtime_config_root() -> Path:
    """Best-effort config root for runtime execution.

    MPV can spawn this helper from an installed location while setting `cwd` to
    the repo root (see MPV.mpv_ipc). Prefer `cwd` when it contains `config.conf`.
    """
    try:
        runtime_cwd = Path.cwd().resolve()
        has_config = (runtime_cwd / "config.conf").exists()
    except Exception:
        # cwd may be unreadable/deleted; fall back to the repo root.
        return _repo_root()
    return runtime_cwd if has_config else _repo_root()
# Make repo-local packages importable even when mpv starts us from another cwd.
_ROOT = str(_repo_root())
if _ROOT not in sys.path:
    sys.path.insert(0, _ROOT)

# Repo-local imports must come after the sys.path fix above (hence noqa: E402).
from MPV.mpv_ipc import MPVIPCClient  # noqa: E402
from SYS.config import load_config  # noqa: E402
from SYS.logger import set_debug, debug, set_thread_stream  # noqa: E402

# User-data property names that form the request/response protocol with Lua
# (see the module docstring for the payload shapes).
REQUEST_PROP = "user-data/medeia-pipeline-request"
RESPONSE_PROP = "user-data/medeia-pipeline-response"
READY_PROP = "user-data/medeia-pipeline-ready"
# mpv observe_property subscription id for the request property.
OBS_ID_REQUEST = 1001
def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
    """Execute a cmdlet pipeline in-process and return a JSON-safe result dict.

    The returned mapping mirrors the response protocol: success/stdout/stderr/
    error plus an optional "table" payload (title + rows) the Lua UI can render.
    """
    # Import after the sys.path fix so repo-local packages resolve.
    from TUI.pipeline_runner import PipelineExecutor  # noqa: WPS433

    def _serialize_table(table: Any) -> Optional[Dict[str, Any]]:
        """Reduce a ResultTable-like object to plain JSON-serializable data."""
        if table is None:
            return None
        try:
            title = getattr(table, "title", "")
        except Exception:
            title = ""

        serialized_rows = []
        try:
            raw_rows = getattr(table, "rows", None)
        except Exception:
            raw_rows = None
        for row in (raw_rows if isinstance(raw_rows, list) else []):
            serialized_cols = []
            try:
                raw_cols = getattr(row, "columns", None)
            except Exception:
                raw_cols = None
            for col in (raw_cols if isinstance(raw_cols, list) else []):
                try:
                    serialized_cols.append(
                        {
                            "name": getattr(col, "name", ""),
                            "value": getattr(col, "value", ""),
                        }
                    )
                except Exception:
                    continue
            selection_args = None
            try:
                raw_sel = getattr(row, "selection_args", None)
                if isinstance(raw_sel, list):
                    selection_args = [str(item) for item in raw_sel]
            except Exception:
                selection_args = None
            serialized_rows.append(
                {"columns": serialized_cols, "selection_args": selection_args}
            )

        # Only return JSON-serializable data (Lua only needs title + rows).
        return {"title": str(title or ""), "rows": serialized_rows}

    executor = PipelineExecutor()
    result = executor.run_pipeline(pipeline_text, seeds=seeds)

    try:
        table_payload = _serialize_table(getattr(result, "result_table", None))
    except Exception:
        table_payload = None

    return {
        "success": bool(result.success),
        "stdout": result.stdout or "",
        "stderr": result.stderr or "",
        "error": result.error,
        "table": table_payload,
    }
def _run_op(op: str, data: Any) -> Dict[str, Any]:
    """Run a helper-only operation.
    These are NOT cmdlets and are not available via CLI pipelines. They exist
    solely so MPV Lua can query lightweight metadata (e.g., autocomplete lists)
    without inventing user-facing commands.

    Supported ops: run-detached (spawn a pipeline as a detached CLI process),
    store-choices (list store backend names), ytdlp-formats (probe a URL's
    yt-dlp format list). Every branch returns the standard response shape
    (success/stdout/stderr/error/table) plus op-specific extras.
    """
    op_name = str(op or "").strip().lower()

    # --- run-detached: spawn `CLI.py pipeline` as a fully detached child. ---
    if op_name in {"run-detached", "run_detached", "pipeline-detached", "pipeline_detached"}:
        pipeline_text = ""
        seeds = None
        if isinstance(data, dict):
            pipeline_text = str(data.get("pipeline") or "").strip()
            seeds = data.get("seeds")
        if not pipeline_text:
            return {
                "success": False,
                "stdout": "",
                "stderr": "",
                "error": "Missing pipeline",
                "table": None,
            }
        py = sys.executable or "python"
        if platform.system() == "Windows":
            # Prefer pythonw.exe so the detached child opens no console window.
            try:
                exe = str(py or "").strip()
            except Exception:
                exe = ""
            low = exe.lower()
            if low.endswith("python.exe"):
                try:
                    # len("python.exe") == 10; swap the basename only.
                    candidate = exe[:-10] + "pythonw.exe"
                    if os.path.exists(candidate):
                        py = candidate
                except Exception:
                    pass
        cmd = [
            py,
            str((_repo_root() / "CLI.py").resolve()),
            "pipeline",
            "--pipeline",
            pipeline_text,
        ]
        if seeds is not None:
            try:
                cmd.extend(["--seeds-json", json.dumps(seeds, ensure_ascii=False)])
            except Exception:
                # Best-effort; seeds are optional.
                pass
        # Detach all stdio so the child outlives us and never blocks on pipes.
        popen_kwargs: Dict[str, Any] = {
            "stdin": subprocess.DEVNULL,
            "stdout": subprocess.DEVNULL,
            "stderr": subprocess.DEVNULL,
            "cwd": str(_repo_root()),
        }
        if platform.system() == "Windows":
            # DETACHED_PROCESS + CREATE_NO_WINDOW; literals are fallbacks in
            # case the subprocess constants are missing on this build.
            flags = 0
            try:
                flags |= int(getattr(subprocess, "DETACHED_PROCESS", 0x00000008))
            except Exception:
                flags |= 0x00000008
            try:
                flags |= int(getattr(subprocess, "CREATE_NO_WINDOW", 0x08000000))
            except Exception:
                flags |= 0x08000000
            popen_kwargs["creationflags"] = int(flags)
            try:
                # Belt-and-braces: also request a hidden window via STARTUPINFO.
                si = subprocess.STARTUPINFO()
                si.dwFlags |= int(getattr(subprocess, "STARTF_USESHOWWINDOW", 0x00000001))
                si.wShowWindow = subprocess.SW_HIDE
                popen_kwargs["startupinfo"] = si
            except Exception:
                pass
        else:
            # POSIX: new session so the child detaches from our process group.
            popen_kwargs["start_new_session"] = True
        try:
            proc = subprocess.Popen(cmd, **popen_kwargs)
        except Exception as exc:
            return {
                "success": False,
                "stdout": "",
                "stderr": "",
                "error": f"Failed to spawn detached pipeline: {type(exc).__name__}: {exc}",
                "table": None,
            }
        return {
            "success": True,
            "stdout": "",
            "stderr": "",
            "error": None,
            "table": None,
            "pid": int(getattr(proc, "pid", 0) or 0),
        }

    # Provide store backend choices using the same source as CLI/Typer autocomplete.
    if op_name in {"store-choices", "store_choices", "get-store-choices", "get_store_choices"}:
        # IMPORTANT:
        # - Prefer runtime cwd for config discovery (mpv spawns us with cwd=repo_root).
        # - Avoid returning a cached empty result if config was loaded before it existed.
        try:
            from SYS.config import reload_config  # noqa: WPS433
            from Store import Store  # noqa: WPS433
            config_root = _runtime_config_root()
            cfg = reload_config(config_dir=config_root)
            storage = Store(config=cfg, suppress_debug=True)
            backends = storage.list_backends() or []
            # De-duplicate and drop blank names; sorted for stable UI ordering.
            choices = sorted({str(n) for n in backends if str(n).strip()})
            # Fallback: if initialization gated all backends (e.g., missing deps or offline stores),
            # still return configured instance names so the UI can present something.
            if not choices:
                store_cfg = cfg.get("store") if isinstance(cfg, dict) else None
                if isinstance(store_cfg, dict):
                    seen = set()
                    for _, instances in store_cfg.items():
                        if not isinstance(instances, dict):
                            continue
                        for instance_key, instance_cfg in instances.items():
                            name = None
                            if isinstance(instance_cfg, dict):
                                name = instance_cfg.get("NAME") or instance_cfg.get("name")
                            candidate = str(name or instance_key or "").strip()
                            if candidate:
                                seen.add(candidate)
                    choices = sorted(seen)

            debug(f"[store-choices] config_dir={config_root} choices={len(choices)}")

            return {
                "success": True,
                "stdout": "",
                "stderr": "",
                "error": None,
                "table": None,
                "choices": choices,
            }
        except Exception as exc:
            return {
                "success": False,
                "stdout": "",
                "stderr": "",
                "error": f"store-choices failed: {type(exc).__name__}: {exc}",
                "table": None,
                "choices": [],
            }

    # Provide yt-dlp format list for a URL (for MPV "Change format" menu).
    # Returns a ResultTable-like payload so the Lua UI can render without running cmdlets.
    if op_name in {"ytdlp-formats", "ytdlp_formats", "ytdl-formats", "ytdl_formats"}:
        try:
            url = None
            if isinstance(data, dict):
                url = data.get("url")
            url = str(url or "").strip()
            if not url:
                return {
                    "success": False,
                    "stdout": "",
                    "stderr": "",
                    "error": "Missing url",
                    "table": None,
                }
            # Fast gate: only for streaming URLs yt-dlp knows about.
            try:
                from tool.ytdlp import is_url_supported_by_ytdlp  # noqa: WPS433
                if not is_url_supported_by_ytdlp(url):
                    return {
                        "success": False,
                        "stdout": "",
                        "stderr": "",
                        "error": "URL not supported by yt-dlp",
                        "table": None,
                    }
            except Exception:
                # If probing support fails, still attempt extraction and let yt-dlp decide.
                pass
            try:
                import yt_dlp  # type: ignore
            except Exception as exc:
                return {
                    "success": False,
                    "stdout": "",
                    "stderr": "",
                    "error": f"yt-dlp module not available: {type(exc).__name__}: {exc}",
                    "table": None,
                }
            # Reuse the project's cookie resolution so gated URLs still probe.
            cookiefile = None
            try:
                from tool.ytdlp import YtDlpTool  # noqa: WPS433
                cfg = load_config() or {}
                cookie_path = YtDlpTool(cfg).resolve_cookiefile()
                if cookie_path is not None:
                    cookiefile = str(cookie_path)
            except Exception:
                cookiefile = None
            ydl_opts: Dict[str, Any] = {
                "quiet": True,
                "no_warnings": True,
                "socket_timeout": 20,
                "retries": 2,
                "skip_download": True,
                # Avoid accidentally expanding huge playlists on load.
                "noplaylist": True,
                "noprogress": True,
            }
            if cookiefile:
                ydl_opts["cookiefile"] = cookiefile

            def _format_bytes(n: Any) -> str:
                """Human-readable size string; "" for unknown/non-positive."""
                try:
                    v = float(n)
                except Exception:
                    return ""
                if v <= 0:
                    return ""
                units = ["B", "KB", "MB", "GB", "TB"]
                i = 0
                while v >= 1024 and i < len(units) - 1:
                    v /= 1024.0
                    i += 1
                if i == 0:
                    return f"{int(v)} {units[i]}"
                return f"{v:.1f} {units[i]}"

            with yt_dlp.YoutubeDL(ydl_opts) as ydl:  # type: ignore[attr-defined]
                info = ydl.extract_info(url, download=False)
            # Debug: dump a short summary of the format list to the helper log.
            try:
                formats_any = info.get("formats") if isinstance(info, dict) else None
                count = len(formats_any) if isinstance(formats_any, list) else 0
                _append_helper_log(
                    f"[ytdlp-formats] extracted formats count={count} url={url}"
                )
                if isinstance(formats_any, list) and formats_any:
                    limit = 60
                    for i, f in enumerate(formats_any[:limit], start=1):
                        if not isinstance(f, dict):
                            continue
                        fid = str(f.get("format_id") or "")
                        ext = str(f.get("ext") or "")
                        note = f.get("format_note") or f.get("format") or ""
                        vcodec = str(f.get("vcodec") or "")
                        acodec = str(f.get("acodec") or "")
                        size = f.get("filesize") or f.get("filesize_approx")
                        res = str(f.get("resolution") or "")
                        if not res:
                            # Derive "WxH" or "Hp" when yt-dlp gives no resolution.
                            try:
                                w = f.get("width")
                                h = f.get("height")
                                if w and h:
                                    res = f"{int(w)}x{int(h)}"
                                elif h:
                                    res = f"{int(h)}p"
                            except Exception:
                                res = ""
                        _append_helper_log(
                            f"[ytdlp-format {i:02d}] id={fid} ext={ext} res={res} note={note} codecs={vcodec}/{acodec} size={size}"
                        )
                    if count > limit:
                        _append_helper_log(
                            f"[ytdlp-formats] (truncated; total={count})"
                        )
            except Exception:
                pass
            # Optional: dump the full extracted JSON for inspection.
            try:
                dump = os.environ.get("MEDEIA_MPV_YTDLP_DUMP", "").strip()
                if dump and dump != "0" and isinstance(info, dict):
                    # Short URL hash keeps the probe filename stable per URL.
                    h = hashlib.sha1(url.encode("utf-8", errors="replace")).hexdigest()[:10]
                    out_path = _repo_root() / "Log" / f"ytdlp-probe-{h}.json"
                    out_path.write_text(
                        json.dumps(info, ensure_ascii=False, indent=2),
                        encoding="utf-8",
                        errors="replace",
                    )
                    _append_helper_log(f"[ytdlp-formats] wrote probe json: {out_path}")
            except Exception:
                pass
            if not isinstance(info, dict):
                return {
                    "success": False,
                    "stdout": "",
                    "stderr": "",
                    "error": "yt-dlp returned non-dict info",
                    "table": None,
                }
            formats = info.get("formats")
            if not isinstance(formats, list) or not formats:
                # Success with an empty table: the URL resolved but has no formats.
                return {
                    "success": True,
                    "stdout": "",
                    "stderr": "",
                    "error": None,
                    "table": {
                        "title": "Formats",
                        "rows": []
                    },
                }
            rows = []
            for fmt in formats:
                if not isinstance(fmt, dict):
                    continue
                format_id = str(fmt.get("format_id") or "").strip()
                if not format_id:
                    continue
                # Prefer human-ish resolution.
                resolution = str(fmt.get("resolution") or "").strip()
                if not resolution:
                    w = fmt.get("width")
                    h = fmt.get("height")
                    try:
                        if w and h:
                            resolution = f"{int(w)}x{int(h)}"
                        elif h:
                            resolution = f"{int(h)}p"
                    except Exception:
                        resolution = ""
                ext = str(fmt.get("ext") or "").strip()
                size = _format_bytes(fmt.get("filesize") or fmt.get("filesize_approx"))
                # Build selection args compatible with MPV Lua picker.
                selection_args = ["-format", format_id]
                rows.append(
                    {
                        "columns": [
                            {
                                "name": "ID",
                                "value": format_id
                            },
                            {
                                "name": "Resolution",
                                "value": resolution or ""
                            },
                            {
                                "name": "Ext",
                                "value": ext or ""
                            },
                            {
                                "name": "Size",
                                "value": size or ""
                            },
                        ],
                        "selection_args": selection_args,
                    }
                )
            return {
                "success": True,
                "stdout": "",
                "stderr": "",
                "error": None,
                "table": {
                    "title": "Formats",
                    "rows": rows
                },
            }
        except Exception as exc:
            return {
                "success": False,
                "stdout": "",
                "stderr": "",
                "error": f"{type(exc).__name__}: {exc}",
                "table": None,
            }

    # No branch matched: report the unrecognized op back to the caller.
    return {
        "success": False,
        "stdout": "",
        "stderr": "",
        "error": f"Unknown op: {op_name}",
        "table": None,
    }
def _helper_log_path() -> Path:
    """Return the helper log file path, preferring the repo-local Log/ dir.

    Falls back to the system temp directory if Log/ cannot be created.
    """
    try:
        log_dir = _repo_root() / "Log"
        log_dir.mkdir(parents=True, exist_ok=True)
    except Exception:
        return Path(tempfile.gettempdir()) / "medeia-mpv-helper.log"
    return log_dir / "medeia-mpv-helper.log"
def _append_helper_log(text: str) -> None:
    """Append one trimmed line to the helper log; best-effort, never raises."""
    line = (text or "").rstrip()
    if not line:
        return
    try:
        log_path = _helper_log_path()
        log_path.parent.mkdir(parents=True, exist_ok=True)
        with open(log_path, "a", encoding="utf-8", errors="replace") as handle:
            handle.write(line + "\n")
    except Exception:
        return
def _acquire_ipc_lock(ipc_path: str) -> Optional[Any]:
    """Best-effort singleton lock per IPC path.
    Multiple helpers subscribing to mpv log-message events causes duplicated
    log output. Use a tiny file lock to ensure one helper per mpv instance.

    Returns the open lock-file handle on success (caller must keep it alive
    for the lock to hold), or None if another helper owns it or locking fails.
    """
    try:
        # Sanitize the ipc path into a filesystem-safe lock filename.
        safe = re.sub(r"[^a-zA-Z0-9_.-]+", "_", str(ipc_path or ""))
        if not safe:
            safe = "mpv"
        # Keep lock files out of the repo's Log/ directory to avoid clutter.
        lock_dir = Path(tempfile.gettempdir()) / "medeia-mpv-helper"
        lock_dir.mkdir(parents=True, exist_ok=True)
        lock_path = lock_dir / f"medeia-mpv-helper-{safe}.lock"
        # "a+" creates the file if missing without truncating existing content.
        fh = open(lock_path, "a+", encoding="utf-8", errors="replace")
        if os.name == "nt":
            # Windows: non-blocking byte-range lock on the first byte.
            try:
                import msvcrt  # type: ignore
                fh.seek(0)
                msvcrt.locking(fh.fileno(), msvcrt.LK_NBLCK, 1)
            except Exception:
                # Lock held elsewhere (or locking unsupported): close and bail.
                try:
                    fh.close()
                except Exception:
                    pass
                return None
        else:
            # POSIX: non-blocking exclusive flock on the whole file.
            try:
                import fcntl  # type: ignore
                fcntl.flock(fh.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
            except Exception:
                try:
                    fh.close()
                except Exception:
                    pass
                return None
        return fh
    except Exception:
        return None
def _parse_request(data: Any) -> Optional[Dict[str, Any]]:
if data is None:
return None
if isinstance(data, str):
text = data.strip()
if not text:
return None
try:
obj = json.loads(text)
except Exception:
return None
return obj if isinstance(obj, dict) else None
if isinstance(data, dict):
return data
return None
def main(argv: Optional[list[str]] = None) -> int:
    """Helper entry point: connect to mpv IPC and serve pipeline requests.

    Returns 0 on clean exit (or duplicate instance), 2 if the IPC connection
    could not be established, 3 if the request-property observer failed.
    """
    parser = argparse.ArgumentParser(prog="mpv-pipeline-helper")
    parser.add_argument("--ipc", required=True, help="mpv --input-ipc-server path")
    parser.add_argument("--timeout", type=float, default=15.0)
    args = parser.parse_args(argv)

    # Load config once and configure logging similar to CLI.pipeline.
    try:
        cfg = load_config() or {}
    except Exception:
        cfg = {}
    try:
        debug_enabled = bool(isinstance(cfg, dict) and cfg.get("debug", False))
        set_debug(debug_enabled)
        if debug_enabled:
            logging.basicConfig(
                level=logging.DEBUG,
                format="[%(name)s] %(levelname)s: %(message)s",
                stream=sys.stderr,
            )
            # Quiet the chattiest HTTP client loggers even in debug mode.
            for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"):
                try:
                    logging.getLogger(noisy).setLevel(logging.WARNING)
                except Exception:
                    pass
    except Exception:
        pass

    # Ensure all in-process cmdlets that talk to MPV pick up the exact IPC server
    # path used by this helper (which comes from the running MPV instance).
    os.environ["MEDEIA_MPV_IPC"] = str(args.ipc)

    # Ensure single helper instance per ipc.
    _lock_fh = _acquire_ipc_lock(str(args.ipc))
    if _lock_fh is None:
        _append_helper_log(
            f"[helper] another instance already holds lock for ipc={args.ipc}; exiting"
        )
        return 0
    try:
        _append_helper_log(
            f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}"
        )
        try:
            _append_helper_log(
                f"[helper] file={Path(__file__).resolve()} cwd={Path.cwd().resolve()}"
            )
        except Exception:
            pass
        try:
            runtime_root = _runtime_config_root()
            _append_helper_log(
                f"[helper] config_root={runtime_root} exists={bool((runtime_root / 'config.conf').exists())}"
            )
        except Exception:
            pass
        debug(f"[mpv-helper] logging to: {_helper_log_path()}")
    except Exception:
        pass

    # Route SYS.logger output into the helper log file so diagnostics are not
    # lost in mpv's console/terminal output.
    try:

        class _HelperLogStream:
            """File-like sink that forwards complete lines to the helper log."""

            def __init__(self) -> None:
                # Buffer for a trailing partial line between write() calls.
                self._pending = ""

            def write(self, s: str) -> int:
                if not s:
                    return 0
                text = self._pending + str(s)
                lines = text.splitlines(keepends=True)
                # Hold back an unterminated final fragment until it completes.
                if lines and not lines[-1].endswith(("\n", "\r")):
                    self._pending = lines[-1]
                    lines = lines[:-1]
                else:
                    self._pending = ""
                for line in lines:
                    payload = line.rstrip("\r\n")
                    if payload:
                        _append_helper_log("[py] " + payload)
                return len(s)

            def flush(self) -> None:
                if self._pending:
                    _append_helper_log("[py] " + self._pending.rstrip("\r\n"))
                    self._pending = ""

        set_thread_stream(_HelperLogStream())
    except Exception:
        pass

    # Prefer a stable repo-local log folder for discoverability.
    error_log_dir = _repo_root() / "Log"
    try:
        error_log_dir.mkdir(parents=True, exist_ok=True)
    except Exception:
        error_log_dir = Path(tempfile.gettempdir())

    last_error_log = error_log_dir / "medeia-mpv-pipeline-last-error.log"

    def _write_error_log(text: str, *, req_id: str) -> Optional[str]:
        """Persist error details to a per-request file and the last-error file.

        Returns the path that was written (stamped file preferred), or None
        when there was nothing to write.
        """
        try:
            error_log_dir.mkdir(parents=True, exist_ok=True)
        except Exception:
            pass
        payload = (text or "").strip()
        if not payload:
            return None
        stamped = error_log_dir / f"medeia-mpv-pipeline-error-{req_id}.log"
        try:
            stamped.write_text(payload, encoding="utf-8", errors="replace")
        except Exception:
            stamped = None
        try:
            last_error_log.write_text(payload, encoding="utf-8", errors="replace")
        except Exception:
            pass
        return str(stamped) if stamped else str(last_error_log)

    # Connect to mpv's JSON IPC. On Windows, the pipe can exist but reject opens
    # briefly during startup; also mpv may create the IPC server slightly after
    # the Lua script launches us. Retry until timeout.
    connect_deadline = time.time() + max(0.5, float(args.timeout))
    last_connect_error: Optional[str] = None
    client = MPVIPCClient(socket_path=args.ipc, timeout=0.5, silent=True)
    while True:
        try:
            if client.connect():
                break
        except Exception as exc:
            last_connect_error = f"{type(exc).__name__}: {exc}"
        if time.time() > connect_deadline:
            _append_helper_log(
                f"[helper] failed to connect ipc={args.ipc} error={last_connect_error or 'timeout'}"
            )
            return 2
        # Keep trying.
        time.sleep(0.10)

    # Mark ready ASAP and keep it fresh.
    # Use a unix timestamp so the Lua side can treat it as a heartbeat.
    last_ready_ts: float = 0.0

    def _touch_ready() -> None:
        """Refresh the READY heartbeat property, throttled to ~0.75s."""
        nonlocal last_ready_ts
        now = time.time()
        # Throttle updates to reduce IPC chatter.
        if (now - last_ready_ts) < 0.75:
            return
        try:
            client.send_command_no_wait(
                ["set_property_string", READY_PROP, str(int(now))]
            )
            last_ready_ts = now
        except Exception:
            return

    # Mirror mpv's own log messages into our helper log file so debugging does
    # not depend on the mpv on-screen console or mpv's log-file.
    try:
        # IMPORTANT: mpv debug logs can be extremely chatty (especially ytdl_hook)
        # and can starve request handling. Default to warn unless explicitly overridden.
        level = os.environ.get("MEDEIA_MPV_HELPER_MPVLOG", "").strip() or "warn"
        client.send_command_no_wait(["request_log_messages", level])
        _append_helper_log(f"[helper] requested mpv log messages level={level}")
    except Exception:
        pass

    # De-dup/throttle mpv log-message lines (mpv and yt-dlp can be very chatty).
    last_mpv_line: Optional[str] = None
    last_mpv_count: int = 0
    last_mpv_ts: float = 0.0

    def _flush_mpv_repeat() -> None:
        """Emit a repeat-count summary for a suppressed duplicate log line."""
        nonlocal last_mpv_line, last_mpv_count
        if last_mpv_line and last_mpv_count > 1:
            _append_helper_log(f"[mpv] (previous line repeated {last_mpv_count}x)")
        last_mpv_line = None
        last_mpv_count = 0

    # Observe request property changes.
    try:
        client.send_command_no_wait(
            ["observe_property", OBS_ID_REQUEST, REQUEST_PROP, "string"]
        )
    except Exception:
        return 3

    # Mark ready only after the observer is installed to avoid races where Lua
    # sends a request before we can receive property-change notifications.
    try:
        _touch_ready()
        _append_helper_log(f"[helper] ready heartbeat armed prop={READY_PROP}")
    except Exception:
        pass

    # Pre-compute store choices at startup and publish to a cached property so Lua
    # can read immediately without waiting for a request/response cycle (which may timeout).
    try:
        startup_choices_payload = _run_op("store-choices", None)
        startup_choices = (
            startup_choices_payload.get("choices")
            if isinstance(startup_choices_payload, dict) else None
        )
        if isinstance(startup_choices, list):
            preview = ", ".join(str(x) for x in startup_choices[:50])
            _append_helper_log(
                f"[helper] startup store-choices count={len(startup_choices)} items={preview}"
            )
            # Publish to a cached property for Lua to read without IPC request.
            try:
                cached_json = json.dumps(
                    {"success": True, "choices": startup_choices},
                    ensure_ascii=False
                )
                client.send_command_no_wait(
                    [
                        "set_property_string",
                        "user-data/medeia-store-choices-cached",
                        cached_json
                    ]
                )
                _append_helper_log(
                    f"[helper] published store-choices to user-data/medeia-store-choices-cached"
                )
            except Exception as exc:
                _append_helper_log(
                    f"[helper] failed to publish store-choices: {type(exc).__name__}: {exc}"
                )
        else:
            _append_helper_log("[helper] startup store-choices unavailable")
    except Exception as exc:
        _append_helper_log(
            f"[helper] startup store-choices failed: {type(exc).__name__}: {exc}"
        )

    # Also publish config temp directory if available
    try:
        from SYS.config import load_config

        cfg = load_config()
        # NOTE(review): assumes cfg.get("temp") is a string when present; a
        # non-string value would raise here and be swallowed below — confirm.
        temp_dir = cfg.get("temp", "").strip() or os.getenv("TEMP") or "/tmp"
        if temp_dir:
            client.send_command_no_wait(
                ["set_property_string", "user-data/medeia-config-temp", temp_dir]
            )
            _append_helper_log(
                f"[helper] published config temp to user-data/medeia-config-temp={temp_dir}"
            )
    except Exception as exc:
        _append_helper_log(
            f"[helper] failed to publish config temp: {type(exc).__name__}: {exc}"
        )

    last_seen_id: Optional[str] = None
    try:
        _append_helper_log(f"[helper] connected to ipc={args.ipc}")
    except Exception:
        pass

    # Main event loop: heartbeat, mirror mpv logs, and serve one request at a
    # time (last-write-wins, per the module protocol).
    while True:
        msg = client.read_message(timeout=0.25)
        if msg is None:
            # Keep READY fresh even when idle (Lua may clear it on timeouts).
            _touch_ready()
            time.sleep(0.02)
            continue
        if msg.get("event") == "__eof__":
            # mpv closed the IPC connection; flush pending repeat count and exit.
            try:
                _flush_mpv_repeat()
            except Exception:
                pass
            return 0
        if msg.get("event") == "log-message":
            try:
                level = str(msg.get("level") or "")
                prefix = str(msg.get("prefix") or "")
                text = str(msg.get("text") or "").rstrip()
                if not text:
                    continue
                # Filter excessive noise unless debug is enabled.
                # NOTE(review): debug_enabled is bound inside the logging
                # try-block near the top; if that block raised before binding,
                # this reference would NameError (swallowed by this handler).
                if not debug_enabled:
                    lower_prefix = prefix.lower()
                    if "quic" in lower_prefix and "DEBUG:" in text:
                        continue
                    # Suppress progress-bar style lines (keep true errors).
                    if ("ETA" in text or "%" in text) and ("ERROR:" not in text
                                                           and "WARNING:" not in text):
                        # Typical yt-dlp progress bar line.
                        if text.lstrip().startswith("["):
                            continue
                line = f"[mpv {level}] {prefix} {text}".strip()
                now = time.time()
                # Collapse identical lines arriving within a 2-second window.
                if last_mpv_line == line and (now - last_mpv_ts) < 2.0:
                    last_mpv_count += 1
                    last_mpv_ts = now
                    continue
                _flush_mpv_repeat()
                last_mpv_line = line
                last_mpv_count = 1
                last_mpv_ts = now
                _append_helper_log(line)
            except Exception:
                pass
            continue

        if msg.get("event") != "property-change":
            continue
        if msg.get("id") != OBS_ID_REQUEST:
            continue

        raw = msg.get("data")
        req = _parse_request(raw)
        if not req:
            # Log a truncated snippet of unparseable request payloads.
            try:
                if isinstance(raw, str) and raw.strip():
                    snippet = raw.strip().replace("\r", "").replace("\n", " ")
                    if len(snippet) > 220:
                        # NOTE(review): the appended truncation marker appears
                        # to have been lost in transcription (empty string
                        # here) — likely intended to be "...". Confirm.
                        snippet = snippet[:220] + ""
                    _append_helper_log(
                        f"[request-raw] could not parse request json: {snippet}"
                    )
            except Exception:
                pass
            continue

        req_id = str(req.get("id") or "")
        op = str(req.get("op") or "").strip()
        data = req.get("data")
        pipeline_text = str(req.get("pipeline") or "").strip()
        seeds = req.get("seeds")
        if not req_id:
            continue
        # De-duplicate: property re-notifications of the same request id are ignored.
        if last_seen_id == req_id:
            continue
        last_seen_id = req_id
        try:
            # and/or chain: "op=<op>" when op is set, "(empty)" otherwise.
            label = pipeline_text if pipeline_text else (
                op and ("op=" + op) or "(empty)"
            )
            _append_helper_log(f"\n[request {req_id}] {label}")
        except Exception:
            pass
        try:
            # Ops take priority over pipelines when both are present.
            if op:
                run = _run_op(op, data)
            else:
                if not pipeline_text:
                    continue
                run = _run_pipeline(pipeline_text, seeds=seeds)
            resp = {
                "id": req_id,
                "success": bool(run.get("success")),
                "stdout": run.get("stdout", ""),
                "stderr": run.get("stderr", ""),
                "error": run.get("error"),
                "table": run.get("table"),
            }
            # Pass through op-specific extras (currently only "choices").
            if "choices" in run:
                resp["choices"] = run.get("choices")
        except Exception as exc:
            resp = {
                "id": req_id,
                "success": False,
                "stdout": "",
                "stderr": "",
                "error": f"{type(exc).__name__}: {exc}",
                "table": None,
            }

        # Persist helper output for debugging MPV menu interactions.
        try:
            if resp.get("stdout"):
                _append_helper_log("[stdout]\n" + str(resp.get("stdout")))
            if resp.get("stderr"):
                _append_helper_log("[stderr]\n" + str(resp.get("stderr")))
            if resp.get("error"):
                _append_helper_log("[error]\n" + str(resp.get("error")))
        except Exception:
            pass

        # On failure, write an error log file and tell Lua where it is.
        if not resp.get("success"):
            details = ""
            if resp.get("error"):
                details += str(resp.get("error"))
            if resp.get("stderr"):
                details = (details + "\n" if details else "") + str(resp.get("stderr"))
            log_path = _write_error_log(details, req_id=req_id)
            if log_path:
                resp["log_path"] = log_path
        try:
            # IMPORTANT: don't wait for a response here; waiting would consume
            # async events and can drop/skip property-change notifications.
            client.send_command_no_wait(
                [
                    "set_property_string",
                    RESPONSE_PROP,
                    json.dumps(resp, ensure_ascii=False)
                ]
            )
        except Exception:
            # If posting results fails, there's nothing more useful to do.
            pass
# Script entry point: exit with main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())