Some checks failed
smoke-mm / Install & smoke test mm --help (push) Has been cancelled
This commit is contained in:
MPV/lyric.py (77 changed lines)
@@ -150,7 +150,9 @@ def _osd_overlay_set_ass(client: MPVIPCClient, ass_text: str) -> Optional[dict]:

def _osd_overlay_clear(client: MPVIPCClient) -> None:
client.send_command({"command": {"name": "osd-overlay", "id": _LYRIC_OSD_OVERLAY_ID, "format": "none"}})
client.send_command(
{"command": {"name": "osd-overlay", "id": _LYRIC_OSD_OVERLAY_ID, "format": "none"}}
)

def _log(msg: str) -> None:
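For context, the matching set operation in this module (named in the hunk header above) drives the same osd-overlay IPC command with the ass-events format instead of none. A minimal sketch, assuming the module's MPVIPCClient and _LYRIC_OSD_OVERLAY_ID names and following mpv's documented osd-overlay arguments:

from typing import Optional

# Sketch only; MPVIPCClient and _LYRIC_OSD_OVERLAY_ID are the names used in this module.
def _osd_overlay_set_ass_sketch(client: "MPVIPCClient", ass_text: str) -> Optional[dict]:
    # id/format/data are the documented named arguments of mpv's osd-overlay command;
    # "ass-events" installs the ASS payload, "none" (as in the hunk above) removes it.
    return client.send_command(
        {
            "command": {
                "name": "osd-overlay",
                "id": _LYRIC_OSD_OVERLAY_ID,
                "format": "ass-events",
                "data": ass_text,
            }
        }
    )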
@@ -181,6 +183,8 @@ def _ipc_get_property(
if resp and resp.get("error") == "success":
return resp.get("data", default)
return default

def _http_get_json(url: str, *, timeout_s: float = 10.0) -> Optional[dict]:
try:
req = Request(
@@ -262,7 +266,9 @@ def _wrap_plain_lyrics_as_lrc(text: str) -> str:
return "\n".join(out) + "\n"

def _fetch_lrclib(*, artist: Optional[str], title: Optional[str], duration_s: Optional[float] = None) -> Optional[str]:
def _fetch_lrclib(
*, artist: Optional[str], title: Optional[str], duration_s: Optional[float] = None
) -> Optional[str]:
base = "https://lrclib.net/api"

# Require both artist and title; title-only lookups cause frequent mismatches.
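The function reformatted here queries LRCLIB at the base URL shown in the hunk. A minimal sketch of such a lookup, assuming LRCLIB's public /api/get endpoint and its artist_name/track_name/duration query parameters (the exact request built by _fetch_lrclib is not visible in this diff):

from typing import Optional
from urllib.parse import urlencode

# Sketch only; _http_get_json is the helper shown earlier in this file.
def _fetch_lrclib_sketch(artist: str, title: str, duration_s: Optional[float] = None) -> Optional[str]:
    params = {"artist_name": artist, "track_name": title}
    if duration_s is not None:
        params["duration"] = int(round(duration_s))
    data = _http_get_json(f"https://lrclib.net/api/get?{urlencode(params)}")
    if not isinstance(data, dict):
        return None
    # Prefer timestamped (LRC) lyrics; fall back to plain text.
    return data.get("syncedLyrics") or data.get("plainLyrics")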
@@ -506,7 +512,9 @@ def _write_temp_sub_file(*, key: str, text: str) -> Path:
tmp_dir.mkdir(parents=True, exist_ok=True)

ext = _infer_sub_extension(text)
digest = hashlib.sha1((key + "\n" + (text or "")).encode("utf-8", errors="ignore")).hexdigest()[:16]
digest = hashlib.sha1((key + "\n" + (text or "")).encode("utf-8", errors="ignore")).hexdigest()[
:16
]
safe_key = hashlib.sha1((key or "").encode("utf-8", errors="ignore")).hexdigest()[:12]
path = (tmp_dir / f"sub-{safe_key}-{digest}{ext}").resolve()
path.write_text(text or "", encoding="utf-8", errors="replace")
@@ -747,7 +755,9 @@ def _infer_store_for_target(*, target: str, config: dict) -> Optional[str]:
continue
root = None
try:
root = getattr(backend, "_location", None) or getattr(backend, "location", lambda: None)()
root = (
getattr(backend, "_location", None) or getattr(backend, "location", lambda: None)()
)
except Exception:
root = None
if not root:
@@ -816,7 +826,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
while True:
try:
# Toggle support (mpv Lua script sets this property; default to visible).
visible_raw = _ipc_get_property(client, _LYRIC_VISIBLE_PROP, True, raise_on_disconnect=True)
visible_raw = _ipc_get_property(
client, _LYRIC_VISIBLE_PROP, True, raise_on_disconnect=True
)
raw_path = _ipc_get_property(client, "path", None, raise_on_disconnect=True)
except ConnectionError:
try:
@@ -915,7 +927,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
if is_http:
# HTTP/HTTPS targets are only valid if they map to a store backend.
store_from_url = _extract_store_from_url_target(target)
store_name = store_from_url or _infer_hydrus_store_from_url_target(target=target, config=cfg)
store_name = store_from_url or _infer_hydrus_store_from_url_target(
target=target, config=cfg
)
if not store_name:
_log("HTTP target has no store mapping; lyrics disabled")
current_store_name = None
@@ -962,7 +976,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
except Exception:
meta = None
if meta is None:
_log(f"HTTP target not found in store DB (store={store_name!r} hash={current_file_hash}); lyrics disabled")
_log(
f"HTTP target not found in store DB (store={store_name!r} hash={current_file_hash}); lyrics disabled"
)
current_store_name = None
current_backend = None
current_key = None
@@ -988,9 +1004,15 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
file_hash=current_file_hash,
config=cfg,
)
current_key = f"{current_store_name}:{current_file_hash}" if current_store_name and current_file_hash else None
current_key = (
f"{current_store_name}:{current_file_hash}"
if current_store_name and current_file_hash
else None
)

_log(f"Resolved store={current_store_name!r} hash={current_file_hash!r} valid={bool(current_key)}")
_log(
f"Resolved store={current_store_name!r} hash={current_file_hash!r} valid={bool(current_key)}"
)

if not current_key or not current_backend:
current_store_name = None
@@ -1010,7 +1032,13 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =

# Load/reload lyrics when we have a resolvable key and it differs from what we loaded.
# This is important for the autofetch path: the note can appear without the mpv target changing.
if current_key and current_key != last_loaded_key and current_store_name and current_file_hash and current_backend:
if (
current_key
and current_key != last_loaded_key
and current_store_name
and current_file_hash
and current_backend
):
notes: Dict[str, str] = {}
try:
notes = current_backend.get_note(current_file_hash, config=cfg) or {}
@@ -1018,7 +1046,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
notes = {}

try:
_log(f"Loaded notes keys: {sorted([str(k) for k in notes.keys()]) if isinstance(notes, dict) else 'N/A'}")
_log(
f"Loaded notes keys: {sorted([str(k) for k in notes.keys()]) if isinstance(notes, dict) else 'N/A'}"
)
except Exception:
_log("Loaded notes keys: <error>")

@@ -1062,7 +1092,11 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
# Throttle attempts per key to avoid hammering APIs.
autofetch_enabled = bool(cfg.get("lyric_autofetch", True))
now = time.time()
if autofetch_enabled and current_key != last_fetch_attempt_key and (now - last_fetch_attempt_at) > 2.0:
if (
autofetch_enabled
and current_key != last_fetch_attempt_key
and (now - last_fetch_attempt_at) > 2.0
):
last_fetch_attempt_key = current_key
last_fetch_attempt_at = now

@@ -1082,7 +1116,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
except Exception:
pass

_log(f"Autofetch query artist={artist!r} title={title!r} duration={duration_s!r}")
_log(
f"Autofetch query artist={artist!r} title={title!r} duration={duration_s!r}"
)

if not artist or not title:
_log("Autofetch skipped: requires both artist and title")
@@ -1091,13 +1127,19 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
fetched = _fetch_lrclib(
artist=artist,
title=title,
duration_s=float(duration_s) if isinstance(duration_s, (int, float)) else None,
duration_s=(
float(duration_s) if isinstance(duration_s, (int, float)) else None
),
)
if not fetched or not fetched.strip():
fetched = _fetch_lyrics_ovh(artist=artist, title=title)
if fetched and fetched.strip():
try:
ok = bool(current_backend.set_note(current_file_hash, "lyric", fetched, config=cfg))
ok = bool(
current_backend.set_note(
current_file_hash, "lyric", fetched, config=cfg
)
)
_log(f"Autofetch stored lyric note ok={ok}")
# Next loop iteration will re-load the note.
except Exception as exc:
@@ -1187,7 +1229,9 @@ def run_overlay(*, mpv: MPV, entries: List[LrcLine], poll_s: float = 0.15) -> in

client = mpv.client()
if not client.connect():
print("mpv IPC is not reachable (is mpv running with --input-ipc-server?).", file=sys.stderr)
print(
"mpv IPC is not reachable (is mpv running with --input-ipc-server?).", file=sys.stderr
)
return 3

while True:
@@ -1240,7 +1284,6 @@ def run_overlay(*, mpv: MPV, entries: List[LrcLine], poll_s: float = 0.15) -> in
time.sleep(poll_s)

def main(argv: Optional[List[str]] = None) -> int:
parser = argparse.ArgumentParser(prog="python -m MPV.lyric", add_help=True)
parser.add_argument(
MPV/mpv_ipc.py (135 changed lines)
@@ -133,9 +133,7 @@ def _windows_list_lyric_helper_pids(ipc_path: str) -> List[int]:
# Use CIM to query command lines; output as JSON for robust parsing.
# Note: `ConvertTo-Json` returns a number for single item, array for many, or null.
ps_script = (
"$ipc = "
+ json.dumps(ipc_path)
+ "; "
"$ipc = " + json.dumps(ipc_path) + "; "
"Get-CimInstance Win32_Process | "
"Where-Object { $_.CommandLine -and $_.CommandLine -match ' -m\\s+MPV\\.lyric(\\s|$)' -and $_.CommandLine -match ('--ipc\\s+' + [regex]::Escape($ipc)) } | "
"Select-Object -ExpandProperty ProcessId | ConvertTo-Json -Compress"
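The ConvertTo-Json note above is the reason the caller cannot treat the script's stdout as a plain list; a small sketch of the normalization that has to happen, assumed to mirror what the surrounding function does with the output:

import json
from typing import List

# Sketch: ConvertTo-Json -Compress emits a bare number for one PID,
# a JSON array for several, and null/empty output for none.
def _parse_pid_json_sketch(stdout: str) -> List[int]:
    text = (stdout or "").strip()
    if not text or text == "null":
        return []
    value = json.loads(text)
    if isinstance(value, list):
        return [int(v) for v in value]
    return [int(value)]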
@@ -201,6 +199,7 @@ def _windows_kill_pids(pids: List[int]) -> None:

class MPVIPCError(Exception):
"""Raised when MPV IPC communication fails."""

pass

@@ -248,7 +247,9 @@ class MPV:
finally:
client.disconnect()

def send(self, command: Dict[str, Any] | List[Any], silent: bool = False) -> Optional[Dict[str, Any]]:
def send(
self, command: Dict[str, Any] | List[Any], silent: bool = False
) -> Optional[Dict[str, Any]]:
client = self.client(silent=bool(silent))
try:
if not client.connect():
@@ -308,7 +309,7 @@ class MPV:
pass

def _q(s: str) -> str:
return '"' + s.replace('\\', '\\\\').replace('"', '\\"') + '"'
return '"' + s.replace("\\", "\\\\").replace('"', '\\"') + '"'

pipeline = f"download-media -url {_q(url)} -format {_q(fmt)}"
if store:
@@ -329,10 +330,18 @@ class MPV:
"pipeline": pipeline,
}
except Exception as exc:
return {"success": False, "stdout": "", "stderr": "", "error": f"{type(exc).__name__}: {exc}", "pipeline": pipeline}
return {
"success": False,
"stdout": "",
"stderr": "",
"error": f"{type(exc).__name__}: {exc}",
"pipeline": pipeline,
}

def get_playlist(self, silent: bool = False) -> Optional[List[Dict[str, Any]]]:
resp = self.send({"command": ["get_property", "playlist"], "request_id": 100}, silent=silent)
resp = self.send(
{"command": ["get_property", "playlist"], "request_id": 100}, silent=silent
)
if resp is None:
return None
if resp.get("error") == "success":
@@ -467,7 +476,11 @@ class MPV:
env["PYTHONUNBUFFERED"] = "1"
try:
existing_pp = env.get("PYTHONPATH")
env["PYTHONPATH"] = str(repo_root) if not existing_pp else (str(repo_root) + os.pathsep + str(existing_pp))
env["PYTHONPATH"] = (
str(repo_root)
if not existing_pp
else (str(repo_root) + os.pathsep + str(existing_pp))
)
except Exception:
pass
kwargs["env"] = env
@@ -486,7 +499,13 @@ class MPV:
except Exception:
flags |= 0x08000000
kwargs["creationflags"] = flags
kwargs.update({k: v for k, v in _windows_hidden_subprocess_kwargs().items() if k != "creationflags"})
kwargs.update(
{
k: v
for k, v in _windows_hidden_subprocess_kwargs().items()
if k != "creationflags"
}
)

_LYRIC_PROCESS = subprocess.Popen(cmd, **kwargs)
debug(f"Lyric loader started (log={log_path})")
@@ -608,10 +627,22 @@ class MPV:
flags |= 0x08000000
kwargs["creationflags"] = flags
# startupinfo is harmless for GUI apps; helps hide flashes for console-subsystem builds.
kwargs.update({k: v for k, v in _windows_hidden_subprocess_kwargs().items() if k != "creationflags"})
kwargs.update(
{
k: v
for k, v in _windows_hidden_subprocess_kwargs().items()
if k != "creationflags"
}
)

debug("Starting MPV")
subprocess.Popen(cmd, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs)
subprocess.Popen(
cmd,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
**kwargs,
)

# Start the persistent pipeline helper eagerly so MPV Lua can issue
# non-blocking requests (e.g., format list prefetch) without needing
@@ -634,7 +665,11 @@ class MPV:
helper_env = os.environ.copy()
try:
existing_pp = helper_env.get("PYTHONPATH")
helper_env["PYTHONPATH"] = str(repo_root) if not existing_pp else (str(repo_root) + os.pathsep + str(existing_pp))
helper_env["PYTHONPATH"] = (
str(repo_root)
if not existing_pp
else (str(repo_root) + os.pathsep + str(existing_pp))
)
except Exception:
pass

@@ -650,7 +685,13 @@ class MPV:
except Exception:
flags |= 0x08000000
helper_kwargs["creationflags"] = flags
helper_kwargs.update({k: v for k, v in _windows_hidden_subprocess_kwargs().items() if k != "creationflags"})
helper_kwargs.update(
{
k: v
for k, v in _windows_hidden_subprocess_kwargs().items()
if k != "creationflags"
}
)

helper_kwargs["cwd"] = str(repo_root)
helper_kwargs["env"] = helper_env
@@ -668,10 +709,10 @@ class MPV:

def get_ipc_pipe_path() -> str:
"""Get the fixed IPC pipe/socket path for persistent MPV connection.

Uses a fixed name so all playback sessions connect to the same MPV
window/process instead of creating new instances.

Returns:
Path to IPC pipe (Windows) or socket (Linux/macOS)
"""
@@ -680,7 +721,7 @@ def get_ipc_pipe_path() -> str:
return str(override)

system = platform.system()

if system == "Windows":
return f"\\\\.\\pipe\\{FIXED_IPC_PIPE_NAME}"
elif system == "Darwin": # macOS
@@ -695,7 +736,7 @@ def _unwrap_memory_target(text: Optional[str]) -> Optional[str]:
return text
for line in text.splitlines():
line = line.strip()
if not line or line.startswith('#') or line.startswith('memory://'):
if not line or line.startswith("#") or line.startswith("memory://"):
continue
return line
return text
@@ -703,14 +744,16 @@ def _unwrap_memory_target(text: Optional[str]) -> Optional[str]:

class MPVIPCClient:
"""Client for communicating with mpv via IPC socket/pipe.

This is the unified interface for all Python code to communicate with mpv.
It handles platform-specific differences (Windows named pipes vs Unix sockets).
"""
def __init__(self, socket_path: Optional[str] = None, timeout: float = 5.0, silent: bool = False):

def __init__(
self, socket_path: Optional[str] = None, timeout: float = 5.0, silent: bool = False
):
"""Initialize MPV IPC client.

Args:
socket_path: Path to IPC socket/pipe. If None, uses the fixed persistent path.
timeout: Socket timeout in seconds.
@@ -826,10 +869,10 @@ class MPVIPCClient:
except Exception:
pass
return None

def connect(self) -> bool:
"""Connect to mpv IPC socket.

Returns:
True if connection successful, False otherwise.
"""
@@ -838,7 +881,7 @@ class MPVIPCClient:
# Windows named pipes
try:
# Try to open the named pipe
self.sock = open(self.socket_path, 'r+b', buffering=0)
self.sock = open(self.socket_path, "r+b", buffering=0)
return True
except (OSError, IOError) as exc:
if not self.silent:
@@ -866,20 +909,20 @@ class MPVIPCClient:
debug(f"Failed to connect to MPV IPC: {exc}")
self.sock = None
return False

def send_command(self, command_data: Dict[str, Any] | List[Any]) -> Optional[Dict[str, Any]]:
"""Send a command to mpv and get response.

Args:
command_data: Command dict (e.g. {"command": [...]}) or list (e.g. ["loadfile", ...])

Returns:
Response dict with 'error' key (value 'success' on success), or None on error.
"""
if not self.sock:
if not self.connect():
return None

try:
# Format command as JSON (mpv IPC protocol)
request: Dict[str, Any]
@@ -887,20 +930,21 @@ class MPVIPCClient:
request = {"command": command_data}
else:
request = command_data

# Add request_id if not present to match response
if "request_id" not in request:
request["request_id"] = int(_time.time() * 1000) % 100000

payload = json.dumps(request) + "\n"

# Debug: log the command being sent
from SYS.logger import debug as _debug

_debug(f"[IPC] Sending: {payload.strip()}")

# Send command
self._write_payload(payload)

# Receive response
# We need to read lines until we find the one with matching request_id
# or until timeout/error. MPV might send events in between.
@@ -909,20 +953,22 @@ class MPVIPCClient:
response_data = self._readline(timeout=self.timeout)
if response_data is None:
return None

if not response_data:
break

try:
lines = response_data.decode('utf-8', errors='replace').strip().split('\n')
lines = response_data.decode("utf-8", errors="replace").strip().split("\n")
for line in lines:
if not line: continue
if not line:
continue
resp = json.loads(line)

# Debug: log responses
from SYS.logger import debug as _debug

_debug(f"[IPC] Received: {line}")

# Check if this is the response to our request
if resp.get("request_id") == request.get("request_id"):
return resp
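On the wire, the loop above is parsing newline-delimited JSON as defined by mpv's JSON IPC protocol; a representative exchange (values illustrative) looks like this:

# Illustrative request/response pair for the matching logic above (values made up).
request = {"command": ["get_property", "time-pos"], "request_id": 42}
# Sent as one line:  {"command": ["get_property", "time-pos"], "request_id": 42}\n
# mpv may interleave asynchronous events such as
#   {"event": "property-change", "id": 1, "name": "time-pos", "data": 12.3}
# which is why the reader keeps scanning until request_id matches:
response = {"data": 12.34, "request_id": 42, "error": "success"}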
@@ -940,13 +986,13 @@ class MPVIPCClient:
debug(f"[MPV error] {resp}")
except json.JSONDecodeError:
pass

return None
except Exception as exc:
debug(f"Error sending command to MPV: {exc}")
self.disconnect()
return None

def disconnect(self) -> None:
"""Disconnect from mpv IPC socket."""
if self.sock:
@@ -955,17 +1001,16 @@ class MPVIPCClient:
except Exception:
pass
self.sock = None

def __del__(self) -> None:
"""Cleanup on object destruction."""
self.disconnect()

def __enter__(self):
"""Context manager entry."""
self.connect()
return self

def __exit__(self, exc_type, exc_val, exc_tb):
"""Context manager exit."""
self.disconnect()
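Taken together, the pieces touched in this file give the usual calling pattern for the client; a short usage sketch built only from the names visible above (context manager, connect, send_command, and the fixed pipe path):

# Usage sketch, not part of the diff.
from MPV.mpv_ipc import MPVIPCClient, get_ipc_pipe_path

with MPVIPCClient(get_ipc_pipe_path(), timeout=2.0, silent=True) as client:
    resp = client.send_command({"command": ["get_property", "pause"]})
    if resp and resp.get("error") == "success":
        print("paused:", resp.get("data"))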
@@ -24,20 +24,19 @@ def setup_logging(log_file: Optional[Path] = None) -> logging.Logger:
"""Setup logging for MPV API calls."""
logger = logging.getLogger("mpv-lua-api")
logger.setLevel(logging.DEBUG)

if not logger.handlers:
if log_file:
handler = logging.FileHandler(str(log_file), encoding="utf-8")
else:
handler = logging.StreamHandler(sys.stderr)

formatter = logging.Formatter(
"[%(asctime)s][%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S"
"[%(asctime)s][%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
)
handler.setFormatter(formatter)
logger.addHandler(handler)

return logger

@@ -54,19 +53,19 @@ def execute_pipeline(
dry_run: bool = False,
) -> Dict[str, Any]:
"""Execute a pipeline command and return result as JSON.

Args:
pipeline_cmd: Pipeline command string (e.g. "trim-file -path ... | add-file -store ...")
log_file: Optional path to helper log file for logging
dry_run: If True, log but don't execute

Returns:
JSON object with keys: success, stdout, stderr, error, returncode
"""
try:
if log_file:
log_to_helper(f"[api] execute_pipeline cmd={pipeline_cmd}", log_file)

if dry_run:
return {
"success": True,
@@ -76,30 +75,30 @@ def execute_pipeline(
"returncode": 0,
"cmd": pipeline_cmd,
}

# Call the CLI directly as subprocess
import subprocess
import shlex

# Parse the pipeline command into separate arguments
cmd_args = shlex.split(pipeline_cmd)

result = subprocess.run(
[sys.executable, "-m", "CLI"] + cmd_args,
capture_output=True,
text=True,
cwd=str(_ROOT_DIR),
env={**dict(__import__('os').environ), "MEDEIA_MPV_CALLER": "lua"},
env={**dict(__import__("os").environ), "MEDEIA_MPV_CALLER": "lua"},
)

if log_file:
log_to_helper(
f"[api] result returncode={result.returncode} len_stdout={len(result.stdout or '')} len_stderr={len(result.stderr or '')}",
log_file
log_file,
)
if result.stderr:
log_to_helper(f"[api] stderr: {result.stderr[:500]}", log_file)

return {
"success": result.returncode == 0,
"stdout": result.stdout or "",
@@ -108,12 +107,12 @@ def execute_pipeline(
"returncode": result.returncode,
"cmd": pipeline_cmd,
}

except Exception as exc:
msg = f"{type(exc).__name__}: {exc}"
if log_file:
log_to_helper(f"[api] exception {msg}", log_file)

return {
"success": False,
"stdout": "",
@@ -126,48 +125,52 @@ def execute_pipeline(

def handle_api_request(request_json: str, log_file: Optional[Path] = None) -> str:
"""Handle an API request from Lua and return JSON response.

Request format:
{
"cmd": "execute_pipeline",
"pipeline": "trim-file -path ... | add-file -store ...",
...
}

Response format: JSON with result of the operation.
"""
try:
request = json.loads(request_json)
cmd = request.get("cmd")

if cmd == "execute_pipeline":
pipeline_cmd = request.get("pipeline", "")
result = execute_pipeline(pipeline_cmd, log_file)
return json.dumps(result)

else:
return json.dumps({
"success": False,
"error": f"Unknown command: {cmd}",
})

return json.dumps(
{
"success": False,
"error": f"Unknown command: {cmd}",
}
)

except Exception as exc:
return json.dumps({
"success": False,
"error": f"{type(exc).__name__}: {exc}",
})
return json.dumps(
{
"success": False,
"error": f"{type(exc).__name__}: {exc}",
}
)

if __name__ == "__main__":
# When called from Lua via subprocess:
# python mpv_lua_api.py <json-request>

if len(sys.argv) < 2:
print(json.dumps({"success": False, "error": "No request provided"}))
sys.exit(1)

request_json = sys.argv[1]
log_file = Path(sys.argv[2]) if len(sys.argv) > 2 else None

response = handle_api_request(request_json, log_file)
print(response)
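For the request/response shape documented in handle_api_request above, a worked example of how the Lua side is expected to call this script; the pipeline text and URL are illustrative, and the argv contract comes from the __main__ block above:

# Illustrative invocation of the helper script shown above (run from its directory);
# it assumes the script prints only the JSON response on stdout, as the __main__ block does.
import json
import subprocess
import sys

request = {"cmd": "execute_pipeline", "pipeline": "download-media -url https://example.com/x -format best"}
proc = subprocess.run(
    [sys.executable, "mpv_lua_api.py", json.dumps(request)],
    capture_output=True,
    text=True,
)
result = json.loads(proc.stdout)  # keys: success, stdout, stderr, error, returncode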
@@ -17,6 +17,7 @@ Protocol (user-data properties):

This helper is intentionally minimal: one request at a time, last-write-wins.
"""

from __future__ import annotations

MEDEIA_MPV_HELPER_VERSION = "2025-12-19"
@@ -72,8 +73,6 @@ READY_PROP = "user-data/medeia-pipeline-ready"

OBS_ID_REQUEST = 1001

def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
# Import after sys.path fix.
@@ -181,7 +180,13 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
except Exception:
pass

cmd = [py, str((_repo_root() / "CLI.py").resolve()), "pipeline", "--pipeline", pipeline_text]
cmd = [
py,
str((_repo_root() / "CLI.py").resolve()),
"pipeline",
"--pipeline",
pipeline_text,
]
if seeds is not None:
try:
cmd.extend(["--seeds-json", json.dumps(seeds, ensure_ascii=False)])
@@ -265,7 +270,7 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
name = None
if isinstance(instance_cfg, dict):
name = instance_cfg.get("NAME") or instance_cfg.get("name")
candidate = (str(name or instance_key or "").strip())
candidate = str(name or instance_key or "").strip()
if candidate:
seen.add(candidate)
choices = sorted(seen)
@@ -419,7 +424,11 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
if dump and dump != "0" and isinstance(info, dict):
h = hashlib.sha1(url.encode("utf-8", errors="replace")).hexdigest()[:10]
out_path = _repo_root() / "Log" / f"ytdlp-probe-{h}.json"
out_path.write_text(json.dumps(info, ensure_ascii=False, indent=2), encoding="utf-8", errors="replace")
out_path.write_text(
json.dumps(info, ensure_ascii=False, indent=2),
encoding="utf-8",
errors="replace",
)
_append_helper_log(f"[ytdlp-formats] wrote probe json: {out_path}")
except Exception:
pass
@@ -610,7 +619,7 @@ def main(argv: Optional[list[str]] = None) -> int:
if debug_enabled:
logging.basicConfig(
level=logging.DEBUG,
format='[%(name)s] %(levelname)s: %(message)s',
format="[%(name)s] %(levelname)s: %(message)s",
stream=sys.stderr,
)
for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"):
@@ -628,13 +637,17 @@ def main(argv: Optional[list[str]] = None) -> int:
# Ensure single helper instance per ipc.
_lock_fh = _acquire_ipc_lock(str(args.ipc))
if _lock_fh is None:
_append_helper_log(f"[helper] another instance already holds lock for ipc={args.ipc}; exiting")
_append_helper_log(
f"[helper] another instance already holds lock for ipc={args.ipc}; exiting"
)
return 0

try:
_append_helper_log(f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}")
try:
_append_helper_log(f"[helper] file={Path(__file__).resolve()} cwd={Path.cwd().resolve()}")
_append_helper_log(
f"[helper] file={Path(__file__).resolve()} cwd={Path.cwd().resolve()}"
)
except Exception:
pass
try:
@@ -651,6 +664,7 @@ def main(argv: Optional[list[str]] = None) -> int:
# Route SYS.logger output into the helper log file so diagnostics are not
# lost in mpv's console/terminal output.
try:

class _HelperLogStream:
def __init__(self) -> None:
self._pending = ""
@@ -726,7 +740,9 @@ def main(argv: Optional[list[str]] = None) -> int:
last_connect_error = f"{type(exc).__name__}: {exc}"

if time.time() > connect_deadline:
_append_helper_log(f"[helper] failed to connect ipc={args.ipc} error={last_connect_error or 'timeout'}")
_append_helper_log(
f"[helper] failed to connect ipc={args.ipc} error={last_connect_error or 'timeout'}"
)
return 2

# Keep trying.
@@ -789,31 +805,50 @@ def main(argv: Optional[list[str]] = None) -> int:
# can read immediately without waiting for a request/response cycle (which may timeout).
try:
startup_choices_payload = _run_op("store-choices", None)
startup_choices = startup_choices_payload.get("choices") if isinstance(startup_choices_payload, dict) else None
startup_choices = (
startup_choices_payload.get("choices")
if isinstance(startup_choices_payload, dict)
else None
)
if isinstance(startup_choices, list):
preview = ", ".join(str(x) for x in startup_choices[:50])
_append_helper_log(f"[helper] startup store-choices count={len(startup_choices)} items={preview}")

_append_helper_log(
f"[helper] startup store-choices count={len(startup_choices)} items={preview}"
)

# Publish to a cached property for Lua to read without IPC request.
try:
cached_json = json.dumps({"success": True, "choices": startup_choices}, ensure_ascii=False)
client.send_command_no_wait(["set_property_string", "user-data/medeia-store-choices-cached", cached_json])
_append_helper_log(f"[helper] published store-choices to user-data/medeia-store-choices-cached")
cached_json = json.dumps(
{"success": True, "choices": startup_choices}, ensure_ascii=False
)
client.send_command_no_wait(
["set_property_string", "user-data/medeia-store-choices-cached", cached_json]
)
_append_helper_log(
f"[helper] published store-choices to user-data/medeia-store-choices-cached"
)
except Exception as exc:
_append_helper_log(f"[helper] failed to publish store-choices: {type(exc).__name__}: {exc}")
_append_helper_log(
f"[helper] failed to publish store-choices: {type(exc).__name__}: {exc}"
)
else:
_append_helper_log("[helper] startup store-choices unavailable")
except Exception as exc:
_append_helper_log(f"[helper] startup store-choices failed: {type(exc).__name__}: {exc}")

# Also publish config temp directory if available
try:
from config import load_config

cfg = load_config()
temp_dir = cfg.get("temp", "").strip() or os.getenv("TEMP") or "/tmp"
if temp_dir:
client.send_command_no_wait(["set_property_string", "user-data/medeia-config-temp", temp_dir])
_append_helper_log(f"[helper] published config temp to user-data/medeia-config-temp={temp_dir}")
client.send_command_no_wait(
["set_property_string", "user-data/medeia-config-temp", temp_dir]
)
_append_helper_log(
f"[helper] published config temp to user-data/medeia-config-temp={temp_dir}"
)
except Exception as exc:
_append_helper_log(f"[helper] failed to publish config temp: {type(exc).__name__}: {exc}")

@@ -854,7 +889,9 @@ def main(argv: Optional[list[str]] = None) -> int:
if "quic" in lower_prefix and "DEBUG:" in text:
continue
# Suppress progress-bar style lines (keep true errors).
if ("ETA" in text or "%" in text) and ("ERROR:" not in text and "WARNING:" not in text):
if ("ETA" in text or "%" in text) and (
"ERROR:" not in text and "WARNING:" not in text
):
# Typical yt-dlp progress bar line.
if text.lstrip().startswith("["):
continue
@@ -966,7 +1003,9 @@ def main(argv: Optional[list[str]] = None) -> int:
try:
# IMPORTANT: don't wait for a response here; waiting would consume
# async events and can drop/skip property-change notifications.
client.send_command_no_wait(["set_property_string", RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)])
client.send_command_no_wait(
["set_property_string", RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)]
)
except Exception:
# If posting results fails, there's nothing more useful to do.
pass
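The properties the helper publishes above are plain mpv user-data strings, so any IPC client can read them back; a small sketch using the MPVIPCClient from MPV/mpv_ipc.py, with the property name taken from this diff:

# Sketch: reading the helper's cached store choices back over IPC.
import json
from MPV.mpv_ipc import MPVIPCClient, get_ipc_pipe_path

with MPVIPCClient(get_ipc_pipe_path(), silent=True) as client:
    resp = client.send_command({"command": ["get_property", "user-data/medeia-store-choices-cached"]})
    if resp and resp.get("error") == "success":
        payload = json.loads(resp.get("data") or "{}")
        print(payload.get("choices"))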