Add YAPF style + ignore, and format tracked Python files

2025-12-29 18:42:02 -08:00
parent c019c00aed
commit 507946a3e4
108 changed files with 11664 additions and 6494 deletions
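The hunks below only show the Python sources after reformatting; the YAPF style file and ignore file this commit adds are not among them. As a rough sketch of what they might contain, inferred from the wrapping visible in the diff (argument lists and dict entries split onto separate lines), note that every knob, value, and path here is an assumption rather than something taken from the commit:

  # .style.yapf (hypothetical sketch)
  [style]
  based_on_style = pep8
  column_limit = 79
  # per-element splitting of dicts and call arguments, as seen in the hunks below
  each_dict_entry_on_separate_line = true
  split_all_comma_separated_values = true

  # .yapfignore (hypothetical sketch; placeholder patterns)
  .venv/*
  Log/*

Formatting the tracked files would then be roughly: git ls-files "*.py" | xargs yapf --in-place. The actual invocation used for this commit is not recorded in the diff.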

View File

@@ -41,7 +41,6 @@ from urllib.parse import urlparse
from MPV.mpv_ipc import MPV, MPVIPCClient
_TIMESTAMP_RE = re.compile(r"\[(?P<m>\d+):(?P<s>\d{2})(?:\.(?P<frac>\d{1,3}))?\]")
_OFFSET_RE = re.compile(r"^\[offset:(?P<ms>[+-]?\d+)\]$", re.IGNORECASE)
_HASH_RE = re.compile(r"[0-9a-f]{64}", re.IGNORECASE)
@@ -50,11 +49,9 @@ _HYDRUS_HASH_QS_RE = re.compile(r"hash=([0-9a-f]{64})", re.IGNORECASE)
_WIN_DRIVE_RE = re.compile(r"^[a-zA-Z]:[\\/]")
_WIN_UNC_RE = re.compile(r"^\\\\")
_LOG_FH: Optional[TextIO] = None
_SINGLE_INSTANCE_LOCK_FH: Optional[TextIO] = None
_LYRIC_VISIBLE_PROP = "user-data/medeia-lyric-visible"
# mpv osd-overlay IDs are scoped to the IPC client connection.
@@ -151,7 +148,13 @@ def _osd_overlay_set_ass(client: MPVIPCClient, ass_text: str) -> Optional[dict]:
def _osd_overlay_clear(client: MPVIPCClient) -> None:
client.send_command(
{"command": {"name": "osd-overlay", "id": _LYRIC_OSD_OVERLAY_ID, "format": "none"}}
{
"command": {
"name": "osd-overlay",
"id": _LYRIC_OSD_OVERLAY_ID,
"format": "none"
}
}
)
@@ -175,7 +178,10 @@ def _ipc_get_property(
*,
raise_on_disconnect: bool = False,
) -> object:
resp = client.send_command({"command": ["get_property", name]})
resp = client.send_command({
"command": ["get_property",
name]
})
if resp is None:
if raise_on_disconnect:
raise ConnectionError("Lost mpv IPC connection")
@@ -234,7 +240,10 @@ def _sanitize_query(s: Optional[str]) -> Optional[str]:
return t if t else None
def _infer_artist_title_from_tags(tags: List[str]) -> tuple[Optional[str], Optional[str]]:
def _infer_artist_title_from_tags(
tags: List[str]
) -> tuple[Optional[str],
Optional[str]]:
artist = None
title = None
for t in tags or []:
@@ -267,7 +276,10 @@ def _wrap_plain_lyrics_as_lrc(text: str) -> str:
def _fetch_lrclib(
*, artist: Optional[str], title: Optional[str], duration_s: Optional[float] = None
*,
artist: Optional[str],
title: Optional[str],
duration_s: Optional[float] = None
) -> Optional[str]:
base = "https://lrclib.net/api"
@@ -276,10 +288,11 @@ def _fetch_lrclib(
return None
# Try direct get.
q: Dict[str, str] = {
"artist_name": artist,
"track_name": title,
}
q: Dict[str,
str] = {
"artist_name": artist,
"track_name": title,
}
if isinstance(duration_s, (int, float)) and duration_s and duration_s > 0:
q["duration"] = str(int(duration_s))
url = f"{base}/get?{urlencode(q)}"
@@ -386,7 +399,7 @@ def parse_lrc(text: str) -> List[LrcLine]:
# Ignore non-timestamp metadata lines like [ar:], [ti:], etc.
continue
lyric_text = line[matches[-1].end() :].strip()
lyric_text = line[matches[-1].end():].strip()
for m in matches:
mm = int(m.group("m"))
ss = int(m.group("s"))
@@ -445,10 +458,11 @@ def _extract_hash_from_target(target: str) -> Optional[str]:
def _load_config_best_effort() -> dict:
try:
from config import load_config
from SYS.config import load_config
cfg = load_config()
return cfg if isinstance(cfg, dict) else {}
return cfg if isinstance(cfg,
dict) else {}
except Exception:
return {}
@@ -512,10 +526,11 @@ def _write_temp_sub_file(*, key: str, text: str) -> Path:
tmp_dir.mkdir(parents=True, exist_ok=True)
ext = _infer_sub_extension(text)
digest = hashlib.sha1((key + "\n" + (text or "")).encode("utf-8", errors="ignore")).hexdigest()[
:16
]
safe_key = hashlib.sha1((key or "").encode("utf-8", errors="ignore")).hexdigest()[:12]
digest = hashlib.sha1((key + "\n" + (text or "")).encode("utf-8",
errors="ignore")
).hexdigest()[:16]
safe_key = hashlib.sha1((key or "").encode("utf-8",
errors="ignore")).hexdigest()[:12]
path = (tmp_dir / f"sub-{safe_key}-{digest}{ext}").resolve()
path.write_text(text or "", encoding="utf-8", errors="replace")
return path
@@ -523,14 +538,23 @@ def _write_temp_sub_file(*, key: str, text: str) -> Path:
def _try_remove_selected_external_sub(client: MPVIPCClient) -> None:
try:
client.send_command({"command": ["sub-remove"]})
client.send_command({
"command": ["sub-remove"]
})
except Exception:
return
def _try_add_external_sub(client: MPVIPCClient, path: Path) -> None:
try:
client.send_command({"command": ["sub-add", str(path), "select", "medeia-sub"]})
client.send_command(
{
"command": ["sub-add",
str(path),
"select",
"medeia-sub"]
}
)
except Exception:
return
@@ -658,7 +682,8 @@ def _resolve_store_backend_for_target(
target: str,
file_hash: str,
config: dict,
) -> tuple[Optional[str], Any]:
) -> tuple[Optional[str],
Any]:
"""Resolve a store backend for a local mpv target using the store DB.
A target is considered valid only when:
@@ -756,7 +781,10 @@ def _infer_store_for_target(*, target: str, config: dict) -> Optional[str]:
root = None
try:
root = (
getattr(backend, "_location", None) or getattr(backend, "location", lambda: None)()
getattr(backend,
"_location",
None) or getattr(backend,
"location", lambda: None)()
)
except Exception:
root = None
@@ -795,7 +823,12 @@ def _infer_hash_for_target(target: str) -> Optional[str]:
return None
def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] = None) -> int:
def run_auto_overlay(
*,
mpv: MPV,
poll_s: float = 0.15,
config: Optional[dict] = None
) -> int:
"""Auto mode: track mpv's current file and render lyrics (note: 'lyric') or load subtitles (note: 'sub')."""
cfg = config or {}
@@ -827,7 +860,10 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
try:
# Toggle support (mpv Lua script sets this property; default to visible).
visible_raw = _ipc_get_property(
client, _LYRIC_VISIBLE_PROP, True, raise_on_disconnect=True
client,
_LYRIC_VISIBLE_PROP,
True,
raise_on_disconnect=True
)
raw_path = _ipc_get_property(client, "path", None, raise_on_disconnect=True)
except ConnectionError:
@@ -872,7 +908,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
else:
last_visible = visible
target = _unwrap_memory_m3u(str(raw_path)) if isinstance(raw_path, str) else None
target = _unwrap_memory_m3u(str(raw_path)
) if isinstance(raw_path,
str) else None
if isinstance(target, str):
target = _normalize_file_uri_target(target)
@@ -928,7 +966,8 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
# HTTP/HTTPS targets are only valid if they map to a store backend.
store_from_url = _extract_store_from_url_target(target)
store_name = store_from_url or _infer_hydrus_store_from_url_target(
target=target, config=cfg
target=target,
config=cfg
)
if not store_name:
_log("HTTP target has no store mapping; lyrics disabled")
@@ -954,7 +993,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
current_backend = reg[store_name]
current_store_name = store_name
except Exception:
_log(f"HTTP target store {store_name!r} not available; lyrics disabled")
_log(
f"HTTP target store {store_name!r} not available; lyrics disabled"
)
current_store_name = None
current_backend = None
current_key = None
@@ -995,7 +1036,9 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
continue
current_key = f"{current_store_name}:{current_file_hash}"
_log(f"Resolved store={current_store_name!r} hash={current_file_hash!r} valid=True")
_log(
f"Resolved store={current_store_name!r} hash={current_file_hash!r} valid=True"
)
else:
# Local files: resolve store item via store DB. If not resolvable, lyrics are disabled.
@@ -1006,8 +1049,7 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
)
current_key = (
f"{current_store_name}:{current_file_hash}"
if current_store_name and current_file_hash
else None
if current_store_name and current_file_hash else None
)
_log(
@@ -1032,16 +1074,15 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
# Load/reload lyrics when we have a resolvable key and it differs from what we loaded.
# This is important for the autofetch path: the note can appear without the mpv target changing.
if (
current_key
and current_key != last_loaded_key
and current_store_name
and current_file_hash
and current_backend
):
notes: Dict[str, str] = {}
if (current_key and current_key != last_loaded_key and current_store_name
and current_file_hash and current_backend):
notes: Dict[str,
str] = {}
try:
notes = current_backend.get_note(current_file_hash, config=cfg) or {}
notes = current_backend.get_note(
current_file_hash,
config=cfg
) or {}
except Exception:
notes = {}
@@ -1092,11 +1133,8 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
# Throttle attempts per key to avoid hammering APIs.
autofetch_enabled = bool(cfg.get("lyric_autofetch", True))
now = time.time()
if (
autofetch_enabled
and current_key != last_fetch_attempt_key
and (now - last_fetch_attempt_at) > 2.0
):
if (autofetch_enabled and current_key != last_fetch_attempt_key
and (now - last_fetch_attempt_at) > 2.0):
last_fetch_attempt_key = current_key
last_fetch_attempt_at = now
@@ -1128,7 +1166,10 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
artist=artist,
title=title,
duration_s=(
float(duration_s) if isinstance(duration_s, (int, float)) else None
float(duration_s)
if isinstance(duration_s,
(int,
float)) else None
),
)
if not fetched or not fetched.strip():
@@ -1137,7 +1178,10 @@ def run_auto_overlay(*, mpv: MPV, poll_s: float = 0.15, config: Optional[dict] =
try:
ok = bool(
current_backend.set_note(
current_file_hash, "lyric", fetched, config=cfg
current_file_hash,
"lyric",
fetched,
config=cfg
)
)
_log(f"Autofetch stored lyric note ok={ok}")
@@ -1230,7 +1274,8 @@ def run_overlay(*, mpv: MPV, entries: List[LrcLine], poll_s: float = 0.15) -> in
client = mpv.client()
if not client.connect():
print(
"mpv IPC is not reachable (is mpv running with --input-ipc-server?).", file=sys.stderr
"mpv IPC is not reachable (is mpv running with --input-ipc-server?).",
file=sys.stderr
)
return 3

View File

@@ -20,16 +20,13 @@ from typing import Any, Dict, Optional, List, BinaryIO, Tuple, cast
from SYS.logger import debug
# Fixed pipe name for persistent MPV connection across all Python sessions
FIXED_IPC_PIPE_NAME = "mpv-medeia-macina"
MPV_LUA_SCRIPT_PATH = str(Path(__file__).resolve().parent / "LUA" / "main.lua")
_LYRIC_PROCESS: Optional[subprocess.Popen] = None
_LYRIC_LOG_FH: Optional[Any] = None
_MPV_AVAILABILITY_CACHE: Optional[Tuple[bool, Optional[str]]] = None
@@ -64,7 +61,8 @@ def _windows_hidden_subprocess_kwargs() -> Dict[str, Any]:
if platform.system() != "Windows":
return {}
kwargs: Dict[str, Any] = {}
kwargs: Dict[str,
Any] = {}
try:
create_no_window = getattr(subprocess, "CREATE_NO_WINDOW", 0x08000000)
kwargs["creationflags"] = int(create_no_window)
@@ -103,7 +101,8 @@ def _check_mpv_availability() -> Tuple[bool, Optional[str]]:
try:
result = subprocess.run(
[mpv_path, "--version"],
[mpv_path,
"--version"],
capture_output=True,
text=True,
timeout=2,
@@ -112,7 +111,10 @@ def _check_mpv_availability() -> Tuple[bool, Optional[str]]:
if result.returncode == 0:
_MPV_AVAILABILITY_CACHE = (True, None)
return _MPV_AVAILABILITY_CACHE
_MPV_AVAILABILITY_CACHE = (False, f"MPV returned non-zero exit code: {result.returncode}")
_MPV_AVAILABILITY_CACHE = (
False,
f"MPV returned non-zero exit code: {result.returncode}"
)
return _MPV_AVAILABILITY_CACHE
except Exception as exc:
_MPV_AVAILABILITY_CACHE = (False, f"Error running MPV: {exc}")
@@ -141,7 +143,10 @@ def _windows_list_lyric_helper_pids(ipc_path: str) -> List[int]:
try:
out = subprocess.check_output(
["powershell", "-NoProfile", "-Command", ps_script],
["powershell",
"-NoProfile",
"-Command",
ps_script],
stdin=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
timeout=2,
@@ -186,7 +191,10 @@ def _windows_kill_pids(pids: List[int]) -> None:
for pid in pids or []:
try:
subprocess.run(
["taskkill", "/PID", str(int(pid)), "/F"],
["taskkill",
"/PID",
str(int(pid)),
"/F"],
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
@@ -237,7 +245,11 @@ class MPV:
self.lua_script_path = str(lua_path)
def client(self, silent: bool = False) -> "MPVIPCClient":
return MPVIPCClient(socket_path=self.ipc_path, timeout=self.timeout, silent=bool(silent))
return MPVIPCClient(
socket_path=self.ipc_path,
timeout=self.timeout,
silent=bool(silent)
)
def is_running(self) -> bool:
client = self.client(silent=True)
@@ -247,9 +259,11 @@ class MPV:
finally:
client.disconnect()
def send(
self, command: Dict[str, Any] | List[Any], silent: bool = False
) -> Optional[Dict[str, Any]]:
def send(self,
command: Dict[str,
Any] | List[Any],
silent: bool = False) -> Optional[Dict[str,
Any]]:
client = self.client(silent=bool(silent))
try:
if not client.connect():
@@ -263,13 +277,20 @@ class MPV:
client.disconnect()
def get_property(self, name: str, default: Any = None) -> Any:
resp = self.send({"command": ["get_property", name]})
resp = self.send({
"command": ["get_property",
name]
})
if resp and resp.get("error") == "success":
return resp.get("data", default)
return default
def set_property(self, name: str, value: Any) -> bool:
resp = self.send({"command": ["set_property", name, value]})
resp = self.send({
"command": ["set_property",
name,
value]
})
return bool(resp and resp.get("error") == "success")
def download(
@@ -279,7 +300,8 @@ class MPV:
fmt: str,
store: Optional[str] = None,
path: Optional[str] = None,
) -> Dict[str, Any]:
) -> Dict[str,
Any]:
"""Download a URL using the same pipeline semantics as the MPV UI.
This is intended as a stable Python entrypoint for "button actions".
@@ -291,9 +313,19 @@ class MPV:
path = str(path or "").strip() if path is not None else None
if not url:
return {"success": False, "stdout": "", "stderr": "", "error": "Missing url"}
return {
"success": False,
"stdout": "",
"stderr": "",
"error": "Missing url"
}
if not fmt:
return {"success": False, "stdout": "", "stderr": "", "error": "Missing fmt"}
return {
"success": False,
"stdout": "",
"stderr": "",
"error": "Missing fmt"
}
if bool(store) == bool(path):
return {
"success": False,
@@ -323,10 +355,18 @@ class MPV:
executor = PipelineExecutor()
result = executor.run_pipeline(pipeline)
return {
"success": bool(getattr(result, "success", False)),
"stdout": getattr(result, "stdout", "") or "",
"stderr": getattr(result, "stderr", "") or "",
"error": getattr(result, "error", None),
"success": bool(getattr(result,
"success",
False)),
"stdout": getattr(result,
"stdout",
"") or "",
"stderr": getattr(result,
"stderr",
"") or "",
"error": getattr(result,
"error",
None),
"pipeline": pipeline,
}
except Exception as exc:
@@ -340,7 +380,12 @@ class MPV:
def get_playlist(self, silent: bool = False) -> Optional[List[Dict[str, Any]]]:
resp = self.send(
{"command": ["get_property", "playlist"], "request_id": 100}, silent=silent
{
"command": ["get_property",
"playlist"],
"request_id": 100
},
silent=silent
)
if resp is None:
return None
@@ -383,7 +428,14 @@ class MPV:
if not script_path or not os.path.exists(script_path):
return
# Safe to call repeatedly; mpv will reload the script.
self.send({"command": ["load-script", script_path], "request_id": 12}, silent=True)
self.send(
{
"command": ["load-script",
script_path],
"request_id": 12
},
silent=True
)
except Exception:
return
@@ -465,11 +517,12 @@ class MPV:
except Exception:
_LYRIC_LOG_FH = None
kwargs: Dict[str, Any] = {
"stdin": subprocess.DEVNULL,
"stdout": _LYRIC_LOG_FH or subprocess.DEVNULL,
"stderr": _LYRIC_LOG_FH or subprocess.DEVNULL,
}
kwargs: Dict[str,
Any] = {
"stdin": subprocess.DEVNULL,
"stdout": _LYRIC_LOG_FH or subprocess.DEVNULL,
"stderr": _LYRIC_LOG_FH or subprocess.DEVNULL,
}
# Ensure immediate flushing to the log file.
env = os.environ.copy()
@@ -477,9 +530,8 @@ class MPV:
try:
existing_pp = env.get("PYTHONPATH")
env["PYTHONPATH"] = (
str(repo_root)
if not existing_pp
else (str(repo_root) + os.pathsep + str(existing_pp))
str(repo_root) if not existing_pp else
(str(repo_root) + os.pathsep + str(existing_pp))
)
except Exception:
pass
@@ -528,7 +580,10 @@ class MPV:
return
try:
subprocess.run(
["taskkill", "/IM", "mpv.exe", "/F"],
["taskkill",
"/IM",
"mpv.exe",
"/F"],
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
@@ -613,12 +668,17 @@ class MPV:
if extra_args:
cmd.extend([str(a) for a in extra_args if a])
kwargs: Dict[str, Any] = {}
kwargs: Dict[str,
Any] = {}
if platform.system() == "Windows":
# Ensure we don't flash a console window when spawning mpv.
flags = 0
try:
flags |= int(getattr(subprocess, "DETACHED_PROCESS", 0x00000008)) if detached else 0
flags |= int(
getattr(subprocess,
"DETACHED_PROCESS",
0x00000008)
) if detached else 0
except Exception:
flags |= 0x00000008 if detached else 0
try:
@@ -666,22 +726,30 @@ class MPV:
try:
existing_pp = helper_env.get("PYTHONPATH")
helper_env["PYTHONPATH"] = (
str(repo_root)
if not existing_pp
else (str(repo_root) + os.pathsep + str(existing_pp))
str(repo_root) if not existing_pp else
(str(repo_root) + os.pathsep + str(existing_pp))
)
except Exception:
pass
helper_kwargs: Dict[str, Any] = {}
helper_kwargs: Dict[str,
Any] = {}
if platform.system() == "Windows":
flags = 0
try:
flags |= int(getattr(subprocess, "DETACHED_PROCESS", 0x00000008))
flags |= int(
getattr(subprocess,
"DETACHED_PROCESS",
0x00000008)
)
except Exception:
flags |= 0x00000008
try:
flags |= int(getattr(subprocess, "CREATE_NO_WINDOW", 0x08000000))
flags |= int(
getattr(subprocess,
"CREATE_NO_WINDOW",
0x08000000)
)
except Exception:
flags |= 0x08000000
helper_kwargs["creationflags"] = flags
@@ -750,7 +818,10 @@ class MPVIPCClient:
"""
def __init__(
self, socket_path: Optional[str] = None, timeout: float = 5.0, silent: bool = False
self,
socket_path: Optional[str] = None,
timeout: float = 5.0,
silent: bool = False
):
"""Initialize MPV IPC client.
@@ -798,8 +869,8 @@ class MPVIPCClient:
while True:
nl = self._recv_buffer.find(b"\n")
if nl != -1:
line = self._recv_buffer[: nl + 1]
self._recv_buffer = self._recv_buffer[nl + 1 :]
line = self._recv_buffer[:nl + 1]
self._recv_buffer = self._recv_buffer[nl + 1:]
return line
remaining = deadline - _time.time()
@@ -824,7 +895,10 @@ class MPVIPCClient:
return b""
self._recv_buffer += chunk
def read_message(self, *, timeout: Optional[float] = None) -> Optional[Dict[str, Any]]:
def read_message(self,
*,
timeout: Optional[float] = None) -> Optional[Dict[str,
Any]]:
"""Read the next JSON message/event from MPV.
Returns:
@@ -836,13 +910,17 @@ class MPVIPCClient:
if raw is None:
return None
if raw == b"":
return {"event": "__eof__"}
return {
"event": "__eof__"
}
try:
return json.loads(raw.decode("utf-8", errors="replace").strip())
except Exception:
return None
def send_command_no_wait(self, command_data: Dict[str, Any] | List[Any]) -> Optional[int]:
def send_command_no_wait(self,
command_data: Dict[str,
Any] | List[Any]) -> Optional[int]:
"""Send a command to mpv without waiting for its response.
This is important for long-running event loops (helpers) so we don't
@@ -851,7 +929,9 @@ class MPVIPCClient:
try:
request: Dict[str, Any]
if isinstance(command_data, list):
request = {"command": command_data}
request = {
"command": command_data
}
else:
request = dict(command_data)
@@ -910,7 +990,10 @@ class MPVIPCClient:
self.sock = None
return False
def send_command(self, command_data: Dict[str, Any] | List[Any]) -> Optional[Dict[str, Any]]:
def send_command(self,
command_data: Dict[str,
Any] | List[Any]) -> Optional[Dict[str,
Any]]:
"""Send a command to mpv and get response.
Args:
@@ -927,7 +1010,9 @@ class MPVIPCClient:
# Format command as JSON (mpv IPC protocol)
request: Dict[str, Any]
if isinstance(command_data, list):
request = {"command": command_data}
request = {
"command": command_data
}
else:
request = command_data
@@ -958,7 +1043,10 @@ class MPVIPCClient:
break
try:
lines = response_data.decode("utf-8", errors="replace").strip().split("\n")
lines = response_data.decode(
"utf-8",
errors="replace"
).strip().split("\n")
for line in lines:
if not line:
continue

View File

@@ -32,7 +32,8 @@ def setup_logging(log_file: Optional[Path] = None) -> logging.Logger:
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter(
"[%(asctime)s][%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
"[%(asctime)s][%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S"
)
handler.setFormatter(formatter)
logger.addHandler(handler)
@@ -51,7 +52,8 @@ def execute_pipeline(
pipeline_cmd: str,
log_file: Optional[Path] = None,
dry_run: bool = False,
) -> Dict[str, Any]:
) -> Dict[str,
Any]:
"""Execute a pipeline command and return result as JSON.
Args:
@@ -84,11 +86,16 @@ def execute_pipeline(
cmd_args = shlex.split(pipeline_cmd)
result = subprocess.run(
[sys.executable, "-m", "CLI"] + cmd_args,
[sys.executable,
"-m",
"CLI"] + cmd_args,
capture_output=True,
text=True,
cwd=str(_ROOT_DIR),
env={**dict(__import__("os").environ), "MEDEIA_MPV_CALLER": "lua"},
env={
**dict(__import__("os").environ),
"MEDEIA_MPV_CALLER": "lua"
},
)
if log_file:
@@ -145,12 +152,10 @@ def handle_api_request(request_json: str, log_file: Optional[Path] = None) -> st
return json.dumps(result)
else:
return json.dumps(
{
"success": False,
"error": f"Unknown command: {cmd}",
}
)
return json.dumps({
"success": False,
"error": f"Unknown command: {cmd}",
})
except Exception as exc:
return json.dumps(
@@ -166,7 +171,10 @@ if __name__ == "__main__":
# python mpv_lua_api.py <json-request>
if len(sys.argv) < 2:
print(json.dumps({"success": False, "error": "No request provided"}))
print(json.dumps({
"success": False,
"error": "No request provided"
}))
sys.exit(1)
request_json = sys.argv[1]

View File

@@ -61,12 +61,10 @@ _ROOT = str(_repo_root())
if _ROOT not in sys.path:
sys.path.insert(0, _ROOT)
from MPV.mpv_ipc import MPVIPCClient # noqa: E402
from config import load_config # noqa: E402
from SYS.config import load_config # noqa: E402
from SYS.logger import set_debug, debug, set_thread_stream # noqa: E402
REQUEST_PROP = "user-data/medeia-pipeline-request"
RESPONSE_PROP = "user-data/medeia-pipeline-response"
READY_PROP = "user-data/medeia-pipeline-ready"
@@ -103,8 +101,12 @@ def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
try:
cols_payload.append(
{
"name": getattr(c, "name", ""),
"value": getattr(c, "value", ""),
"name": getattr(c,
"name",
""),
"value": getattr(c,
"value",
""),
}
)
except Exception:
@@ -118,10 +120,18 @@ def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
except Exception:
sel_args = None
rows_payload.append({"columns": cols_payload, "selection_args": sel_args})
rows_payload.append(
{
"columns": cols_payload,
"selection_args": sel_args
}
)
# Only return JSON-serializable data (Lua only needs title + rows).
return {"title": str(title or ""), "rows": rows_payload}
return {
"title": str(title or ""),
"rows": rows_payload
}
executor = PipelineExecutor()
result = executor.run_pipeline(pipeline_text, seeds=seeds)
@@ -150,7 +160,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"""
op_name = str(op or "").strip().lower()
if op_name in {"run-detached", "run_detached", "pipeline-detached", "pipeline_detached"}:
if op_name in {"run-detached",
"run_detached",
"pipeline-detached",
"pipeline_detached"}:
pipeline_text = ""
seeds = None
if isinstance(data, dict):
@@ -194,12 +207,13 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
# Best-effort; seeds are optional.
pass
popen_kwargs: Dict[str, Any] = {
"stdin": subprocess.DEVNULL,
"stdout": subprocess.DEVNULL,
"stderr": subprocess.DEVNULL,
"cwd": str(_repo_root()),
}
popen_kwargs: Dict[str,
Any] = {
"stdin": subprocess.DEVNULL,
"stdout": subprocess.DEVNULL,
"stderr": subprocess.DEVNULL,
"cwd": str(_repo_root()),
}
if platform.system() == "Windows":
flags = 0
try:
@@ -213,7 +227,11 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
popen_kwargs["creationflags"] = int(flags)
try:
si = subprocess.STARTUPINFO()
si.dwFlags |= int(getattr(subprocess, "STARTF_USESHOWWINDOW", 0x00000001))
si.dwFlags |= int(
getattr(subprocess,
"STARTF_USESHOWWINDOW",
0x00000001)
)
si.wShowWindow = subprocess.SW_HIDE
popen_kwargs["startupinfo"] = si
except Exception:
@@ -228,7 +246,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"success": False,
"stdout": "",
"stderr": "",
"error": f"Failed to spawn detached pipeline: {type(exc).__name__}: {exc}",
"error":
f"Failed to spawn detached pipeline: {type(exc).__name__}: {exc}",
"table": None,
}
@@ -238,16 +257,21 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"stderr": "",
"error": None,
"table": None,
"pid": int(getattr(proc, "pid", 0) or 0),
"pid": int(getattr(proc,
"pid",
0) or 0),
}
# Provide store backend choices using the same source as CLI/Typer autocomplete.
if op_name in {"store-choices", "store_choices", "get-store-choices", "get_store_choices"}:
if op_name in {"store-choices",
"store_choices",
"get-store-choices",
"get_store_choices"}:
# IMPORTANT:
# - Prefer runtime cwd for config discovery (mpv spawns us with cwd=repo_root).
# - Avoid returning a cached empty result if config was loaded before it existed.
try:
from config import reload_config # noqa: WPS433
from SYS.config import reload_config # noqa: WPS433
from Store import Store # noqa: WPS433
config_root = _runtime_config_root()
@@ -255,7 +279,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
storage = Store(config=cfg, suppress_debug=True)
backends = storage.list_backends() or []
choices = sorted({str(n) for n in backends if str(n).strip()})
choices = sorted({str(n)
for n in backends if str(n).strip()})
# Fallback: if initialization gated all backends (e.g., missing deps or offline stores),
# still return configured instance names so the UI can present something.
@@ -269,7 +294,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
for instance_key, instance_cfg in instances.items():
name = None
if isinstance(instance_cfg, dict):
name = instance_cfg.get("NAME") or instance_cfg.get("name")
name = instance_cfg.get("NAME"
) or instance_cfg.get("name")
candidate = str(name or instance_key or "").strip()
if candidate:
seen.add(candidate)
@@ -297,7 +323,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
# Provide yt-dlp format list for a URL (for MPV "Change format" menu).
# Returns a ResultTable-like payload so the Lua UI can render without running cmdlets.
if op_name in {"ytdlp-formats", "ytdlp_formats", "ytdl-formats", "ytdl_formats"}:
if op_name in {"ytdlp-formats",
"ytdlp_formats",
"ytdl-formats",
"ytdl_formats"}:
try:
url = None
if isinstance(data, dict):
@@ -335,7 +364,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"success": False,
"stdout": "",
"stderr": "",
"error": f"yt-dlp module not available: {type(exc).__name__}: {exc}",
"error":
f"yt-dlp module not available: {type(exc).__name__}: {exc}",
"table": None,
}
@@ -350,16 +380,17 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
except Exception:
cookiefile = None
ydl_opts: Dict[str, Any] = {
"quiet": True,
"no_warnings": True,
"socket_timeout": 20,
"retries": 2,
"skip_download": True,
# Avoid accidentally expanding huge playlists on load.
"noplaylist": True,
"noprogress": True,
}
ydl_opts: Dict[str,
Any] = {
"quiet": True,
"no_warnings": True,
"socket_timeout": 20,
"retries": 2,
"skip_download": True,
# Avoid accidentally expanding huge playlists on load.
"noplaylist": True,
"noprogress": True,
}
if cookiefile:
ydl_opts["cookiefile"] = cookiefile
@@ -386,7 +417,9 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
try:
formats_any = info.get("formats") if isinstance(info, dict) else None
count = len(formats_any) if isinstance(formats_any, list) else 0
_append_helper_log(f"[ytdlp-formats] extracted formats count={count} url={url}")
_append_helper_log(
f"[ytdlp-formats] extracted formats count={count} url={url}"
)
if isinstance(formats_any, list) and formats_any:
limit = 60
@@ -414,7 +447,9 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
f"[ytdlp-format {i:02d}] id={fid} ext={ext} res={res} note={note} codecs={vcodec}/{acodec} size={size}"
)
if count > limit:
_append_helper_log(f"[ytdlp-formats] (truncated; total={count})")
_append_helper_log(
f"[ytdlp-formats] (truncated; total={count})"
)
except Exception:
pass
@@ -422,10 +457,13 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
try:
dump = os.environ.get("MEDEIA_MPV_YTDLP_DUMP", "").strip()
if dump and dump != "0" and isinstance(info, dict):
h = hashlib.sha1(url.encode("utf-8", errors="replace")).hexdigest()[:10]
h = hashlib.sha1(url.encode("utf-8",
errors="replace")).hexdigest()[:10]
out_path = _repo_root() / "Log" / f"ytdlp-probe-{h}.json"
out_path.write_text(
json.dumps(info, ensure_ascii=False, indent=2),
json.dumps(info,
ensure_ascii=False,
indent=2),
encoding="utf-8",
errors="replace",
)
@@ -449,7 +487,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"stdout": "",
"stderr": "",
"error": None,
"table": {"title": "Formats", "rows": []},
"table": {
"title": "Formats",
"rows": []
},
}
rows = []
@@ -482,12 +523,25 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
rows.append(
{
"columns": [
{"name": "ID", "value": format_id},
{"name": "Resolution", "value": resolution or ""},
{"name": "Ext", "value": ext or ""},
{"name": "Size", "value": size or ""},
{
"name": "ID",
"value": format_id
},
{
"name": "Resolution",
"value": resolution or ""
},
{
"name": "Ext",
"value": ext or ""
},
{
"name": "Size",
"value": size or ""
},
],
"selection_args": selection_args,
"selection_args":
selection_args,
}
)
@@ -496,7 +550,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"stdout": "",
"stderr": "",
"error": None,
"table": {"title": "Formats", "rows": rows},
"table": {
"title": "Formats",
"rows": rows
},
}
except Exception as exc:
return {
@@ -622,7 +679,10 @@ def main(argv: Optional[list[str]] = None) -> int:
format="[%(name)s] %(levelname)s: %(message)s",
stream=sys.stderr,
)
for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"):
for noisy in ("httpx",
"httpcore",
"httpcore.http11",
"httpcore.connection"):
try:
logging.getLogger(noisy).setLevel(logging.WARNING)
except Exception:
@@ -643,7 +703,9 @@ def main(argv: Optional[list[str]] = None) -> int:
return 0
try:
_append_helper_log(f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}")
_append_helper_log(
f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}"
)
try:
_append_helper_log(
f"[helper] file={Path(__file__).resolve()} cwd={Path.cwd().resolve()}"
@@ -666,6 +728,7 @@ def main(argv: Optional[list[str]] = None) -> int:
try:
class _HelperLogStream:
def __init__(self) -> None:
self._pending = ""
@@ -759,7 +822,11 @@ def main(argv: Optional[list[str]] = None) -> int:
if (now - last_ready_ts) < 0.75:
return
try:
client.send_command_no_wait(["set_property_string", READY_PROP, str(int(now))])
client.send_command_no_wait(
["set_property_string",
READY_PROP,
str(int(now))]
)
last_ready_ts = now
except Exception:
return
@@ -789,7 +856,12 @@ def main(argv: Optional[list[str]] = None) -> int:
# Observe request property changes.
try:
client.send_command_no_wait(["observe_property", OBS_ID_REQUEST, REQUEST_PROP, "string"])
client.send_command_no_wait(
["observe_property",
OBS_ID_REQUEST,
REQUEST_PROP,
"string"]
)
except Exception:
return 3
@@ -807,8 +879,8 @@ def main(argv: Optional[list[str]] = None) -> int:
startup_choices_payload = _run_op("store-choices", None)
startup_choices = (
startup_choices_payload.get("choices")
if isinstance(startup_choices_payload, dict)
else None
if isinstance(startup_choices_payload,
dict) else None
)
if isinstance(startup_choices, list):
preview = ", ".join(str(x) for x in startup_choices[:50])
@@ -819,10 +891,18 @@ def main(argv: Optional[list[str]] = None) -> int:
# Publish to a cached property for Lua to read without IPC request.
try:
cached_json = json.dumps(
{"success": True, "choices": startup_choices}, ensure_ascii=False
{
"success": True,
"choices": startup_choices
},
ensure_ascii=False
)
client.send_command_no_wait(
["set_property_string", "user-data/medeia-store-choices-cached", cached_json]
[
"set_property_string",
"user-data/medeia-store-choices-cached",
cached_json
]
)
_append_helper_log(
f"[helper] published store-choices to user-data/medeia-store-choices-cached"
@@ -834,23 +914,29 @@ def main(argv: Optional[list[str]] = None) -> int:
else:
_append_helper_log("[helper] startup store-choices unavailable")
except Exception as exc:
_append_helper_log(f"[helper] startup store-choices failed: {type(exc).__name__}: {exc}")
_append_helper_log(
f"[helper] startup store-choices failed: {type(exc).__name__}: {exc}"
)
# Also publish config temp directory if available
try:
from config import load_config
from SYS.config import load_config
cfg = load_config()
temp_dir = cfg.get("temp", "").strip() or os.getenv("TEMP") or "/tmp"
if temp_dir:
client.send_command_no_wait(
["set_property_string", "user-data/medeia-config-temp", temp_dir]
["set_property_string",
"user-data/medeia-config-temp",
temp_dir]
)
_append_helper_log(
f"[helper] published config temp to user-data/medeia-config-temp={temp_dir}"
)
except Exception as exc:
_append_helper_log(f"[helper] failed to publish config temp: {type(exc).__name__}: {exc}")
_append_helper_log(
f"[helper] failed to publish config temp: {type(exc).__name__}: {exc}"
)
last_seen_id: Optional[str] = None
@@ -889,9 +975,8 @@ def main(argv: Optional[list[str]] = None) -> int:
if "quic" in lower_prefix and "DEBUG:" in text:
continue
# Suppress progress-bar style lines (keep true errors).
if ("ETA" in text or "%" in text) and (
"ERROR:" not in text and "WARNING:" not in text
):
if ("ETA" in text or "%" in text) and ("ERROR:" not in text
and "WARNING:" not in text):
# Typical yt-dlp progress bar line.
if text.lstrip().startswith("["):
continue
@@ -927,7 +1012,9 @@ def main(argv: Optional[list[str]] = None) -> int:
snippet = raw.strip().replace("\r", "").replace("\n", " ")
if len(snippet) > 220:
snippet = snippet[:220] + "…"
_append_helper_log(f"[request-raw] could not parse request json: {snippet}")
_append_helper_log(
f"[request-raw] could not parse request json: {snippet}"
)
except Exception:
pass
continue
@@ -946,7 +1033,9 @@ def main(argv: Optional[list[str]] = None) -> int:
last_seen_id = req_id
try:
label = pipeline_text if pipeline_text else (op and ("op=" + op) or "(empty)")
label = pipeline_text if pipeline_text else (
op and ("op=" + op) or "(empty)"
)
_append_helper_log(f"\n[request {req_id}] {label}")
except Exception:
pass
@@ -962,8 +1051,10 @@ def main(argv: Optional[list[str]] = None) -> int:
resp = {
"id": req_id,
"success": bool(run.get("success")),
"stdout": run.get("stdout", ""),
"stderr": run.get("stderr", ""),
"stdout": run.get("stdout",
""),
"stderr": run.get("stderr",
""),
"error": run.get("error"),
"table": run.get("table"),
}
@@ -1004,7 +1095,12 @@ def main(argv: Optional[list[str]] = None) -> int:
# IMPORTANT: don't wait for a response here; waiting would consume
# async events and can drop/skip property-change notifications.
client.send_command_no_wait(
["set_property_string", RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)]
[
"set_property_string",
RESPONSE_PROP,
json.dumps(resp,
ensure_ascii=False)
]
)
except Exception:
# If posting results fails, there's nothing more useful to do.