Add YAPF style + ignore, and format tracked Python files

2025-12-29 18:42:02 -08:00
parent c019c00aed
commit 507946a3e4
108 changed files with 11664 additions and 6494 deletions
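The .style.yapf and .yapfignore files that the commit title refers to are not visible in the hunks below. A minimal sketch of what such a pair could contain, assuming a pep8-based style with aggressive comma splitting; the option values here are assumptions inferred from the wrapping visible in the diff, not copied from the commit:

# .style.yapf (hypothetical sketch)
[style]
based_on_style = pep8
column_limit = 100
split_all_comma_separated_values = true
each_dict_entry_on_separate_line = true
dedent_closing_brackets = false

# .yapfignore (hypothetical sketch; glob patterns, one per line)
Log/*
*_pb2.py

With both files at the repository root, reformatting the tracked Python files in place is a single invocation such as yapf --in-place --recursive . (YAPF discovers .style.yapf by searching upward from each file and reads .yapfignore from the working directory).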


@@ -61,12 +61,10 @@ _ROOT = str(_repo_root())
if _ROOT not in sys.path:
sys.path.insert(0, _ROOT)
from MPV.mpv_ipc import MPVIPCClient # noqa: E402
from config import load_config # noqa: E402
from SYS.config import load_config # noqa: E402
from SYS.logger import set_debug, debug, set_thread_stream # noqa: E402
REQUEST_PROP = "user-data/medeia-pipeline-request"
RESPONSE_PROP = "user-data/medeia-pipeline-response"
READY_PROP = "user-data/medeia-pipeline-ready"
@@ -103,8 +101,12 @@ def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
try:
cols_payload.append(
{
"name": getattr(c, "name", ""),
"value": getattr(c, "value", ""),
"name": getattr(c,
"name",
""),
"value": getattr(c,
"value",
""),
}
)
except Exception:
@@ -118,10 +120,18 @@ def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
except Exception:
sel_args = None
rows_payload.append({"columns": cols_payload, "selection_args": sel_args})
rows_payload.append(
{
"columns": cols_payload,
"selection_args": sel_args
}
)
# Only return JSON-serializable data (Lua only needs title + rows).
return {"title": str(title or ""), "rows": rows_payload}
return {
"title": str(title or ""),
"rows": rows_payload
}
executor = PipelineExecutor()
result = executor.run_pipeline(pipeline_text, seeds=seeds)
@@ -150,7 +160,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"""
op_name = str(op or "").strip().lower()
if op_name in {"run-detached", "run_detached", "pipeline-detached", "pipeline_detached"}:
if op_name in {"run-detached",
"run_detached",
"pipeline-detached",
"pipeline_detached"}:
pipeline_text = ""
seeds = None
if isinstance(data, dict):
@@ -194,12 +207,13 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
# Best-effort; seeds are optional.
pass
popen_kwargs: Dict[str, Any] = {
"stdin": subprocess.DEVNULL,
"stdout": subprocess.DEVNULL,
"stderr": subprocess.DEVNULL,
"cwd": str(_repo_root()),
}
popen_kwargs: Dict[str,
Any] = {
"stdin": subprocess.DEVNULL,
"stdout": subprocess.DEVNULL,
"stderr": subprocess.DEVNULL,
"cwd": str(_repo_root()),
}
if platform.system() == "Windows":
flags = 0
try:
@@ -213,7 +227,11 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
popen_kwargs["creationflags"] = int(flags)
try:
si = subprocess.STARTUPINFO()
si.dwFlags |= int(getattr(subprocess, "STARTF_USESHOWWINDOW", 0x00000001))
si.dwFlags |= int(
getattr(subprocess,
"STARTF_USESHOWWINDOW",
0x00000001)
)
si.wShowWindow = subprocess.SW_HIDE
popen_kwargs["startupinfo"] = si
except Exception:
@@ -228,7 +246,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"success": False,
"stdout": "",
"stderr": "",
"error": f"Failed to spawn detached pipeline: {type(exc).__name__}: {exc}",
"error":
f"Failed to spawn detached pipeline: {type(exc).__name__}: {exc}",
"table": None,
}
@@ -238,16 +257,21 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"stderr": "",
"error": None,
"table": None,
"pid": int(getattr(proc, "pid", 0) or 0),
"pid": int(getattr(proc,
"pid",
0) or 0),
}
# Provide store backend choices using the same source as CLI/Typer autocomplete.
if op_name in {"store-choices", "store_choices", "get-store-choices", "get_store_choices"}:
if op_name in {"store-choices",
"store_choices",
"get-store-choices",
"get_store_choices"}:
# IMPORTANT:
# - Prefer runtime cwd for config discovery (mpv spawns us with cwd=repo_root).
# - Avoid returning a cached empty result if config was loaded before it existed.
try:
from config import reload_config # noqa: WPS433
from SYS.config import reload_config # noqa: WPS433
from Store import Store # noqa: WPS433
config_root = _runtime_config_root()
@@ -255,7 +279,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
storage = Store(config=cfg, suppress_debug=True)
backends = storage.list_backends() or []
choices = sorted({str(n) for n in backends if str(n).strip()})
choices = sorted({str(n)
for n in backends if str(n).strip()})
# Fallback: if initialization gated all backends (e.g., missing deps or offline stores),
# still return configured instance names so the UI can present something.
@@ -269,7 +294,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
for instance_key, instance_cfg in instances.items():
name = None
if isinstance(instance_cfg, dict):
name = instance_cfg.get("NAME") or instance_cfg.get("name")
name = instance_cfg.get("NAME"
) or instance_cfg.get("name")
candidate = str(name or instance_key or "").strip()
if candidate:
seen.add(candidate)
@@ -297,7 +323,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
# Provide yt-dlp format list for a URL (for MPV "Change format" menu).
# Returns a ResultTable-like payload so the Lua UI can render without running cmdlets.
if op_name in {"ytdlp-formats", "ytdlp_formats", "ytdl-formats", "ytdl_formats"}:
if op_name in {"ytdlp-formats",
"ytdlp_formats",
"ytdl-formats",
"ytdl_formats"}:
try:
url = None
if isinstance(data, dict):
@@ -335,7 +364,8 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"success": False,
"stdout": "",
"stderr": "",
"error": f"yt-dlp module not available: {type(exc).__name__}: {exc}",
"error":
f"yt-dlp module not available: {type(exc).__name__}: {exc}",
"table": None,
}
@@ -350,16 +380,17 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
except Exception:
cookiefile = None
ydl_opts: Dict[str, Any] = {
"quiet": True,
"no_warnings": True,
"socket_timeout": 20,
"retries": 2,
"skip_download": True,
# Avoid accidentally expanding huge playlists on load.
"noplaylist": True,
"noprogress": True,
}
ydl_opts: Dict[str,
Any] = {
"quiet": True,
"no_warnings": True,
"socket_timeout": 20,
"retries": 2,
"skip_download": True,
# Avoid accidentally expanding huge playlists on load.
"noplaylist": True,
"noprogress": True,
}
if cookiefile:
ydl_opts["cookiefile"] = cookiefile
@@ -386,7 +417,9 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
try:
formats_any = info.get("formats") if isinstance(info, dict) else None
count = len(formats_any) if isinstance(formats_any, list) else 0
_append_helper_log(f"[ytdlp-formats] extracted formats count={count} url={url}")
_append_helper_log(
f"[ytdlp-formats] extracted formats count={count} url={url}"
)
if isinstance(formats_any, list) and formats_any:
limit = 60
@@ -414,7 +447,9 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
f"[ytdlp-format {i:02d}] id={fid} ext={ext} res={res} note={note} codecs={vcodec}/{acodec} size={size}"
)
if count > limit:
_append_helper_log(f"[ytdlp-formats] (truncated; total={count})")
_append_helper_log(
f"[ytdlp-formats] (truncated; total={count})"
)
except Exception:
pass
@@ -422,10 +457,13 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
try:
dump = os.environ.get("MEDEIA_MPV_YTDLP_DUMP", "").strip()
if dump and dump != "0" and isinstance(info, dict):
h = hashlib.sha1(url.encode("utf-8", errors="replace")).hexdigest()[:10]
h = hashlib.sha1(url.encode("utf-8",
errors="replace")).hexdigest()[:10]
out_path = _repo_root() / "Log" / f"ytdlp-probe-{h}.json"
out_path.write_text(
json.dumps(info, ensure_ascii=False, indent=2),
json.dumps(info,
ensure_ascii=False,
indent=2),
encoding="utf-8",
errors="replace",
)
@@ -449,7 +487,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"stdout": "",
"stderr": "",
"error": None,
"table": {"title": "Formats", "rows": []},
"table": {
"title": "Formats",
"rows": []
},
}
rows = []
@@ -482,12 +523,25 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
rows.append(
{
"columns": [
{"name": "ID", "value": format_id},
{"name": "Resolution", "value": resolution or ""},
{"name": "Ext", "value": ext or ""},
{"name": "Size", "value": size or ""},
{
"name": "ID",
"value": format_id
},
{
"name": "Resolution",
"value": resolution or ""
},
{
"name": "Ext",
"value": ext or ""
},
{
"name": "Size",
"value": size or ""
},
],
"selection_args": selection_args,
"selection_args":
selection_args,
}
)
@@ -496,7 +550,10 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
"stdout": "",
"stderr": "",
"error": None,
"table": {"title": "Formats", "rows": rows},
"table": {
"title": "Formats",
"rows": rows
},
}
except Exception as exc:
return {
@@ -622,7 +679,10 @@ def main(argv: Optional[list[str]] = None) -> int:
format="[%(name)s] %(levelname)s: %(message)s",
stream=sys.stderr,
)
for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"):
for noisy in ("httpx",
"httpcore",
"httpcore.http11",
"httpcore.connection"):
try:
logging.getLogger(noisy).setLevel(logging.WARNING)
except Exception:
@@ -643,7 +703,9 @@ def main(argv: Optional[list[str]] = None) -> int:
return 0
try:
_append_helper_log(f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}")
_append_helper_log(
f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}"
)
try:
_append_helper_log(
f"[helper] file={Path(__file__).resolve()} cwd={Path.cwd().resolve()}"
@@ -666,6 +728,7 @@ def main(argv: Optional[list[str]] = None) -> int:
try:
class _HelperLogStream:
def __init__(self) -> None:
self._pending = ""
@@ -759,7 +822,11 @@ def main(argv: Optional[list[str]] = None) -> int:
if (now - last_ready_ts) < 0.75:
return
try:
client.send_command_no_wait(["set_property_string", READY_PROP, str(int(now))])
client.send_command_no_wait(
["set_property_string",
READY_PROP,
str(int(now))]
)
last_ready_ts = now
except Exception:
return
@@ -789,7 +856,12 @@ def main(argv: Optional[list[str]] = None) -> int:
# Observe request property changes.
try:
client.send_command_no_wait(["observe_property", OBS_ID_REQUEST, REQUEST_PROP, "string"])
client.send_command_no_wait(
["observe_property",
OBS_ID_REQUEST,
REQUEST_PROP,
"string"]
)
except Exception:
return 3
@@ -807,8 +879,8 @@ def main(argv: Optional[list[str]] = None) -> int:
startup_choices_payload = _run_op("store-choices", None)
startup_choices = (
startup_choices_payload.get("choices")
if isinstance(startup_choices_payload, dict)
else None
if isinstance(startup_choices_payload,
dict) else None
)
if isinstance(startup_choices, list):
preview = ", ".join(str(x) for x in startup_choices[:50])
@@ -819,10 +891,18 @@ def main(argv: Optional[list[str]] = None) -> int:
# Publish to a cached property for Lua to read without IPC request.
try:
cached_json = json.dumps(
{"success": True, "choices": startup_choices}, ensure_ascii=False
{
"success": True,
"choices": startup_choices
},
ensure_ascii=False
)
client.send_command_no_wait(
["set_property_string", "user-data/medeia-store-choices-cached", cached_json]
[
"set_property_string",
"user-data/medeia-store-choices-cached",
cached_json
]
)
_append_helper_log(
f"[helper] published store-choices to user-data/medeia-store-choices-cached"
@@ -834,23 +914,29 @@ def main(argv: Optional[list[str]] = None) -> int:
else:
_append_helper_log("[helper] startup store-choices unavailable")
except Exception as exc:
_append_helper_log(f"[helper] startup store-choices failed: {type(exc).__name__}: {exc}")
_append_helper_log(
f"[helper] startup store-choices failed: {type(exc).__name__}: {exc}"
)
# Also publish config temp directory if available
try:
from config import load_config
from SYS.config import load_config
cfg = load_config()
temp_dir = cfg.get("temp", "").strip() or os.getenv("TEMP") or "/tmp"
if temp_dir:
client.send_command_no_wait(
["set_property_string", "user-data/medeia-config-temp", temp_dir]
["set_property_string",
"user-data/medeia-config-temp",
temp_dir]
)
_append_helper_log(
f"[helper] published config temp to user-data/medeia-config-temp={temp_dir}"
)
except Exception as exc:
_append_helper_log(f"[helper] failed to publish config temp: {type(exc).__name__}: {exc}")
_append_helper_log(
f"[helper] failed to publish config temp: {type(exc).__name__}: {exc}"
)
last_seen_id: Optional[str] = None
@@ -889,9 +975,8 @@ def main(argv: Optional[list[str]] = None) -> int:
if "quic" in lower_prefix and "DEBUG:" in text:
continue
# Suppress progress-bar style lines (keep true errors).
if ("ETA" in text or "%" in text) and (
"ERROR:" not in text and "WARNING:" not in text
):
if ("ETA" in text or "%" in text) and ("ERROR:" not in text
and "WARNING:" not in text):
# Typical yt-dlp progress bar line.
if text.lstrip().startswith("["):
continue
@@ -927,7 +1012,9 @@ def main(argv: Optional[list[str]] = None) -> int:
snippet = raw.strip().replace("\r", "").replace("\n", " ")
if len(snippet) > 220:
snippet = snippet[:220] + "…"
_append_helper_log(f"[request-raw] could not parse request json: {snippet}")
_append_helper_log(
f"[request-raw] could not parse request json: {snippet}"
)
except Exception:
pass
continue
@@ -946,7 +1033,9 @@ def main(argv: Optional[list[str]] = None) -> int:
last_seen_id = req_id
try:
label = pipeline_text if pipeline_text else (op and ("op=" + op) or "(empty)")
label = pipeline_text if pipeline_text else (
op and ("op=" + op) or "(empty)"
)
_append_helper_log(f"\n[request {req_id}] {label}")
except Exception:
pass
@@ -962,8 +1051,10 @@ def main(argv: Optional[list[str]] = None) -> int:
resp = {
"id": req_id,
"success": bool(run.get("success")),
"stdout": run.get("stdout", ""),
"stderr": run.get("stderr", ""),
"stdout": run.get("stdout",
""),
"stderr": run.get("stderr",
""),
"error": run.get("error"),
"table": run.get("table"),
}
@@ -1004,7 +1095,12 @@ def main(argv: Optional[list[str]] = None) -> int:
# IMPORTANT: don't wait for a response here; waiting would consume
# async events and can drop/skip property-change notifications.
client.send_command_no_wait(
["set_property_string", RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)]
[
"set_property_string",
RESPONSE_PROP,
json.dumps(resp,
ensure_ascii=False)
]
)
except Exception:
# If posting results fails, there's nothing more useful to do.
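For context on the property handshake the helper implements: requests arrive as JSON written into user-data/medeia-pipeline-request, the helper observes that property over mpv's JSON IPC, dispatches the op, and posts a JSON reply (id, success, stdout, stderr, error, table) to user-data/medeia-pipeline-response. Below is a minimal sketch of the requesting side over a raw mpv IPC socket; the socket path and the id/op/data request keys are assumptions, and the project's own MPVIPCClient wrapper (not shown in this diff) is not reproduced.

import json
import socket
import uuid

REQUEST_PROP = "user-data/medeia-pipeline-request"    # property the helper observes
RESPONSE_PROP = "user-data/medeia-pipeline-response"  # property the helper writes back

def post_request(ipc_path: str, op: str, data: dict) -> str:
    """Write a request payload into mpv; the helper picks it up via observe_property."""
    req_id = uuid.uuid4().hex  # helper de-duplicates repeated requests by id (last_seen_id)
    payload = json.dumps({"id": req_id, "op": op, "data": data}, ensure_ascii=False)
    cmd = {"command": ["set_property_string", REQUEST_PROP, payload]}
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        sock.connect(ipc_path)  # on Windows mpv exposes a named pipe instead of a Unix socket
        sock.sendall((json.dumps(cmd) + "\n").encode("utf-8"))
    return req_id

# Usage (hypothetical): request the store backend choices, then watch RESPONSE_PROP
# for a reply whose "id" matches the returned value.
# post_request("/tmp/mpv.sock", "store-choices", {})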