This commit is contained in:
2026-02-04 20:51:54 -08:00
parent b714d477a6
commit d806ebad85
9 changed files with 257 additions and 63 deletions

View File

@@ -872,13 +872,20 @@ class MPVIPCClient:
try:
if self.is_windows:
# BinaryIO pipe from open('\\\\.\\pipe\\...')
pipe = cast(BinaryIO, self.sock)
pipe.write(payload.encode("utf-8"))
pipe.flush()
try:
pipe.write(payload.encode("utf-8"))
pipe.flush()
except OSError as e:
# Windows Errno 22 (EINVAL) often means the pipe handle is now invalid/closed
if getattr(e, "errno", 0) == 22:
raise BrokenPipeError(str(e))
raise
else:
sock_obj = cast(socket.socket, self.sock)
sock_obj.sendall(payload.encode("utf-8"))
except (OSError, IOError, BrokenPipeError) as exc:
except (OSError, IOError, BrokenPipeError, ConnectionResetError) as exc:
# Pipe became invalid (disconnected, corrupted, etc.).
# Disconnect and attempt one reconnection.
if not self.silent:
@@ -889,12 +896,17 @@ class MPVIPCClient:
try:
if self.is_windows:
pipe = cast(BinaryIO, self.sock)
pipe.write(payload.encode("utf-8"))
pipe.flush()
try:
pipe.write(payload.encode("utf-8"))
pipe.flush()
except OSError as e:
if getattr(e, "errno", 0) == 22:
raise BrokenPipeError(str(e))
raise
else:
sock_obj = cast(socket.socket, self.sock)
sock_obj.sendall(payload.encode("utf-8"))
except (OSError, IOError, BrokenPipeError) as retry_exc:
except (OSError, IOError, BrokenPipeError, ConnectionResetError) as retry_exc:
self.disconnect()
raise MPVIPCError(f"Pipe write failed after reconnect: {retry_exc}") from retry_exc
else:
@@ -912,7 +924,7 @@ class MPVIPCClient:
try:
pipe = cast(BinaryIO, self.sock)
return pipe.readline()
except (OSError, IOError, BrokenPipeError) as exc:
except (OSError, IOError, BrokenPipeError, ConnectionResetError) as exc:
# Pipe error; try to reconnect once
if not self.silent:
debug(f"Pipe readline failed: {exc}")

View File

@@ -28,6 +28,7 @@ import os
import sys
import tempfile
import time
import threading
import logging
import re
import hashlib
@@ -134,8 +135,16 @@ def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
"rows": rows_payload
}
start_time = time.time()
runner = PipelineRunner()
result = runner.run_pipeline(pipeline_text, seeds=seeds)
duration = time.time() - start_time
try:
_append_helper_log(
f"[pipeline] run_pipeline completed in {duration:.2f}s pipeline={pipeline_text[:64]}"
)
except Exception:
pass
table_payload = None
try:
@@ -152,6 +161,31 @@ def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]:
}
def _run_pipeline_background(pipeline_text: str, *, seeds: Any, req_id: str) -> None:
    """Execute a pipeline on a daemon thread, logging the outcome best-effort.

    The caller gets no result back; success/failure is only recorded via
    the helper log, tagged with *req_id* so it can be correlated later.
    """

    def _worker() -> None:
        # Never let an exception escape the background thread; log it instead.
        try:
            outcome = _run_pipeline(pipeline_text, seeds=seeds)
            if outcome.get("success"):
                status = "success"
            else:
                status = "failed"
            _append_helper_log(
                f"[pipeline async {req_id}] {status} error={outcome.get('error')}"
            )
        except Exception as exc:  # pragma: no cover - best-effort logging
            _append_helper_log(
                f"[pipeline async {req_id}] exception: {type(exc).__name__}: {exc}"
            )

    threading.Thread(
        target=_worker,
        name=f"pipeline-async-{req_id}",
        daemon=True,
    ).start()
def _is_load_url_pipeline(pipeline_text: str) -> bool:
return str(pipeline_text or "").lstrip().lower().startswith(".mpv -url")
def _run_op(op: str, data: Any) -> Dict[str, Any]:
"""Run a helper-only operation.
@@ -1030,13 +1064,29 @@ def main(argv: Optional[list[str]] = None) -> int:
except Exception:
pass
async_dispatch = False
try:
if op:
run = _run_op(op, data)
else:
if not pipeline_text:
continue
run = _run_pipeline(pipeline_text, seeds=seeds)
if _is_load_url_pipeline(pipeline_text):
async_dispatch = True
run = {
"success": True,
"stdout": "",
"stderr": "",
"error": "",
"table": None,
}
_run_pipeline_background(
pipeline_text,
seeds=seeds,
req_id=req_id,
)
else:
run = _run_pipeline(pipeline_text, seeds=seeds)
resp = {
"id": req_id,
@@ -1050,6 +1100,8 @@ def main(argv: Optional[list[str]] = None) -> int:
}
if "choices" in run:
resp["choices"] = run.get("choices")
if async_dispatch:
resp["info"] = "queued asynchronously"
except Exception as exc:
resp = {
"id": req_id,

View File

@@ -1,7 +1,7 @@
# uosc provides seeking & volume indicators (via flash-timeline and flash-volume commands)
# if you decide to use them, you don't need osd-bar
osd-bar=no
ytdl=yes
# uosc will draw its own window controls and border if you disable window border
border=no

View File

@@ -226,7 +226,7 @@ class Bandcamp(Provider):
table = Table(f"Bandcamp: artist:{artist_title}")._perseverance(True)
table.set_table("bandcamp")
try:
table.set_value_case("lower")
table.set_value_case("preserve")
except Exception:
pass

View File

@@ -213,7 +213,7 @@ class PodcastIndex(Provider):
table = Table(f"PodcastIndex Episodes: {feed_title}")._perseverance(True)
table.set_table("podcastindex.episodes")
try:
table.set_value_case("lower")
table.set_value_case("preserve")
except Exception:
pass

View File

@@ -556,8 +556,8 @@ class Table:
self.table_metadata: Dict[str, Any] = {}
"""Optional provider/table metadata (e.g., provider name, view)."""
self.value_case: str = "lower"
"""Display-only value casing: 'lower' (default), 'upper', or 'preserve'."""
self.value_case: str = "preserve"
"""Display-only value casing: 'lower', 'upper', or 'preserve' (default)."""
def set_value_case(self, value_case: str) -> "Table":
"""Configure display-only casing for rendered cell values."""

View File

@@ -132,6 +132,40 @@ def _lua_log_file() -> Path:
return _repo_log_dir() / "medeia-mpv-lua.log"
def _extract_log_filter(args: Sequence[str]) -> tuple[List[str], Optional[str]]:
normalized: List[str] = []
log_filter: Optional[str] = None
i = 0
while i < len(args):
token = str(args[i])
token_lower = token.lower()
if token_lower in {"-log", "--log"}:
normalized.append(token)
if log_filter is None and i + 1 < len(args):
candidate = str(args[i + 1])
if candidate and not candidate.startswith("-"):
log_filter = candidate
i += 2
continue
i += 1
continue
normalized.append(token)
i += 1
return normalized, log_filter
def _apply_log_filter(lines: Sequence[str], filter_text: Optional[str]) -> List[str]:
if not filter_text:
return list(lines)
needle = filter_text.lower()
filtered: List[str] = []
for line in lines:
text = str(line)
if needle in text.lower():
filtered.append(text)
return filtered
def _try_enable_mpv_file_logging(mpv_log_path: str, *, attempts: int = 3) -> bool:
"""Best-effort enable mpv log-file + verbose level on a running instance.
@@ -646,8 +680,8 @@ def _build_ytdl_options(config: Optional[Dict[str,
if cookies_path:
opts.append(f"cookies={cookies_path.replace('\\', '/')}")
else:
opts.append("cookies-from-browser=chrome")
# Do not force chrome cookies if none are found; let yt-dlp use its defaults or fail gracefully.
if hydrus_header:
opts.append(f"add-header={hydrus_header}")
return ",".join(opts) if opts else None
@@ -676,6 +710,35 @@ def _is_hydrus_path(path: str, hydrus_url: Optional[str]) -> bool:
return False
def _is_probable_ytdl_url(url: str) -> bool:
"""Check if the URL is likely meant to be handled by MPV's ytdl-hook.
We use this to avoid wrapping these URLs in memory:// M3U payloads,
since the wrapper can sometimes prevent the ytdl-hook from triggering.
"""
if not isinstance(url, str):
return False
lower = url.lower().strip()
if not lower.startswith(("http://", "https://")):
return False
# Exclude Hydrus API file links (we handle headers for these separately)
if "/get_files/file" in lower:
return False
# Exclude Tidal manifest redirects if they've been resolved already
if "tidal.com" in lower and "/manifest" in lower:
return False
# Exclude AllDebrid protected links
if "alldebrid.com/f/" in lower:
return False
# Most other HTTP links (YouTube, Bandcamp, etc) are candidates for yt-dlp resolution in MPV
return True
def _ensure_ytdl_cookies(config: Optional[Dict[str, Any]] = None) -> None:
"""Ensure yt-dlp options are set correctly for this session."""
from pathlib import Path
@@ -1014,6 +1077,7 @@ def _queue_items(
existing_targets.add(norm)
# Remove duplicates from playlist starting from the end to keep indices valid
# Use wait=False for better performance, especially over slow IPC
for idx in reversed(dup_indexes):
try:
_send_ipc_command(
@@ -1022,7 +1086,8 @@ def _queue_items(
idx],
"request_id": 106
},
silent=True
silent=True,
wait=False
)
except Exception:
pass
@@ -1060,6 +1125,7 @@ def _queue_items(
"request_id": 198,
},
silent=True,
wait=False
)
except Exception:
pass
@@ -1124,9 +1190,8 @@ def _queue_items(
new_targets.add(norm_key)
# Use memory:// M3U hack to pass title to MPV.
# This is especially important for remote URLs (e.g., YouTube) where MPV may otherwise
# show the raw URL as the playlist title.
if title:
# Avoid this for probable ytdl URLs because it can prevent the hook from triggering.
if title and not _is_probable_ytdl_url(target):
# Sanitize title for M3U (remove newlines)
safe_title = title.replace("\n", " ").replace("\r", "")
@@ -1173,7 +1238,7 @@ def _queue_items(
"request_id":
199,
}
_send_ipc_command(header_cmd, silent=True)
_send_ipc_command(header_cmd, silent=True, wait=False)
if effective_ytdl_opts:
ytdl_cmd = {
"command":
@@ -1182,7 +1247,7 @@ def _queue_items(
effective_ytdl_opts],
"request_id": 197,
}
_send_ipc_command(ytdl_cmd, silent=True)
_send_ipc_command(ytdl_cmd, silent=True, wait=False)
# For memory:// M3U payloads (used to carry titles), use loadlist so mpv parses
# the content as a playlist and does not expose #EXTINF lines as entries.
@@ -1228,7 +1293,13 @@ def _queue_items(
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""Manage and play items in the MPV playlist via IPC."""
parsed = parse_cmdlet_args(args, CMDLET)
log_filter_text: Optional[str] = None
args_for_parse, log_filter_text = _extract_log_filter(args)
parsed = parse_cmdlet_args(args_for_parse, CMDLET)
if log_filter_text:
log_filter_text = log_filter_text.strip()
if not log_filter_text:
log_filter_text = None
log_requested = bool(parsed.get("log"))
borderless = bool(parsed.get("borderless"))
@@ -1312,13 +1383,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# If index_arg is provided but is not an integer, treat it as a URL
# This allows .pipe "http://..." without -url flag
if index_arg is not None:
try:
int(index_arg)
except ValueError:
# Avoid exception-based check to prevent debugger breaks on caught exceptions
index_str = str(index_arg).strip()
is_int = False
if index_str:
if index_str.isdigit():
is_int = True
elif index_str.startswith("-") and index_str[1:].isdigit():
is_int = True
if not is_int:
# Not an integer, treat as URL if url_arg is not set
if not url_arg:
url_arg = index_arg
index_arg = None
index_arg = None
clear_mode = parsed.get("clear")
list_mode = parsed.get("list")
@@ -1390,24 +1468,25 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
mpv_started = _queue_items([url_arg], clear_first=queue_replace, config=config, start_opts=start_opts, wait=False)
ctx.emit({"path": url_arg, "title": url_arg, "source": "load-url", "queued": True})
if not (clear_mode or play_mode or pause_mode or save_mode or load_mode or replace_mode):
play_mode = True
if mpv_started:
# MPV was just started, wait a moment for it to be ready, then play first item
import time
time.sleep(0.5)
index_arg = "1" # 1-based index for first item
play_mode = True
index_arg = "1"
else:
# MPV was already running, just show the updated playlist.
list_mode = True
# If already running, we want to play the item we just added (last one).
# We need to fetch the current playlist to find the count.
current_playlist = _get_playlist(silent=True) or []
if current_playlist:
index_arg = str(len(current_playlist))
# If we used queue_replace, the URL is already playing. Clear play/index args to avoid redundant commands.
if queue_replace:
play_mode = False
index_arg = None
# Ensure lyric overlay is running (auto-discovery handled by MPV.lyric).
try:
mpv = MPV()
_ensure_lyric_overlay(mpv)
@@ -1968,18 +2047,26 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
except Exception:
break
if tail_lines:
print("MPV log (tail):")
for ln in tail_lines:
filtered_tail = _apply_log_filter(tail_lines, log_filter_text)
if filtered_tail:
title = "MPV log (tail"
if log_filter_text:
title += f" filtered by '{log_filter_text}'"
title += "):"
print(title)
for ln in filtered_tail:
print(ln)
else:
print("MPV log (tail): <empty>")
print(
"Note: On some Windows builds, mpv cannot start writing to --log-file after launch."
)
print(
"If you need full [main2] logs, restart mpv so it starts with --log-file."
)
if log_filter_text:
print(f"MPV log (tail): <no entries match filter '{log_filter_text}'>")
else:
print("MPV log (tail): <empty>")
print(
"Note: On some Windows builds, mpv cannot start writing to --log-file after launch."
)
print(
"If you need full [main2] logs, restart mpv so it starts with --log-file."
)
# Print database logs for mpv module (helper output)
try:
@@ -1987,19 +2074,28 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log_db_path = str((Path(__file__).resolve().parent.parent / "logs.db"))
conn = sqlite3.connect(log_db_path, timeout=5.0)
cur = conn.cursor()
cur.execute(
"SELECT level, module, message FROM logs WHERE module = 'mpv' ORDER BY timestamp DESC LIMIT 200"
)
query = "SELECT level, module, message FROM logs WHERE module = 'mpv'"
params: List[str] = []
if log_filter_text:
query += " AND LOWER(message) LIKE ?"
params.append(f"%{log_filter_text.lower()}%")
query += " ORDER BY timestamp DESC LIMIT 200"
cur.execute(query, tuple(params))
mpv_logs = cur.fetchall()
cur.close()
conn.close()
print("Helper logs from database (mpv module, most recent first):")
if log_filter_text:
print(f"Helper logs from database (mpv module, filtered by '{log_filter_text}', most recent first):")
else:
print("Helper logs from database (mpv module, most recent first):")
if mpv_logs:
for level, module, message in mpv_logs:
print(f"[{level}] {message}")
else:
print("(no helper logs found)")
if log_filter_text:
print(f"(no helper logs found matching '{log_filter_text}')")
else:
print("(no helper logs found)")
except Exception as e:
debug(f"Could not fetch database logs: {e}")
pass
@@ -2009,13 +2105,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
helper_path = _helper_log_file()
helper_tail = _tail_text_file(str(helper_path), max_lines=200)
filtered_helper = _apply_log_filter(helper_tail, log_filter_text)
print(f"Helper log file: {str(helper_path)}")
if helper_tail:
if filtered_helper:
print("Helper log (tail):")
for ln in helper_tail:
for ln in filtered_helper:
print(ln)
else:
print("Helper log (tail): <empty>")
if log_filter_text:
print(f"(no helper file logs found matching '{log_filter_text}')")
else:
print("Helper log (tail): <empty>")
except Exception:
pass
@@ -2023,13 +2123,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
lua_path = _lua_log_file()
lua_tail = _tail_text_file(str(lua_path), max_lines=200)
filtered_lua = _apply_log_filter(lua_tail, log_filter_text)
print(f"Lua log file: {str(lua_path)}")
if lua_tail:
if filtered_lua:
print("Lua log (tail):")
for ln in lua_tail:
for ln in filtered_lua:
print(ln)
else:
print("Lua log (tail): <empty>")
if log_filter_text:
print(f"(no lua file logs found matching '{log_filter_text}')")
else:
print("Lua log (tail): <empty>")
except Exception:
pass
except Exception:
@@ -2182,7 +2286,7 @@ CMDLET = Cmdlet(
name=".mpv",
alias=[".pipe", "pipe", "playlist", "queue", "ls-pipe"],
summary="Manage and play items in the MPV playlist via IPC",
usage=".mpv [index|url] [-current] [-clear] [-list] [-url URL] [-log] [-borderless]",
usage=".mpv [index|url] [-current] [-clear] [-list] [-url URL] [-log [filter text]] [-borderless]",
arg=[
CmdletArg(
name="index",
@@ -2224,7 +2328,7 @@ CMDLET = Cmdlet(
CmdletArg(
name="log",
type="flag",
description="Enable pipeable debug output and write an mpv log file",
description="Enable pipeable debug output, write an mpv log file, and optionally specify a filter string right after -log to search the stored logs",
),
CmdletArg(
name="borderless",

View File

@@ -1092,6 +1092,7 @@ def main(argv: Optional[list[str]] = None) -> int:
"-m",
"pip",
"install",
"--upgrade",
"-r",
str(req)
],

View File

@@ -73,7 +73,8 @@ def find_requirements(root: Path) -> Optional[Path]:
def install_requirements(
venv_py: Path,
req_path: Path,
reinstall: bool = False
reinstall: bool = False,
upgrade: bool = False
) -> bool:
try:
# Suppression flag for Windows
@@ -81,7 +82,7 @@ def install_requirements(
if os.name == "nt":
kwargs["creationflags"] = 0x08000000
print(f"Installing {req_path} into venv ({venv_py})...")
print(f"Installing/Updating {req_path} into venv ({venv_py})...")
subprocess.run(
[str(venv_py),
"-m",
@@ -93,6 +94,8 @@ def install_requirements(
**kwargs
)
install_cmd = [str(venv_py), "-m", "pip", "install", "-r", str(req_path)]
if upgrade:
install_cmd = [str(venv_py), "-m", "pip", "install", "--upgrade", "-r", str(req_path)]
if reinstall:
install_cmd = [
str(venv_py),
@@ -913,6 +916,11 @@ def main(argv: Optional[List[str]] = None) -> int:
action="store_true",
help="Run 'git pull' before starting the client",
)
p.add_argument(
"--update-deps",
action="store_true",
help="Update python dependencies to latest compatible versions on startup",
)
p.add_argument(
"--gui",
action="store_true",
@@ -1105,12 +1113,29 @@ def main(argv: Optional[List[str]] = None) -> int:
cwd = Path(args.cwd).resolve() if args.cwd else repo_root
# Optionally install dependencies
if args.install_deps or args.reinstall:
# Automatically update dependencies if we pulled new code or if forced via env/flag
should_update = args.update_deps or os.environ.get("MM_UPDATE_DEPS") == "1"
# Check config.conf for auto_update_deps
config_path = repo_root / "config.conf"
if not should_update and config_path.exists():
try:
with open(config_path, "r", encoding="utf-8") as f:
content = f.read()
if "auto_update_deps=true" in content.lower().replace(" ", ""):
should_update = True
except Exception:
pass
if not should_update and args.pull and not (args.install_service or args.uninstall_service):
should_update = True
if args.install_deps or args.reinstall or should_update:
req = find_requirements(repo_root)
if not req:
print("No requirements.txt found; skipping install")
else:
ok = install_requirements(venv_py, req, reinstall=args.reinstall)
ok = install_requirements(venv_py, req, reinstall=args.reinstall, upgrade=should_update)
if not ok:
print("Dependency installation failed; aborting")
return 4
@@ -1124,7 +1149,7 @@ def main(argv: Optional[List[str]] = None) -> int:
)
# If not installing but user asked to verify, do verification only
if args.verify and not (args.install_deps or args.reinstall):
if args.verify and not (args.install_deps or args.reinstall or should_update):
req = find_requirements(repo_root)
if req:
pkgs = parse_requirements_file(req)