This commit is contained in:
2026-03-19 13:08:15 -07:00
parent 5cbc2c09df
commit cc9c1850a8
7 changed files with 340 additions and 55 deletions

123
CLI.py
View File

@@ -114,6 +114,68 @@ from SYS.cli_parsing import SelectionSyntax, SelectionFilterSyntax, MedeiaLexer
def _notify_mpv_osd(text: str, *, duration_ms: int = 3500) -> bool:
message = str(text or "").strip()
if not message:
return False
try:
from MPV.mpv_ipc import MPVIPCClient, get_ipc_pipe_path
client = MPVIPCClient(
socket_path=get_ipc_pipe_path(),
timeout=0.75,
silent=True,
)
try:
response = client.send_command({
"command": [
"show-text",
message,
max(0, int(duration_ms)),
]
})
finally:
try:
client.disconnect()
except Exception:
pass
return bool(response and response.get("error") == "success")
except Exception as exc:
debug(f"mpv notify failed: {exc}")
return False
def _notify_mpv_completion(metadata: Dict[str, Any], execution_result: Dict[str, Any]) -> bool:
notify = metadata.get("mpv_notify") if isinstance(metadata, dict) else None
if not isinstance(notify, dict):
return False
success = bool(execution_result.get("success"))
error_text = str(execution_result.get("error") or "").strip()
if success:
message = str(notify.get("success_text") or "").strip()
else:
failure_prefix = str(notify.get("failure_text") or "").strip()
message = failure_prefix
if error_text:
if message:
message = f"{message}: {error_text}"
else:
message = error_text
if not message:
return False
try:
duration_ms = int(notify.get("duration_ms") or 3500)
except Exception:
duration_ms = 3500
return _notify_mpv_osd(message, duration_ms=duration_ms)
class _OldWorkerStages: class _OldWorkerStages:
"""Factory methods for stage/pipeline worker sessions.""" """Factory methods for stage/pipeline worker sessions."""
@@ -819,6 +881,14 @@ class CmdletExecutor:
if not cmd_fn: if not cmd_fn:
print(f"Unknown command: {cmd_name}\n") print(f"Unknown command: {cmd_name}\n")
try:
ctx.set_last_execution_result(
status="failed",
error=f"Unknown command: {cmd_name}",
command_text=" ".join([cmd_name, *args]).strip() or cmd_name,
)
except Exception:
pass
return return
config = self._config_loader.load() config = self._config_loader.load()
@@ -1289,6 +1359,14 @@ class CmdletExecutor:
already_rendered = False already_rendered = False
if already_rendered: if already_rendered:
try:
ctx.set_last_execution_result(
status=stage_status,
error=stage_error,
command_text=" ".join([cmd_name, *filtered_args]).strip() or cmd_name,
)
except Exception:
pass
return return
if progress_ui is not None: if progress_ui is not None:
@@ -1351,6 +1429,15 @@ class CmdletExecutor:
pass pass
# Do not keep stage tables around after a single command; it can cause # Do not keep stage tables around after a single command; it can cause
# later @ selections to bind to stale tables (e.g. old add-file scans). # later @ selections to bind to stale tables (e.g. old add-file scans).
try:
if hasattr(ctx, "set_last_execution_result"):
ctx.set_last_execution_result(
status=stage_status,
error=stage_error,
command_text=" ".join([cmd_name, *filtered_args]).strip() or cmd_name,
)
except Exception:
pass
try: try:
if hasattr(ctx, "set_current_stage_table"): if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(None) ctx.set_current_stage_table(None)
@@ -2268,6 +2355,11 @@ Come to love it when others take what you share, as there is no greater joy
continue continue
pipeline_ctx_ref = None pipeline_ctx_ref = None
queued_metadata = (
queued_payload.get("metadata")
if isinstance(queued_payload, dict) and isinstance(queued_payload.get("metadata"), dict)
else None
)
try: try:
from SYS import pipeline as ctx from SYS import pipeline as ctx
@@ -2276,11 +2368,24 @@ Come to love it when others take what you share, as there is no greater joy
except Exception: except Exception:
pipeline_ctx_ref = None pipeline_ctx_ref = None
execution_result: Dict[str, Any] = {
"status": "completed",
"success": True,
"error": "",
"command_text": user_input,
}
try: try:
from SYS.cli_syntax import validate_pipeline_text from SYS.cli_syntax import validate_pipeline_text
syntax_error = validate_pipeline_text(user_input) syntax_error = validate_pipeline_text(user_input)
if syntax_error: if syntax_error:
execution_result = {
"status": "failed",
"success": False,
"error": str(syntax_error.message or "syntax error"),
"command_text": user_input,
}
print(syntax_error.message, file=sys.stderr) print(syntax_error.message, file=sys.stderr)
continue continue
except Exception: except Exception:
@@ -2289,6 +2394,12 @@ Come to love it when others take what you share, as there is no greater joy
try: try:
tokens = shlex.split(user_input) tokens = shlex.split(user_input)
except ValueError as exc: except ValueError as exc:
execution_result = {
"status": "failed",
"success": False,
"error": str(exc),
"command_text": user_input,
}
print(f"Syntax error: {exc}", file=sys.stderr) print(f"Syntax error: {exc}", file=sys.stderr)
continue continue
@@ -2434,6 +2545,18 @@ Come to love it when others take what you share, as there is no greater joy
else: else:
self._cmdlet_executor.execute(cmd_name, tokens[1:]) self._cmdlet_executor.execute(cmd_name, tokens[1:])
finally: finally:
if pipeline_ctx_ref and hasattr(pipeline_ctx_ref, "get_last_execution_result"):
try:
latest = pipeline_ctx_ref.get_last_execution_result()
if isinstance(latest, dict) and latest:
execution_result = latest
except Exception:
pass
if queued_metadata:
try:
_notify_mpv_completion(queued_metadata, execution_result)
except Exception:
pass
if pipeline_ctx_ref: if pipeline_ctx_ref:
pipeline_ctx_ref.clear_current_command_text() pipeline_ctx_ref.clear_current_command_text()

View File

@@ -4,7 +4,7 @@ local msg = require 'mp.msg'
local M = {} local M = {}
local MEDEIA_LUA_VERSION = '2026-03-19.1' local MEDEIA_LUA_VERSION = '2026-03-19.2'
-- Expose a tiny breadcrumb for debugging which script version is loaded. -- Expose a tiny breadcrumb for debugging which script version is loaded.
pcall(mp.set_property, 'user-data/medeia-lua-version', MEDEIA_LUA_VERSION) pcall(mp.set_property, 'user-data/medeia-lua-version', MEDEIA_LUA_VERSION)
@@ -1184,6 +1184,53 @@ local function _extract_query_param(url, key)
return nil return nil
end end
-- Strip "hidden playlist" query parameters from a YouTube watch URL so a
-- download targets only the explicitly selected video.
-- Returns two values: the (possibly rewritten) url, and a boolean that is
-- true only when at least one parameter was actually removed.
-- Non-YouTube URLs, URLs without a query string, and watch URLs lacking an
-- explicit video id are returned unchanged with `false`.
local function _download_url_for_current_item(url)
  url = trim(tostring(url or ''))
  if url == '' then
    return '', false
  end
  -- Split into base and query; no '?' (or empty query) means nothing to strip.
  local base, query = url:match('^([^?]+)%?(.*)$')
  if not base or not query or query == '' then
    return url, false
  end
  local base_lower = tostring(base or ''):lower()
  -- Only rewrite when the URL clearly identifies a single video:
  -- youtu.be short links carry the id in the path; /watch URLs must have ?v=.
  local has_explicit_video = false
  if base_lower:match('youtu%.be/') then
    has_explicit_video = true
  elseif base_lower:match('youtube%.com/watch') or base_lower:match('youtube%-nocookie%.com/watch') then
    has_explicit_video = _extract_query_param(url, 'v') ~= nil
  end
  if not has_explicit_video then
    return url, false
  end
  -- Walk each key=value pair, dropping the playlist-related keys and
  -- keeping everything else verbatim (pairs are not re-encoded).
  local kept = {}
  local changed = false
  for pair in query:gmatch('[^&]+') do
    local raw_key = pair:match('^([^=]+)') or pair
    -- Compare on the percent-decoded, lowercased key name.
    local key = tostring(_percent_decode(raw_key) or raw_key or ''):lower()
    local keep = true
    if key == 'list' or key == 'index' or key == 'start_radio' or key == 'pp' or key == 'si' then
      keep = false
      changed = true
    end
    if keep then
      kept[#kept + 1] = pair
    end
  end
  if not changed then
    return url, false
  end
  -- Reassemble: keep remaining params, or drop the '?' entirely if none remain.
  if #kept > 0 then
    return base .. '?' .. table.concat(kept, '&'), true
  end
  return base, true
end
local function _normalize_url_for_store_lookup(url) local function _normalize_url_for_store_lookup(url)
url = trim(tostring(url or '')) url = trim(tostring(url or ''))
if url == '' then if url == '' then
@@ -1522,14 +1569,78 @@ local function _normalize_tag_list(value)
return tags return tags
end end
local function _queue_pipeline_in_repl(pipeline_cmd, queued_message, failure_prefix, queue_label) local function _write_repl_queue_file_local(command_text, source_text, metadata)
-- Persist a queued REPL command as a JSON file under <repo>/Log so the
-- Python REPL can pick it up directly, without the pipeline helper.
-- Returns the written file path on success, or nil plus an error string.
local function _write_repl_queue_file_local(command_text, source_text, metadata)
  command_text = trim(tostring(command_text or ''))
  if command_text == '' then
    return nil, 'empty pipeline command'
  end
  local root = _detect_repo_root()
  if root == '' then
    return nil, 'repo root not found'
  end
  local log_dir = utils.join_path(root, 'Log')
  if not _path_exists(log_dir) then
    return nil, 'Log directory not found'
  end
  -- Millisecond timestamp plus a random nonce keeps concurrent writers
  -- from colliding on the same filename.
  local millis = tostring(math.floor(mp.get_time() * 1000))
  local nonce = tostring(math.random(100000, 999999))
  local file_path = utils.join_path(log_dir, 'medeia-repl-queue-' .. millis .. '-' .. nonce .. '.json')
  local record = {
    id = millis .. '-' .. nonce,
    command = command_text,
    source = trim(tostring(source_text or 'external')),
    created_at = os.time(),
  }
  -- Only attach metadata when it is a non-empty table.
  if type(metadata) == 'table' and next(metadata) ~= nil then
    record.metadata = metadata
  end
  local body = utils.format_json(record)
  if type(body) ~= 'string' or body == '' then
    return nil, 'failed to encode queue payload'
  end
  local handle = io.open(file_path, 'w')
  if not handle then
    return nil, 'failed to open queue file'
  end
  handle:write(body)
  handle:close()
  return file_path, nil
end
local function _queue_pipeline_in_repl(pipeline_cmd, queued_message, failure_prefix, queue_label, metadata)
pipeline_cmd = trim(tostring(pipeline_cmd or '')) pipeline_cmd = trim(tostring(pipeline_cmd or ''))
if pipeline_cmd == '' then if pipeline_cmd == '' then
mp.osd_message((failure_prefix or 'REPL queue failed') .. ': empty pipeline command', 5) mp.osd_message((failure_prefix or 'REPL queue failed') .. ': empty pipeline command', 5)
return false return false
end end
local queue_metadata = { kind = 'mpv-download' }
if type(metadata) == 'table' then
for key, value in pairs(metadata) do
queue_metadata[key] = value
end
end
_lua_log(queue_label .. ': queueing repl cmd=' .. pipeline_cmd) _lua_log(queue_label .. ': queueing repl cmd=' .. pipeline_cmd)
do
local queue_path, queue_err = _write_repl_queue_file_local(
pipeline_cmd,
queue_label,
queue_metadata
)
if queue_path then
_lua_log(queue_label .. ': queued repl command locally path=' .. tostring(queue_path))
mp.osd_message(tostring(queued_message or 'Queued in REPL'), 3)
return true
end
_lua_log(queue_label .. ': local queue write failed err=' .. tostring(queue_err or 'unknown') .. '; falling back to helper')
end
ensure_mpv_ipc_server() ensure_mpv_ipc_server()
if not ensure_pipeline_helper_running() then if not ensure_pipeline_helper_running() then
mp.osd_message((failure_prefix or 'REPL queue failed') .. ': helper not running', 5) mp.osd_message((failure_prefix or 'REPL queue failed') .. ': helper not running', 5)
@@ -1542,7 +1653,7 @@ local function _queue_pipeline_in_repl(pipeline_cmd, queued_message, failure_pre
data = { data = {
command = pipeline_cmd, command = pipeline_cmd,
source = queue_label, source = queue_label,
metadata = { kind = 'mpv-download' }, metadata = queue_metadata,
}, },
}, },
4.0, 4.0,
@@ -1553,6 +1664,14 @@ local function _queue_pipeline_in_repl(pipeline_cmd, queued_message, failure_pre
mp.osd_message(tostring(queued_message or 'Queued in REPL'), 3) mp.osd_message(tostring(queued_message or 'Queued in REPL'), 3)
return return
end end
local err_text = tostring(err or '')
if err_text:find('timeout waiting response', 1, true) ~= nil then
_lua_log(queue_label .. ': queue ack timeout; assuming repl command queued')
mp.osd_message(tostring(queued_message or 'Queued in REPL'), 3)
return
end
local detail = tostring(err or (resp and resp.error) or 'unknown') local detail = tostring(err or (resp and resp.error) or 'unknown')
_lua_log(queue_label .. ': queue failed err=' .. detail) _lua_log(queue_label .. ': queue failed err=' .. detail)
mp.osd_message((failure_prefix or 'REPL queue failed') .. ': ' .. detail, 5) mp.osd_message((failure_prefix or 'REPL queue failed') .. ': ' .. detail, 5)
@@ -3764,13 +3883,25 @@ local function _start_download_flow_for_current()
pipeline_cmd, pipeline_cmd,
'Queued in REPL: store copy', 'Queued in REPL: store copy',
'REPL queue failed', 'REPL queue failed',
'download-store-copy' 'download-store-copy',
{
mpv_notify = {
success_text = 'Copy completed: store ' .. tostring(store_hash.store),
failure_text = 'Copy failed: store ' .. tostring(store_hash.store),
duration_ms = 3500,
},
}
) )
return return
end end
-- Non-store URL flow: use the current yt-dlp-selected format and ask for save location. -- Non-store URL flow: use the current yt-dlp-selected format and ask for save location.
local url = tostring(target) local url = tostring(target)
local download_url, stripped_playlist = _download_url_for_current_item(url)
if stripped_playlist then
_lua_log('download: stripped hidden playlist params from current url -> ' .. tostring(download_url))
url = tostring(download_url)
end
local fmt = _current_ytdl_format_string() local fmt = _current_ytdl_format_string()
if not fmt or fmt == '' then if not fmt or fmt == '' then
@@ -3966,7 +4097,14 @@ mp.register_script_message('medios-download-pick-store', function(json)
pipeline_cmd, pipeline_cmd,
'Queued in REPL: save to store ' .. store, 'Queued in REPL: save to store ' .. store,
'REPL queue failed', 'REPL queue failed',
'download-store-save' 'download-store-save',
{
mpv_notify = {
success_text = 'Download completed: store ' .. store .. ' [' .. fmt .. ']',
failure_text = 'Download failed: store ' .. store .. ' [' .. fmt .. ']',
duration_ms = 3500,
},
}
) )
_pending_download = nil _pending_download = nil
end) end)
@@ -3996,7 +4134,14 @@ mp.register_script_message('medios-download-pick-path', function()
pipeline_cmd, pipeline_cmd,
'Queued in REPL: save to folder', 'Queued in REPL: save to folder',
'REPL queue failed', 'REPL queue failed',
'download-folder-save' 'download-folder-save',
{
mpv_notify = {
success_text = 'Download completed: folder [' .. fmt .. ']',
failure_text = 'Download failed: folder [' .. fmt .. ']',
duration_ms = 3500,
},
}
) )
_pending_download = nil _pending_download = nil
end) end)

View File

@@ -1,2 +1,2 @@
# Medeia MPV script options # Medeia MPV script options
store=rpi store=local

View File

@@ -4,6 +4,7 @@ Pipeline execution context and state management for cmdlet.
from __future__ import annotations from __future__ import annotations
import sys import sys
import time
from contextlib import contextmanager from contextlib import contextmanager
from dataclasses import dataclass, field from dataclasses import dataclass, field
from contextvars import ContextVar from contextvars import ContextVar
@@ -102,6 +103,7 @@ class PipelineState:
ui_library_refresh_callback: Optional[Any] = None ui_library_refresh_callback: Optional[Any] = None
pipeline_stop: Optional[Dict[str, Any]] = None pipeline_stop: Optional[Dict[str, Any]] = None
live_progress: Any = None live_progress: Any = None
last_execution_result: Dict[str, Any] = field(default_factory=dict)
def reset(self) -> None: def reset(self) -> None:
self.current_context = None self.current_context = None
@@ -127,6 +129,7 @@ class PipelineState:
self.ui_library_refresh_callback = None self.ui_library_refresh_callback = None
self.pipeline_stop = None self.pipeline_stop = None
self.live_progress = None self.live_progress = None
self.last_execution_result = {}
# ContextVar for per-run state (prototype) # ContextVar for per-run state (prototype)
@@ -315,6 +318,29 @@ def load_value(key: str, default: Any = None) -> Any:
return current return current
def set_last_execution_result(
    *,
    status: str,
    error: str = "",
    command_text: str = "",
) -> None:
    """Record the outcome of the most recent command/pipeline run.

    *status* is normalized to a lowercase token (falling back to
    ``"unknown"`` when blank); ``success`` is derived from it being exactly
    ``"completed"``.  The snapshot, including a ``finished_at`` timestamp,
    is stored on the per-run pipeline state for later retrieval via
    ``get_last_execution_result``.
    """
    normalized = str(status or "").strip().lower() or "unknown"
    _get_pipeline_state().last_execution_result = {
        "status": normalized,
        "success": normalized == "completed",
        "error": str(error or "").strip(),
        "command_text": str(command_text or "").strip(),
        "finished_at": time.time(),
    }
def get_last_execution_result() -> Dict[str, Any]:
    """Return a shallow copy of the last recorded execution result.

    Yields an empty dict when nothing has been recorded yet (or the stored
    value is not a dict); callers may mutate the copy freely.
    """
    snapshot = _get_pipeline_state().last_execution_result
    if not isinstance(snapshot, dict):
        return {}
    return dict(snapshot)
def set_pending_pipeline_tail( def set_pending_pipeline_tail(
stages: Optional[Sequence[Sequence[str]]], stages: Optional[Sequence[Sequence[str]]],
source_command: Optional[str] = None source_command: Optional[str] = None
@@ -3042,3 +3068,11 @@ class PipelineExecutor:
f"Pipeline {pipeline_status}: {pipeline_error or ''}") f"Pipeline {pipeline_status}: {pipeline_error or ''}")
except Exception: except Exception:
logger.exception("Failed to log final pipeline status (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None)) logger.exception("Failed to log final pipeline status (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
try:
set_last_execution_result(
status=pipeline_status,
error=pipeline_error,
command_text=pipeline_text,
)
except Exception:
logger.exception("Failed to record last execution result for pipeline")

View File

@@ -11,6 +11,13 @@ def repl_queue_dir(root: Path) -> Path:
return Path(root) / "Log" / "repl_queue" return Path(root) / "Log" / "repl_queue"
def _legacy_repl_queue_glob(root: Path) -> list[Path]:
log_dir = Path(root) / "Log"
if not log_dir.exists():
return []
return list(log_dir.glob("medeia-repl-queue-*.json"))
def enqueue_repl_command( def enqueue_repl_command(
root: Path, root: Path,
command: str, command: str,
@@ -41,11 +48,24 @@ def enqueue_repl_command(
def pop_repl_commands(root: Path, *, limit: int = 8) -> List[Dict[str, Any]]: def pop_repl_commands(root: Path, *, limit: int = 8) -> List[Dict[str, Any]]:
queue_dir = repl_queue_dir(root) queue_dir = repl_queue_dir(root)
if not queue_dir.exists(): legacy_entries = _legacy_repl_queue_glob(root)
if not queue_dir.exists() and not legacy_entries:
return [] return []
items: List[Dict[str, Any]] = [] items: List[Dict[str, Any]] = []
for entry in sorted(queue_dir.glob("*.json"))[: max(1, int(limit or 1))]: entries: List[Path] = []
if queue_dir.exists():
entries.extend(queue_dir.glob("*.json"))
entries.extend(legacy_entries)
def _sort_key(path: Path) -> tuple[float, str]:
try:
ts = float(path.stat().st_mtime)
except Exception:
ts = 0.0
return (ts, path.name)
for entry in sorted(entries, key=_sort_key)[: max(1, int(limit or 1))]:
try: try:
payload = json.loads(entry.read_text(encoding="utf-8")) payload = json.loads(entry.read_text(encoding="utf-8"))
except Exception: except Exception:

View File

@@ -1552,18 +1552,6 @@ class Download_File(Cmdlet):
# If no format explicitly chosen, we might want to check available formats # If no format explicitly chosen, we might want to check available formats
# and maybe show a table if multiple are available? # and maybe show a table if multiple are available?
if (
actual_format
and isinstance(actual_format, str)
and mode == "audio"
and "/" not in actual_format
and "+" not in actual_format
and not forced_single_applied
and actual_format not in {"best", "bestaudio", "bw", "ba"}
):
debug(f"Appending fallback to specific audio format: {actual_format} -> {actual_format}/bestaudio")
actual_format = f"{actual_format}/bestaudio"
if ( if (
actual_format actual_format
and isinstance(actual_format, str) and isinstance(actual_format, str)

View File

@@ -776,12 +776,14 @@ class YtDlpTool:
if not format_str or not isinstance(format_str, str): if not format_str or not isinstance(format_str, str):
return None return None
s = format_str.strip().lower() raw = format_str.strip()
s = raw.lower()
if not s: if not s:
return None return None
# Strip trailing 'p' if present (e.g. 720p -> 720) # Strip trailing 'p' if present (e.g. 720p -> 720)
if s.endswith('p'): explicit_height = s.endswith('p')
if explicit_height:
s = s[:-1] s = s[:-1]
# Heuristic: 240/360/480/720/1080/1440/2160 are common height inputs # Heuristic: 240/360/480/720/1080/1440/2160 are common height inputs
@@ -802,13 +804,9 @@ class YtDlpTool:
# Format 480 ... none in common lists. # Format 480 ... none in common lists.
# Format 720 ... none. # Format 720 ... none.
# So if it looks like a standard resolution, treat as height constraint. # So if it looks like a standard resolution, treat as height constraint.
common_heights = {144, 240, 360, 480, 540, 720, 1080, 1440, 2160, 2880, 4320}
if val in {144, 240, 360, 480, 540, 720, 1080, 1440, 2160, 2880, 4320}: if explicit_height or val in common_heights:
return f"bestvideo[height<={val}]+bestaudio/best[height<={val}]" return f"bestvideo[height<={val}]+bestaudio/best[height<={val}]"
# If user types something like 500, we can also treat as height constraint if > 100
if val >= 100 and val not in {133, 134, 135, 136, 137, 160, 242, 243, 244, 247, 248, 278, 394, 395, 396, 397, 398, 399}:
return f"bestvideo[height<={val}]+bestaudio/best[height<={val}]"
return None return None
@@ -981,7 +979,7 @@ class YtDlpTool:
# Add browser cookies support "just in case" if no file found (best effort) # Add browser cookies support "just in case" if no file found (best effort)
_add_browser_cookies_if_available(base_options) _add_browser_cookies_if_available(base_options)
# YouTube hardening: prefer browser cookies + mobile/web clients when available # YouTube hardening: prefer browser cookies when available.
try: try:
netloc = urlparse(opts.url).netloc.lower() netloc = urlparse(opts.url).netloc.lower()
except Exception: except Exception:
@@ -999,18 +997,6 @@ class YtDlpTool:
base_options.pop("cookiefile", None) base_options.pop("cookiefile", None)
debug("[ytdlp] Using browser cookies for YouTube; ignoring cookiefile") debug("[ytdlp] Using browser cookies for YouTube; ignoring cookiefile")
extractor_args = base_options.get("extractor_args")
if not isinstance(extractor_args, dict):
extractor_args = {}
youtube_args = extractor_args.get("youtube")
if not isinstance(youtube_args, dict):
youtube_args = {}
if "player_client" not in youtube_args:
youtube_args["player_client"] = ["android", "web"]
debug("[ytdlp] Using YouTube player_client override: android, web")
extractor_args["youtube"] = youtube_args
base_options["extractor_args"] = extractor_args
# Special handling for format keywords explicitly passed in via options # Special handling for format keywords explicitly passed in via options
if opts.ytdl_format == "audio": if opts.ytdl_format == "audio":
try: try:
@@ -1988,23 +1974,12 @@ def download_media(opts: DownloadOptions, *, config: Optional[Dict[str, Any]] =
retry_attempted = True retry_attempted = True
try: try:
if not opts.quiet: if not opts.quiet:
debug("yt-dlp hit HTTP 403; retrying with browser cookies + android/web player client") debug("yt-dlp hit HTTP 403; retrying with browser cookies")
fallback_options = dict(ytdl_options) fallback_options = dict(ytdl_options)
fallback_options.pop("cookiefile", None) fallback_options.pop("cookiefile", None)
_add_browser_cookies_if_available(fallback_options) _add_browser_cookies_if_available(fallback_options)
extractor_args = fallback_options.get("extractor_args")
if not isinstance(extractor_args, dict):
extractor_args = {}
youtube_args = extractor_args.get("youtube")
if not isinstance(youtube_args, dict):
youtube_args = {}
if "player_client" not in youtube_args:
youtube_args["player_client"] = ["android", "web"]
extractor_args["youtube"] = youtube_args
fallback_options["extractor_args"] = extractor_args
debug( debug(
"[ytdlp] retry options: " "[ytdlp] retry options: "
f"cookiefile={fallback_options.get('cookiefile')}, cookiesfrombrowser={fallback_options.get('cookiesfrombrowser')}, " f"cookiefile={fallback_options.get('cookiefile')}, cookiesfrombrowser={fallback_options.get('cookiesfrombrowser')}, "