diff --git a/.gitignore b/.gitignore index e7068ba..258aedc 100644 --- a/.gitignore +++ b/.gitignore @@ -220,3 +220,5 @@ luac.out config.conf config.d/ +MPV/ffmpeg/* +MPV/portable_config/* \ No newline at end of file diff --git a/CLI.py b/CLI.py index 3835bfa..73a52ae 100644 --- a/CLI.py +++ b/CLI.py @@ -549,6 +549,20 @@ def _get_cmdlet_args(cmd_name: str) -> List[str]: return [] +def get_store_choices() -> List[str]: + """Return configured store backend names. + + This is the same list used for REPL/Typer autocomplete for `-store`. + """ + try: + from Store import Store + storage = Store(_load_cli_config(), suppress_debug=True) + backends = storage.list_backends() + return list(backends or []) + except Exception: + return [] + + def _get_arg_choices(cmd_name: str, arg_name: str) -> List[str]: """Get list of valid choices for a specific cmdlet argument.""" try: @@ -558,14 +572,9 @@ def _get_arg_choices(cmd_name: str, arg_name: str) -> List[str]: # Dynamic storage backends: use current config to enumerate available storages # Support both "storage" and "store" argument names if normalized_arg in ("storage", "store"): - try: - from Store import Store - storage = Store(_load_cli_config(), suppress_debug=True) - backends = storage.list_backends() - if backends: - return backends - except Exception: - pass + backends = get_store_choices() + if backends: + return backends # Dynamic search providers if normalized_arg == "provider": diff --git a/medeia_macina/__init__.py b/Log/medeia_macina/__init__.py similarity index 100% rename from medeia_macina/__init__.py rename to Log/medeia_macina/__init__.py diff --git a/medeia_macina/cli_entry.py b/Log/medeia_macina/cli_entry.py similarity index 100% rename from medeia_macina/cli_entry.py rename to Log/medeia_macina/cli_entry.py diff --git a/MPV/LUA/main.lua b/MPV/LUA/main.lua index 8d1f3fc..d43aa17 100644 --- a/MPV/LUA/main.lua +++ b/MPV/LUA/main.lua @@ -4,17 +4,198 @@ local msg = require 'mp.msg' local M = {} +local MEDEIA_LUA_VERSION = '2025-12-18' + +-- Track whether uosc is available so menu calls don't fail with +-- "Can't find script 'uosc' to send message to." +local _uosc_loaded = false + +mp.register_script_message('uosc-version', function(_ver) + _uosc_loaded = true +end) + +local function _is_script_loaded(name) + local ok, list = pcall(mp.get_property_native, 'script-list') + if not ok or type(list) ~= 'table' then + return false + end + for _, s in ipairs(list) do + if type(s) == 'table' then + local n = s.name or '' + if n == name or tostring(n):match('^' .. name .. '%d*$') then + return true + end + elseif type(s) == 'string' then + local n = s + if n == name or tostring(n):match('^' .. name .. '%d*$') then + return true + end + end + end + return false +end + local LOAD_URL_MENU_TYPE = 'medios_load_url' +local DOWNLOAD_FORMAT_MENU_TYPE = 'medios_download_pick_format' +local DOWNLOAD_STORE_MENU_TYPE = 'medios_download_pick_store' + local PIPELINE_REQ_PROP = 'user-data/medeia-pipeline-request' local PIPELINE_RESP_PROP = 'user-data/medeia-pipeline-response' local PIPELINE_READY_PROP = 'user-data/medeia-pipeline-ready' +-- Dedicated Lua log (next to mpv log-file) because mp.msg output is not always +-- included in --log-file depending on msg-level and build. +local function _lua_log(text) + local payload = (text and tostring(text) or '') + if payload == '' then + return + end + local dir = '' + + -- Prefer repo-root Log/ for consistency with Python helper logs. 
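+    -- Sketch of the upward probe below (paths are illustrative, not real config):
+    --   start: C:/repo/MPV/LUA  -> probe C:/repo/MPV/LUA/CLI.py (miss)
+    --   parent: C:/repo/MPV     -> probe C:/repo/MPV/CLI.py     (miss)
+    --   parent: C:/repo         -> probe C:/repo/CLI.py         (hit)
+    -- so `dir` resolves to C:/repo/Log.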
+ do + local function find_up(start_dir, relative_path, max_levels) + local d = start_dir + local levels = max_levels or 6 + for _ = 0, levels do + if d and d ~= '' then + local candidate = d .. '/' .. relative_path + if utils.file_info(candidate) then + return candidate + end + end + local parent = d and d:match('(.*)[/\\]') or nil + if not parent or parent == d or parent == '' then + break + end + d = parent + end + return nil + end + + local base = mp.get_script_directory() or utils.getcwd() or '' + if base ~= '' then + local cli = find_up(base, 'CLI.py', 8) + if cli and cli ~= '' then + local root = cli:match('(.*)[/\\]') or '' + if root ~= '' then + dir = utils.join_path(root, 'Log') + end + end + end + end + + -- Fallback: next to mpv --log-file. + if dir == '' then + local log_file = mp.get_property('options/log-file') or '' + dir = log_file:match('(.*)[/\\]') or '' + end + if dir == '' then + dir = mp.get_script_directory() or utils.getcwd() or '' + end + if dir == '' then + return + end + local path = utils.join_path(dir, 'medeia-mpv-lua.log') + local fh = io.open(path, 'a') + if not fh then + return + end + local line = '[' .. os.date('%Y-%m-%d %H:%M:%S') .. '] ' .. payload + fh:write(line .. '\n') + fh:close() + + -- Also mirror Lua-side debug into the Python helper log file so there's one + -- place to look when diagnosing mpv↔python IPC issues. + do + local helper_path = utils.join_path(dir, 'medeia-mpv-helper.log') + local fh2 = io.open(helper_path, 'a') + if fh2 then + fh2:write('[lua] ' .. line .. '\n') + fh2:close() + end + end +end + +local function ensure_uosc_loaded() + if _uosc_loaded or _is_script_loaded('uosc') then + _uosc_loaded = true + return true + end + + local entry = nil + pcall(function() + entry = mp.find_config_file('scripts/uosc.lua') + end) + if not entry or entry == '' then + _lua_log('uosc entry not found at scripts/uosc.lua under config-dir') + return false + end + + local ok = pcall(mp.commandv, 'load-script', entry) + if ok then + _lua_log('Loaded uosc from: ' .. tostring(entry)) + else + _lua_log('Failed to load uosc from: ' .. tostring(entry)) + end + + -- uosc will broadcast uosc-version on load; also re-check script-list if available. + if _is_script_loaded('uosc') then + _uosc_loaded = true + return true + end + return _uosc_loaded +end + local function write_temp_log(prefix, text) if not text or text == '' then return nil end - local dir = os.getenv('TEMP') or os.getenv('TMP') or utils.getcwd() or '' + + local dir = '' + -- Prefer repo-root Log/ for easier discovery. + do + local function find_up(start_dir, relative_path, max_levels) + local d = start_dir + local levels = max_levels or 6 + for _ = 0, levels do + if d and d ~= '' then + local candidate = d .. '/' .. relative_path + if utils.file_info(candidate) then + return candidate + end + end + local parent = d and d:match('(.*)[/\\]') or nil + if not parent or parent == d or parent == '' then + break + end + d = parent + end + return nil + end + + local base = mp.get_script_directory() or utils.getcwd() or '' + if base ~= '' then + local cli = find_up(base, 'CLI.py', 6) + if cli and cli ~= '' then + local parent = cli:match('(.*)[/\\]') or '' + if parent ~= '' then + dir = utils.join_path(parent, 'Log') + -- Best-effort create dir. + local sep = package and package.config and package.config:sub(1, 1) or '/' + if sep == '\\' then + pcall(utils.subprocess, { args = { 'cmd.exe', '/c', 'mkdir "' .. dir .. 
'" 1>nul 2>nul' } }) + else + pcall(utils.subprocess, { args = { 'sh', '-lc', 'mkdir -p ' .. string.format('%q', dir) .. ' >/dev/null 2>&1' } }) + end + end + end + end + end + if dir == '' then + dir = os.getenv('TEMP') or os.getenv('TMP') or utils.getcwd() or '' + end if dir == '' then return nil end @@ -83,8 +264,38 @@ local function find_file_upwards(start_dir, relative_path, max_levels) return nil end +local _cached_store_names = {} +local _store_cache_loaded = false + local _pipeline_helper_started = false +local function _is_pipeline_helper_ready() + local ready = mp.get_property_native(PIPELINE_READY_PROP) + if not ready then + return false + end + local s = tostring(ready) + if s == '' or s == '0' then + return false + end + + -- Back-compat: older helpers may set "1". New helpers set unix timestamps. + local n = tonumber(s) + if n and n > 1000000000 then + local now = (os and os.time) and os.time() or nil + if not now then + return true + end + local age = now - n + if age < 0 then + age = 0 + end + return age <= 10 + end + + return true +end + local function get_mpv_ipc_path() local ipc = mp.get_property('input-ipc-server') if ipc and ipc ~= '' then @@ -98,29 +309,829 @@ local function get_mpv_ipc_path() return '/tmp/mpv-medeia-macina.sock' end -local function ensure_pipeline_helper_running() - local ready = mp.get_property_native(PIPELINE_READY_PROP) - if ready then - return true - end - if _pipeline_helper_started then +local function ensure_mpv_ipc_server() + -- `.pipe -play` (Python) controls MPV via JSON IPC. If mpv was started + -- without --input-ipc-server, make sure we set one so the running instance + -- can be controlled (instead of Python spawning a separate mpv). + local ipc = mp.get_property('input-ipc-server') + if ipc and ipc ~= '' then return true end - local base_dir = mp.get_script_directory() or "" - if base_dir == "" then - base_dir = utils.getcwd() or "" + local desired = get_mpv_ipc_path() + if not desired or desired == '' then + return false end - local helper_path = find_file_upwards(base_dir, 'MPV/pipeline_helper.py', 6) + + local ok = pcall(mp.set_property, 'input-ipc-server', desired) + if not ok then + return false + end + local now = mp.get_property('input-ipc-server') + return (now and now ~= '') and true or false +end + +local function quote_pipeline_arg(s) + -- Ensure URLs with special characters (e.g. &, #) survive pipeline parsing. + s = tostring(s or '') + s = s:gsub('\\', '\\\\'):gsub('"', '\\"') + return '"' .. s .. '"' +end + +local function _is_windows() + local sep = package and package.config and package.config:sub(1, 1) or '/' + return sep == '\\' +end + +local function _extract_target_from_memory_uri(text) + if type(text) ~= 'string' then + return nil + end + if not text:match('^memory://') then + return nil + end + for line in text:gmatch('[^\r\n]+') do + line = trim(line) + if line ~= '' and not line:match('^#') and not line:match('^memory://') then + return line + end + end + return nil +end + +local function _percent_decode(s) + if type(s) ~= 'string' then + return s + end + return (s:gsub('%%(%x%x)', function(hex) + return string.char(tonumber(hex, 16)) + end)) +end + +local function _extract_query_param(url, key) + if type(url) ~= 'string' then + return nil + end + key = tostring(key or '') + if key == '' then + return nil + end + local pattern = '[?&]' .. key:gsub('([^%w])', '%%%1') .. 
'=([^&#]+)' + local v = url:match(pattern) + if v then + return _percent_decode(v) + end + return nil +end + +local function _current_target() + local path = mp.get_property('path') + if not path or path == '' then + return nil + end + local mem = _extract_target_from_memory_uri(path) + if mem and mem ~= '' then + return mem + end + return path +end + +local function _extract_store_hash(target) + if type(target) ~= 'string' or target == '' then + return nil + end + local hash = _extract_query_param(target, 'hash') + local store = _extract_query_param(target, 'store') + if hash and store then + local h = tostring(hash):lower() + if h:match('^[0-9a-f]+$') and #h == 64 then + return { store = tostring(store), hash = h } + end + end + return nil +end + +local function _pick_folder_windows() + -- Native folder picker via PowerShell + WinForms. + local ps = [[Add-Type -AssemblyName System.Windows.Forms; $d = New-Object System.Windows.Forms.FolderBrowserDialog; $d.Description = 'Select download folder'; $d.ShowNewFolderButton = $true; if ($d.ShowDialog() -eq [System.Windows.Forms.DialogResult]::OK) { $d.SelectedPath }]] + local res = utils.subprocess({ + args = { 'powershell', '-NoProfile', '-STA', '-ExecutionPolicy', 'Bypass', '-Command', ps }, + cancellable = false, + }) + if res and res.status == 0 and res.stdout then + local out = trim(tostring(res.stdout)) + if out ~= '' then + return out + end + end + return nil +end + +-- Forward declaration: used by run_pipeline_via_ipc_response before definition. +local ensure_pipeline_helper_running + +local function _run_helper_request_response(req, timeout_seconds) + if not ensure_pipeline_helper_running() then + _lua_log('ipc: helper not running; cannot execute request') + return nil + end + + do + local deadline = mp.get_time() + 3.0 + while mp.get_time() < deadline do + if _is_pipeline_helper_ready() then + break + end + mp.wait_event(0.05) + end + if not _is_pipeline_helper_ready() then + _lua_log('ipc: helper not ready; ready=' .. tostring(mp.get_property_native(PIPELINE_READY_PROP))) + _pipeline_helper_started = false + return nil + end + end + + if type(req) ~= 'table' then + return nil + end + + local id = tostring(req.id or '') + if id == '' then + id = tostring(math.floor(mp.get_time() * 1000)) .. '-' .. tostring(math.random(100000, 999999)) + req.id = id + end + + local label = '' + if req.op then + label = 'op=' .. tostring(req.op) + elseif req.pipeline then + label = 'cmd=' .. tostring(req.pipeline) + else + label = '(unknown)' + end + _lua_log('ipc: send request id=' .. tostring(id) .. ' ' .. label) + + mp.set_property(PIPELINE_RESP_PROP, '') + mp.set_property(PIPELINE_REQ_PROP, utils.format_json(req)) + + local deadline = mp.get_time() + (timeout_seconds or 5) + while mp.get_time() < deadline do + local resp_json = mp.get_property(PIPELINE_RESP_PROP) + if resp_json and resp_json ~= '' then + local ok, resp = pcall(utils.parse_json, resp_json) + if ok and resp and resp.id == id then + _lua_log('ipc: got response id=' .. tostring(id) .. ' success=' .. tostring(resp.success)) + return resp + end + end + mp.wait_event(0.05) + end + + _lua_log('ipc: timeout waiting response; ' .. 
label) + _pipeline_helper_started = false + return nil +end + +-- IPC helper: return the whole response object (stdout/stderr/error/table) +local function run_pipeline_via_ipc_response(pipeline_cmd, seeds, timeout_seconds) + local req = { pipeline = pipeline_cmd } + if seeds then + req.seeds = seeds + end + return _run_helper_request_response(req, timeout_seconds) +end + +local function _refresh_store_cache(timeout_seconds) + ensure_mpv_ipc_server() + local resp = _run_helper_request_response({ op = 'store-choices' }, timeout_seconds or 1) + if not resp or not resp.success or type(resp.choices) ~= 'table' then + _lua_log('stores: failed to load store choices via helper; stderr=' .. tostring(resp and resp.stderr or '') .. ' error=' .. tostring(resp and resp.error or '')) + + -- Fallback: directly call Python to import CLI.get_store_choices(). + -- This avoids helper IPC issues and still stays in sync with the REPL. + local python = (opts and opts.python_path) and tostring(opts.python_path) or 'python' + local cli_path = (opts and opts.cli_path) and tostring(opts.cli_path) or nil + if not cli_path or cli_path == '' or not utils.file_info(cli_path) then + local base_dir = mp.get_script_directory() or utils.getcwd() or '' + if base_dir ~= '' then + cli_path = find_file_upwards(base_dir, 'CLI.py', 8) + end + end + + if cli_path and cli_path ~= '' then + local root = tostring(cli_path):match('(.*)[/\\]') or '' + if root ~= '' then + local code = "import json, sys; sys.path.insert(0, r'" .. root .. "'); from CLI import get_store_choices; print(json.dumps(get_store_choices()))" + local res = utils.subprocess({ + args = { python, '-c', code }, + cancellable = false, + }) + if res and res.status == 0 and res.stdout then + local out_text = tostring(res.stdout) + local last_line = '' + for line in out_text:gmatch('[^\r\n]+') do + if trim(line) ~= '' then + last_line = line + end + end + local ok, parsed = pcall(utils.parse_json, last_line ~= '' and last_line or out_text) + if ok and type(parsed) == 'table' then + local out = {} + for _, v in ipairs(parsed) do + local name = trim(tostring(v or '')) + if name ~= '' then + out[#out + 1] = name + end + end + if #out > 0 then + _cached_store_names = out + _store_cache_loaded = true + _lua_log('stores: loaded ' .. tostring(#_cached_store_names) .. ' stores via python fallback') + return true + end + end + else + _lua_log('stores: python fallback failed; status=' .. tostring(res and res.status or 'nil') .. ' stderr=' .. tostring(res and res.stderr or '')) + end + end + end + + return false + end + + local out = {} + for _, v in ipairs(resp.choices) do + local name = trim(tostring(v or '')) + if name ~= '' then + out[#out + 1] = name + end + end + _cached_store_names = out + _store_cache_loaded = true + _lua_log('stores: loaded ' .. tostring(#_cached_store_names) .. ' stores via helper') + return true +end + +local function _uosc_open_list_picker(menu_type, title, items) + local menu_data = { + type = menu_type, + title = title, + items = items or {}, + } + if ensure_uosc_loaded() then + mp.commandv('script-message-to', 'uosc', 'open-menu', utils.format_json(menu_data)) + else + _lua_log('menu: uosc not available; cannot open-menu') + end +end + +-- No-op handler for placeholder menu items. +mp.register_script_message('medios-nop', function() + return +end) + +local _pending_download = nil +local _pending_format_change = nil + +-- Cache yt-dlp format lists per URL so Change Format is instant. 
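+-- Cache shape (sketch): _formats_cache[url] = { table = <rows payload>, ts = mp.get_time() };
+-- M.file mirrors the most recent url/table pair so other integrations can inspect it.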
+M.file = M.file or {} +M.file.formats_table = nil +M.file.url = nil +local _formats_cache = {} +local _formats_inflight = {} + +local function _is_http_url(u) + if type(u) ~= 'string' then + return false + end + return u:match('^https?://') ~= nil +end + +local function _cache_formats_for_url(url, tbl) + if type(url) ~= 'string' or url == '' then + return + end + if type(tbl) ~= 'table' then + return + end + _formats_cache[url] = { table = tbl, ts = mp.get_time() } + M.file.url = url + M.file.formats_table = tbl +end + +local function _get_cached_formats_table(url) + if type(url) ~= 'string' or url == '' then + return nil + end + local hit = _formats_cache[url] + if type(hit) == 'table' and type(hit.table) == 'table' then + return hit.table + end + return nil +end + +local function _prefetch_formats_for_url(url) + url = tostring(url or '') + if url == '' or not _is_http_url(url) then + return + end + + -- Only applies to plain URLs (not store hash URLs). + if _extract_store_hash(url) then + return + end + + if _get_cached_formats_table(url) then + return + end + if _formats_inflight[url] then + return + end + _formats_inflight[url] = true + + mp.add_timeout(0.01, function() + if _get_cached_formats_table(url) then + _formats_inflight[url] = nil + return + end + + ensure_mpv_ipc_server() + local resp = _run_helper_request_response({ op = 'ytdlp-formats', data = { url = url } }, 20) + _formats_inflight[url] = nil + + if resp and resp.success and type(resp.table) == 'table' then + _cache_formats_for_url(url, resp.table) + _lua_log('formats: cached ' .. tostring((resp.table.rows and #resp.table.rows) or 0) .. ' rows for url') + end + end) +end + +local function _open_loading_formats_menu(title) + _uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, title or 'Pick format', { + { + title = 'Loading formats…', + hint = 'Fetching format list', + value = { 'script-message-to', mp.get_script_name(), 'medios-nop', '{}' }, + }, + }) +end + +local function _current_ytdl_format_string() + -- Preferred: mpv exposes the active ytdl format string. + local fmt = trim(tostring(mp.get_property_native('ytdl-format') or '')) + if fmt ~= '' then + return fmt + end + + -- Fallbacks: option value, or raw info if available. 
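+    -- 'ytdl-raw-info' is only populated by some ytdl hook configurations, so
+    -- treat it as best-effort; requested_formats joins as e.g. "137+140".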
+ local opt = trim(tostring(mp.get_property('options/ytdl-format') or '')) + if opt ~= '' then + return opt + end + + local raw = mp.get_property_native('ytdl-raw-info') + if type(raw) == 'table' then + if raw.format_id and tostring(raw.format_id) ~= '' then + return tostring(raw.format_id) + end + local rf = raw.requested_formats + if type(rf) == 'table' then + local parts = {} + for _, item in ipairs(rf) do + if type(item) == 'table' and item.format_id and tostring(item.format_id) ~= '' then + parts[#parts + 1] = tostring(item.format_id) + end + end + if #parts >= 1 then + return table.concat(parts, '+') + end + end + end + + return nil +end + +local function _run_pipeline_detached(pipeline_cmd) + if not pipeline_cmd or pipeline_cmd == '' then + return false + end + local python = (opts and opts.python_path) and tostring(opts.python_path) or 'python' + local cli = (opts and opts.cli_path) and tostring(opts.cli_path) or 'CLI.py' + local args = { python, cli, 'pipeline', '--pipeline', pipeline_cmd } + local ok = utils.subprocess_detached({ args = args }) + return ok ~= nil +end + +local function _open_save_location_picker_for_pending_download() + if type(_pending_download) ~= 'table' or not _pending_download.url or not _pending_download.format then + return + end + + local function build_items() + local items = { + { + title = 'Pick folder…', + hint = 'Save to a local folder', + value = { 'script-message-to', mp.get_script_name(), 'medios-download-pick-path', '{}' }, + }, + } + + if type(_cached_store_names) == 'table' and #_cached_store_names > 0 then + for _, name in ipairs(_cached_store_names) do + name = trim(tostring(name or '')) + if name ~= '' then + local payload = { store = name } + items[#items + 1] = { + title = name, + value = { 'script-message-to', mp.get_script_name(), 'medios-download-pick-store', utils.format_json(payload) }, + } + end + end + end + return items + end + + -- Always open immediately with whatever store cache we have. + _uosc_open_list_picker(DOWNLOAD_STORE_MENU_TYPE, 'Save location', build_items()) + + -- Best-effort refresh; if it succeeds, reopen menu with stores. + mp.add_timeout(0.05, function() + if type(_pending_download) ~= 'table' or not _pending_download.url or not _pending_download.format then + return + end + local before = (type(_cached_store_names) == 'table') and #_cached_store_names or 0 + if _refresh_store_cache(1.5) then + local after = (type(_cached_store_names) == 'table') and #_cached_store_names or 0 + if after > 0 and after ~= before then + _uosc_open_list_picker(DOWNLOAD_STORE_MENU_TYPE, 'Save location', build_items()) + end + end + end) +end + +-- Prime store cache shortly after load (best-effort; picker also refreshes on-demand). +mp.add_timeout(0.10, function() + if not _store_cache_loaded then + pcall(_refresh_store_cache, 1.5) + end +end) + +local function _apply_ytdl_format_and_reload(url, fmt) + if not url or url == '' or not fmt or fmt == '' then + return + end + + local pos = mp.get_property_number('time-pos') + local paused = mp.get_property_native('pause') and true or false + + _lua_log('change-format: setting options/ytdl-format=' .. tostring(fmt)) + pcall(mp.set_property, 'options/ytdl-format', tostring(fmt)) + + if pos and pos > 0 then + mp.commandv('loadfile', url, 'replace', 'start=' .. 
tostring(pos)) + else + mp.commandv('loadfile', url, 'replace') + end + + if paused then + mp.set_property_native('pause', true) + end +end + +local function _start_download_flow_for_current() + local target = _current_target() + if not target or target == '' then + mp.osd_message('No current item', 2) + return + end + + _lua_log('download: current target=' .. tostring(target)) + + local store_hash = _extract_store_hash(target) + if store_hash then + if not _is_windows() then + mp.osd_message('Download folder picker is Windows-only', 4) + return + end + local folder = _pick_folder_windows() + if not folder or folder == '' then + return + end + ensure_mpv_ipc_server() + M.run_pipeline('get-file -store ' .. quote_pipeline_arg(store_hash.store) .. ' -hash ' .. store_hash.hash .. ' -path ' .. quote_pipeline_arg(folder)) + mp.osd_message('Download started', 2) + return + end + + -- Non-store URL flow: use the current yt-dlp-selected format and ask for save location. + local url = tostring(target) + local fmt = _current_ytdl_format_string() + + if not fmt or fmt == '' then + _lua_log('download: could not determine current ytdl format string') + mp.osd_message('Cannot determine current format; use Change Format first', 5) + return + end + + _lua_log('download: using current format=' .. tostring(fmt)) + _pending_download = { url = url, format = fmt } + _open_save_location_picker_for_pending_download() +end + +mp.register_script_message('medios-download-current', function() + _start_download_flow_for_current() +end) + +mp.register_script_message('medios-change-format-current', function() + local target = _current_target() + if not target or target == '' then + mp.osd_message('No current item', 2) + return + end + + local store_hash = _extract_store_hash(target) + if store_hash then + mp.osd_message('Change Format is only for URL playback', 4) + return + end + + local url = tostring(target) + + -- If formats were already prefetched for this URL, open instantly. + local cached_tbl = _get_cached_formats_table(url) + if type(cached_tbl) == 'table' and type(cached_tbl.rows) == 'table' and #cached_tbl.rows > 0 then + _pending_format_change = { url = url, token = 'cached', formats_table = cached_tbl } + + local items = {} + for idx, row in ipairs(cached_tbl.rows) do + local cols = row.columns or {} + local id_val = '' + local res_val = '' + local ext_val = '' + local size_val = '' + for _, c in ipairs(cols) do + if c.name == 'ID' then id_val = tostring(c.value or '') end + if c.name == 'Resolution' then res_val = tostring(c.value or '') end + if c.name == 'Ext' then ext_val = tostring(c.value or '') end + if c.name == 'Size' then size_val = tostring(c.value or '') end + end + local label = id_val ~= '' and id_val or ('Format ' .. tostring(idx)) + local hint_parts = {} + if res_val ~= '' and res_val ~= 'N/A' then table.insert(hint_parts, res_val) end + if ext_val ~= '' then table.insert(hint_parts, ext_val) end + if size_val ~= '' and size_val ~= 'N/A' then table.insert(hint_parts, size_val) end + local hint = table.concat(hint_parts, ' | ') + + local payload = { index = idx } + items[#items + 1] = { + title = label, + hint = hint, + value = { 'script-message-to', mp.get_script_name(), 'medios-change-format-pick', utils.format_json(payload) }, + } + end + + _uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', items) + return + end + + local token = tostring(math.floor(mp.get_time() * 1000)) .. '-' .. 
tostring(math.random(100000, 999999)) + _pending_format_change = { url = url, token = token } + _open_loading_formats_menu('Change format') + + mp.add_timeout(0.05, function() + if type(_pending_format_change) ~= 'table' or _pending_format_change.token ~= token then + return + end + + ensure_mpv_ipc_server() + _lua_log('change-format: requesting formats via helper op for url') + + local resp = _run_helper_request_response({ op = 'ytdlp-formats', data = { url = url } }, 30) + if type(_pending_format_change) ~= 'table' or _pending_format_change.token ~= token then + return + end + + if not resp or not resp.success or type(resp.table) ~= 'table' then + local err = '' + if type(resp) == 'table' then + if resp.error and tostring(resp.error) ~= '' then err = tostring(resp.error) end + if resp.stderr and tostring(resp.stderr) ~= '' then + err = (err ~= '' and (err .. ' | ') or '') .. tostring(resp.stderr) + end + end + _lua_log('change-format: formats failed: ' .. (err ~= '' and err or '(no details)')) + mp.osd_message('Failed to load format list', 5) + _uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', { + { + title = 'Failed to load format list', + hint = 'Check logs (medeia-mpv-lua.log / medeia-mpv-helper.log)', + value = { 'script-message-to', mp.get_script_name(), 'medios-nop', '{}' }, + }, + }) + return + end + + local tbl = resp.table + if type(tbl.rows) ~= 'table' or #tbl.rows == 0 then + mp.osd_message('No formats available', 4) + return + end + + local items = {} + for idx, row in ipairs(tbl.rows) do + local cols = row.columns or {} + local id_val = '' + local res_val = '' + local ext_val = '' + local size_val = '' + for _, c in ipairs(cols) do + if c.name == 'ID' then id_val = tostring(c.value or '') end + if c.name == 'Resolution' then res_val = tostring(c.value or '') end + if c.name == 'Ext' then ext_val = tostring(c.value or '') end + if c.name == 'Size' then size_val = tostring(c.value or '') end + end + local label = id_val ~= '' and id_val or ('Format ' .. tostring(idx)) + local hint_parts = {} + if res_val ~= '' and res_val ~= 'N/A' then table.insert(hint_parts, res_val) end + if ext_val ~= '' then table.insert(hint_parts, ext_val) end + if size_val ~= '' and size_val ~= 'N/A' then table.insert(hint_parts, size_val) end + local hint = table.concat(hint_parts, ' | ') + + local payload = { index = idx } + items[#items + 1] = { + title = label, + hint = hint, + value = { 'script-message-to', mp.get_script_name(), 'medios-change-format-pick', utils.format_json(payload) }, + } + end + + _pending_format_change.formats_table = tbl + _cache_formats_for_url(url, tbl) + _uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', items) + end) +end) + +-- Prefetch formats for yt-dlp-supported URLs on load so Change Format is instant. 
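+-- Flow sketch: file-loaded -> _prefetch_formats_for_url(url) -> helper op
+-- 'ytdlp-formats' -> _cache_formats_for_url(url, tbl); the Change Format
+-- menu then opens instantly from _get_cached_formats_table(url) on a hit.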
+mp.register_event('file-loaded', function() + local target = _current_target() + if not target or target == '' then + return + end + local url = tostring(target) + if not _is_http_url(url) then + return + end + _prefetch_formats_for_url(url) +end) + +mp.register_script_message('medios-change-format-pick', function(json) + if type(_pending_format_change) ~= 'table' or not _pending_format_change.url then + return + end + local ok, ev = pcall(utils.parse_json, json) + if not ok or type(ev) ~= 'table' then + return + end + local idx = tonumber(ev.index or 0) or 0 + if idx <= 0 then + return + end + local tbl = _pending_format_change.formats_table + if type(tbl) ~= 'table' or type(tbl.rows) ~= 'table' or not tbl.rows[idx] then + return + end + local row = tbl.rows[idx] + local sel = row.selection_args + local fmt = nil + if type(sel) == 'table' then + for i = 1, #sel do + if tostring(sel[i]) == '-format' and sel[i + 1] then + fmt = tostring(sel[i + 1]) + break + end + end + end + if not fmt or fmt == '' then + mp.osd_message('Invalid format selection', 3) + return + end + + local url = tostring(_pending_format_change.url) + _pending_format_change = nil + _apply_ytdl_format_and_reload(url, fmt) +end) + +mp.register_script_message('medios-download-pick-store', function(json) + if type(_pending_download) ~= 'table' or not _pending_download.url or not _pending_download.format then + return + end + local ok, ev = pcall(utils.parse_json, json) + if not ok or type(ev) ~= 'table' then + return + end + local store = trim(tostring(ev.store or '')) + if store == '' then + return + end + + local url = tostring(_pending_download.url) + local fmt = tostring(_pending_download.format) + + local pipeline_cmd = 'download-media -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt) + .. ' | add-file -store ' .. quote_pipeline_arg(store) + + if not _run_pipeline_detached(pipeline_cmd) then + -- Fall back to synchronous execution if detached failed. + M.run_pipeline(pipeline_cmd) + end + mp.osd_message('Download started', 3) + _pending_download = nil +end) + +mp.register_script_message('medios-download-pick-path', function() + if type(_pending_download) ~= 'table' or not _pending_download.url or not _pending_download.format then + return + end + if not _is_windows() then + mp.osd_message('Folder picker is Windows-only', 4) + return + end + + local folder = _pick_folder_windows() + if not folder or folder == '' then + return + end + + local url = tostring(_pending_download.url) + local fmt = tostring(_pending_download.format) + + local pipeline_cmd = 'download-media -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt) + .. ' | add-file -path ' .. quote_pipeline_arg(folder) + + if not _run_pipeline_detached(pipeline_cmd) then + M.run_pipeline(pipeline_cmd) + end + mp.osd_message('Download started', 3) + _pending_download = nil +end) + +ensure_pipeline_helper_running = function() + -- If a helper is already running (e.g. started by the launcher), just use it. + if _is_pipeline_helper_ready() then + _pipeline_helper_started = true + return true + end + + -- We tried to start a helper before but it isn't ready anymore; restart. + if _pipeline_helper_started then + _pipeline_helper_started = false + end + + local helper_path = nil + + -- Prefer deriving repo root from located CLI.py if available. 
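+    -- e.g. (hypothetical) opts.cli_path = 'C:/repo/CLI.py' yields
+    -- helper_path = 'C:/repo/MPV/pipeline_helper.py' when that file exists.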
+ if opts and opts.cli_path and utils.file_info(opts.cli_path) then + local root = tostring(opts.cli_path):match('(.*)[/\\]') or '' + if root ~= '' then + local candidate = utils.join_path(root, 'MPV/pipeline_helper.py') + if utils.file_info(candidate) then + helper_path = candidate + end + end + end + if not helper_path then + local base_dir = mp.get_script_directory() or "" + if base_dir == "" then + base_dir = utils.getcwd() or "" + end + helper_path = find_file_upwards(base_dir, 'MPV/pipeline_helper.py', 8) + end + if not helper_path then + _lua_log('ipc: cannot find helper script MPV/pipeline_helper.py (script_dir=' .. tostring(mp.get_script_directory() or '') .. ')') + return false + end + + -- Ensure mpv actually has a JSON IPC server for the helper to connect to. + if not ensure_mpv_ipc_server() then + _lua_log('ipc: mpv input-ipc-server is not set; start mpv with --input-ipc-server=\\\\.\\pipe\\mpv-medeia-macina') + return false + end + + local python = (opts and opts.python_path) and tostring(opts.python_path) or 'python' + local ipc = get_mpv_ipc_path() + -- Give the helper enough time to connect (Windows pipe can take a moment). + local args = {python, helper_path, '--ipc', ipc, '--timeout', '30'} + _lua_log('ipc: starting helper: ' .. table.concat(args, ' ')) + + local ok = utils.subprocess_detached({ args = args }) + if ok == nil then + _lua_log('ipc: failed to start helper (subprocess_detached returned nil)') return false end _pipeline_helper_started = true - - local args = {opts.python_path, helper_path, '--ipc', get_mpv_ipc_path()} - local ok = utils.subprocess_detached({ args = args }) - return ok ~= nil + return true end local function run_pipeline_via_ipc(pipeline_cmd, seeds, timeout_seconds) @@ -134,14 +1145,12 @@ local function run_pipeline_via_ipc(pipeline_cmd, seeds, timeout_seconds) do local deadline = mp.get_time() + 1.0 while mp.get_time() < deadline do - local ready = mp.get_property_native(PIPELINE_READY_PROP) - if ready and tostring(ready) ~= '' and tostring(ready) ~= '0' then + if _is_pipeline_helper_ready() then break end mp.wait_event(0.05) end - local ready = mp.get_property_native(PIPELINE_READY_PROP) - if not (ready and tostring(ready) ~= '' and tostring(ready) ~= '0') then + if not _is_pipeline_helper_ready() then _pipeline_helper_started = false return nil end @@ -227,7 +1236,7 @@ function M.run_pipeline(pipeline_cmd, seeds) if err ~= nil then local log_path = write_temp_log('medeia-pipeline-error', tostring(err)) local suffix = log_path and (' (log: ' .. log_path .. ')') or '' - msg.error('Pipeline error: ' .. tostring(err) .. suffix) + _lua_log('Pipeline error: ' .. tostring(err) .. suffix) mp.osd_message('Error: pipeline failed' .. suffix, 6) return nil end @@ -240,7 +1249,9 @@ function M.run_pipeline(pipeline_cmd, seeds) table.insert(args, seeds_json) end - msg.info("Running pipeline: " .. pipeline_cmd) + _lua_log("Running pipeline: " .. pipeline_cmd) + -- If the persistent IPC helper isn't available, fall back to a subprocess. + -- Note: mpv's subprocess helper does not support an `env` parameter. local res = utils.subprocess({ args = args, cancellable = false, @@ -252,7 +1263,7 @@ function M.run_pipeline(pipeline_cmd, seeds) or "unknown" local log_path = write_temp_log('medeia-cli-pipeline-stderr', tostring(res.stderr or err)) local suffix = log_path and (' (log: ' .. log_path .. ')') or '' - msg.error("Pipeline error: " .. err .. suffix) + _lua_log("Pipeline error: " .. err .. suffix) mp.osd_message("Error: pipeline failed" .. 
suffix, 6) return nil end @@ -273,7 +1284,7 @@ function M.run_pipeline_json(pipeline_cmd, seeds) if ok then return data else - msg.error("Failed to parse JSON: " .. output) + _lua_log("Failed to parse JSON: " .. output) return nil end end @@ -293,7 +1304,7 @@ function M.get_file_info() if data then -- Display metadata - msg.info("Metadata: " .. utils.format_json(data)) + _lua_log("Metadata: " .. utils.format_json(data)) mp.osd_message("Metadata loaded (check console)", 3) end end @@ -324,7 +1335,11 @@ function M.open_load_url_prompt() } local json = utils.format_json(menu_data) - mp.commandv('script-message-to', 'uosc', 'open-menu', json) + if ensure_uosc_loaded() then + mp.commandv('script-message-to', 'uosc', 'open-menu', json) + else + _lua_log('menu: uosc not available; cannot open-menu') + end end mp.register_script_message('medios-load-url', function() @@ -345,9 +1360,14 @@ mp.register_script_message('medios-load-url-event', function(json) return end - local out = M.run_pipeline('.pipe ' .. url .. ' -play') + ensure_mpv_ipc_server() + local out = M.run_pipeline('.pipe -url ' .. quote_pipeline_arg(url) .. ' -play') if out ~= nil then - mp.commandv('script-message-to', 'uosc', 'close-menu', LOAD_URL_MENU_TYPE) + if ensure_uosc_loaded() then + mp.commandv('script-message-to', 'uosc', 'close-menu', LOAD_URL_MENU_TYPE) + else + _lua_log('menu: uosc not available; cannot close-menu') + end end end) @@ -359,11 +1379,17 @@ function M.show_menu() { title = "Get Metadata", value = "script-binding medios-info", hint = "Ctrl+i" }, { title = "Delete File", value = "script-binding medios-delete", hint = "Ctrl+Del" }, { title = "Load URL", value = {"script-message-to", mp.get_script_name(), "medios-load-url"} }, + { title = "Download", value = {"script-message-to", mp.get_script_name(), "medios-download-current"} }, + { title = "Change Format", value = {"script-message-to", mp.get_script_name(), "medios-change-format-current"} }, } } local json = utils.format_json(menu_data) - mp.commandv('script-message-to', 'uosc', 'open-menu', json) + if ensure_uosc_loaded() then + mp.commandv('script-message-to', 'uosc', 'open-menu', json) + else + _lua_log('menu: uosc not available; cannot open-menu') + end end -- Keybindings @@ -379,7 +1405,9 @@ mp.add_key_binding("L", "medeia-lyric-toggle-shift", lyric_toggle) -- Start the persistent pipeline helper eagerly at launch. -- This avoids spawning Python per command and works cross-platform via MPV IPC. mp.add_timeout(0, function() + pcall(ensure_mpv_ipc_server) pcall(ensure_pipeline_helper_running) + pcall(_lua_log, 'medeia-lua loaded version=' .. 
MEDEIA_LUA_VERSION) end) return M diff --git a/MPV/mpv_ipc.py b/MPV/mpv_ipc.py index 364467c..ccf298e 100644 --- a/MPV/mpv_ipc.py +++ b/MPV/mpv_ipc.py @@ -198,11 +198,13 @@ class MPV: ipc_path: Optional[str] = None, lua_script_path: Optional[str | Path] = None, timeout: float = 5.0, + check_mpv: bool = True, ) -> None: - ok, reason = _check_mpv_availability() - if not ok: - raise MPVIPCError(reason or "MPV unavailable") + if bool(check_mpv): + ok, reason = _check_mpv_availability() + if not ok: + raise MPVIPCError(reason or "MPV unavailable") self.timeout = timeout self.ipc_path = ipc_path or get_ipc_pipe_path() @@ -246,6 +248,66 @@ class MPV: resp = self.send({"command": ["set_property", name, value]}) return bool(resp and resp.get("error") == "success") + def download( + self, + *, + url: str, + fmt: str, + store: Optional[str] = None, + path: Optional[str] = None, + ) -> Dict[str, Any]: + """Download a URL using the same pipeline semantics as the MPV UI. + + This is intended as a stable Python entrypoint for "button actions". + It does not require mpv.exe availability (set check_mpv=False if needed). + """ + url = str(url or "").strip() + fmt = str(fmt or "").strip() + store = str(store or "").strip() if store is not None else None + path = str(path or "").strip() if path is not None else None + + if not url: + return {"success": False, "stdout": "", "stderr": "", "error": "Missing url"} + if not fmt: + return {"success": False, "stdout": "", "stderr": "", "error": "Missing fmt"} + if bool(store) == bool(path): + return { + "success": False, + "stdout": "", + "stderr": "", + "error": "Provide exactly one of store or path", + } + + # Ensure any in-process cmdlets that talk to MPV pick up this IPC path. + try: + os.environ["MEDEIA_MPV_IPC"] = str(self.ipc_path) + except Exception: + pass + + def _q(s: str) -> str: + return '"' + s.replace('\\', '\\\\').replace('"', '\\"') + '"' + + pipeline = f"download-media -url {_q(url)} -format {_q(fmt)}" + if store: + pipeline += f" | add-file -store {_q(store)}" + else: + pipeline += f" | add-file -path {_q(path or '')}" + + try: + from TUI.pipeline_runner import PipelineExecutor # noqa: WPS433 + + executor = PipelineExecutor() + result = executor.run_pipeline(pipeline) + return { + "success": bool(getattr(result, "success", False)), + "stdout": getattr(result, "stdout", "") or "", + "stderr": getattr(result, "stderr", "") or "", + "error": getattr(result, "error", None), + "pipeline": pipeline, + } + except Exception as exc: + return {"success": False, "stdout": "", "stderr": "", "error": f"{type(exc).__name__}: {exc}", "pipeline": pipeline} + def get_playlist(self, silent: bool = False) -> Optional[List[Dict[str, Any]]]: resp = self.send({"command": ["get_property", "playlist"], "request_id": 100}, silent=silent) if resp is None: @@ -427,13 +489,62 @@ class MPV: http_header_fields: Optional[str] = None, detached: bool = True, ) -> None: + # uosc reads its config from "~~/script-opts/uosc.conf". + # With --no-config, mpv makes ~~ expand to an empty string, so uosc can't load. + # Instead, point mpv at a repo-controlled config directory. + try: + repo_root = Path(__file__).resolve().parent.parent + except Exception: + repo_root = Path.cwd() + + portable_config_dir = repo_root / "MPV" / "portable_config" + try: + (portable_config_dir / "script-opts").mkdir(parents=True, exist_ok=True) + except Exception: + pass + + # Ensure uosc.conf is available at the location uosc expects. 
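+        # Copy-once semantics: an existing script-opts/uosc.conf is never
+        # overwritten, so user edits to the seeded config survive restarts.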
+ try: + src_uosc_conf = repo_root / "MPV" / "LUA" / "uosc" / "uosc.conf" + dst_uosc_conf = portable_config_dir / "script-opts" / "uosc.conf" + if src_uosc_conf.exists(): + # Only seed a default config if the user doesn't already have one. + if not dst_uosc_conf.exists(): + dst_uosc_conf.write_bytes(src_uosc_conf.read_bytes()) + except Exception: + pass + cmd: List[str] = [ "mpv", + f"--config-dir={str(portable_config_dir)}", + # Allow mpv to auto-load scripts from /scripts/ (e.g., thumbfast). + "--load-scripts=yes", + "--osc=no", + "--load-console=no", + "--load-commands=no", + "--load-select=no", + "--load-context-menu=no", + "--load-positioning=no", + "--load-stats-overlay=no", + "--load-auto-profiles=no", + "--ytdl=yes", f"--input-ipc-server={self.ipc_path}", "--idle=yes", "--force-window=yes", ] + # uosc and other scripts are expected to be auto-loaded from portable_config/scripts. + # We keep the back-compat fallback only if the user hasn't installed uosc.lua there. + try: + uosc_entry = portable_config_dir / "scripts" / "uosc.lua" + if not uosc_entry.exists() and self.lua_script_path: + lua_dir = Path(self.lua_script_path).resolve().parent + uosc_main = lua_dir / "uosc" / "scripts" / "uosc" / "main.lua" + if uosc_main.exists(): + cmd.append(f"--script={str(uosc_main)}") + except Exception: + pass + # Always load the bundled Lua script at startup. if self.lua_script_path and os.path.exists(self.lua_script_path): cmd.append(f"--script={self.lua_script_path}") @@ -519,6 +630,112 @@ class MPVIPCClient: self.sock: socket.socket | BinaryIO | None = None self.is_windows = platform.system() == "Windows" self.silent = bool(silent) + self._recv_buffer: bytes = b"" + + def _write_payload(self, payload: str) -> None: + if not self.sock: + if not self.connect(): + raise MPVIPCError("Not connected") + + if self.is_windows: + pipe = cast(BinaryIO, self.sock) + pipe.write(payload.encode("utf-8")) + pipe.flush() + else: + sock_obj = cast(socket.socket, self.sock) + sock_obj.sendall(payload.encode("utf-8")) + + def _readline(self, *, timeout: Optional[float] = None) -> Optional[bytes]: + if not self.sock: + if not self.connect(): + return None + + effective_timeout = self.timeout if timeout is None else float(timeout) + deadline = _time.time() + max(0.0, effective_timeout) + + if self.is_windows: + try: + pipe = cast(BinaryIO, self.sock) + return pipe.readline() + except (OSError, IOError): + return None + + # Unix: buffer until newline. + sock_obj = cast(socket.socket, self.sock) + while True: + nl = self._recv_buffer.find(b"\n") + if nl != -1: + line = self._recv_buffer[: nl + 1] + self._recv_buffer = self._recv_buffer[nl + 1 :] + return line + + remaining = deadline - _time.time() + if remaining <= 0: + return None + + try: + # Temporarily narrow timeout for this read. + old_timeout = sock_obj.gettimeout() + try: + sock_obj.settimeout(remaining) + chunk = sock_obj.recv(4096) + finally: + sock_obj.settimeout(old_timeout) + except socket.timeout: + return None + except Exception: + return None + + if not chunk: + # EOF + return b"" + self._recv_buffer += chunk + + def read_message(self, *, timeout: Optional[float] = None) -> Optional[Dict[str, Any]]: + """Read the next JSON message/event from MPV. 
+ + Returns: + - dict: parsed JSON message/event + - {"event": "__eof__"} if the stream ended + - None on timeout / no data + """ + raw = self._readline(timeout=timeout) + if raw is None: + return None + if raw == b"": + return {"event": "__eof__"} + try: + return json.loads(raw.decode("utf-8", errors="replace").strip()) + except Exception: + return None + + def send_command_no_wait(self, command_data: Dict[str, Any] | List[Any]) -> Optional[int]: + """Send a command to mpv without waiting for its response. + + This is important for long-running event loops (helpers) so we don't + consume/lose async events (like property-change) while waiting. + """ + try: + request: Dict[str, Any] + if isinstance(command_data, list): + request = {"command": command_data} + else: + request = dict(command_data) + + if "request_id" not in request: + request["request_id"] = int(_time.time() * 1000) % 100000 + + payload = json.dumps(request) + "\n" + self._write_payload(payload) + return int(request["request_id"]) + except Exception as exc: + if not self.silent: + debug(f"Error sending no-wait command to MPV: {exc}") + try: + self.disconnect() + except Exception: + pass + return None def connect(self) -> bool: """Connect to mpv IPC socket. @@ -592,44 +809,22 @@ class MPVIPCClient: _debug(f"[IPC] Sending: {payload.strip()}") # Send command - if self.is_windows: - pipe = cast(BinaryIO, self.sock) - pipe.write(payload.encode("utf-8")) - pipe.flush() - else: - sock_obj = cast(socket.socket, self.sock) - sock_obj.sendall(payload.encode("utf-8")) + self._write_payload(payload) # Receive response # We need to read lines until we find the one with matching request_id # or until timeout/error. MPV might send events in between. start_time = _time.time() while _time.time() - start_time < self.timeout: - response_data = b"" - if self.is_windows: - try: - pipe = cast(BinaryIO, self.sock) - response_data = pipe.readline() - except (OSError, IOError): - return None - else: - try: - # This is simplistic for Unix socket (might not get full line) - # But for now assuming MPV sends line-buffered JSON - sock_obj = cast(socket.socket, self.sock) - chunk = sock_obj.recv(4096) - if not chunk: - break - response_data = chunk - # TODO: Handle partial lines if needed - except socket.timeout: - return None + response_data = self._readline(timeout=self.timeout) + if response_data is None: + return None if not response_data: break try: - lines = response_data.decode('utf-8').strip().split('\n') + lines = response_data.decode('utf-8', errors='replace').strip().split('\n') for line in lines: if not line: continue resp = json.loads(line) diff --git a/MPV/pipeline_helper.py b/MPV/pipeline_helper.py index 72448e2..daf5c31 100644 --- a/MPV/pipeline_helper.py +++ b/MPV/pipeline_helper.py @@ -17,15 +17,18 @@ Protocol (user-data properties): This helper is intentionally minimal: one request at a time, last-write-wins. 
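+
+Illustrative exchange (shape only; ids and values are made up):
+
+    request  -> {"id": "42-7", "pipeline": "download-media -url \"...\""}
+    response <- {"id": "42-7", "success": true, "stdout": "...", "stderr": "",
+                 "error": null, "table": null}
+
+Op requests instead carry {"id": ..., "op": "store-choices"} and their
+responses add a "choices" list.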
""" - from __future__ import annotations +MEDEIA_MPV_HELPER_VERSION = "2025-12-19" + import argparse import json import os import sys import tempfile import time +import logging +import re from pathlib import Path from typing import Any, Dict, Optional @@ -40,7 +43,9 @@ if _ROOT not in sys.path: sys.path.insert(0, _ROOT) -from SYS.tasks import connect_ipc # noqa: E402 +from MPV.mpv_ipc import MPVIPCClient # noqa: E402 +from config import load_config # noqa: E402 +from SYS.logger import set_debug, debug, set_thread_stream # noqa: E402 REQUEST_PROP = "user-data/medeia-pipeline-request" @@ -49,68 +54,361 @@ READY_PROP = "user-data/medeia-pipeline-ready" OBS_ID_REQUEST = 1001 - -def _json_line(payload: Dict[str, Any]) -> bytes: - return (json.dumps(payload, ensure_ascii=False) + "\n").encode("utf-8") - - -class MPVWire: - def __init__(self, ipc_path: str, *, timeout: float = 5.0) -> None: - self.ipc_path = ipc_path - self.timeout = timeout - self._fh: Optional[Any] = None - self._req_id = 1 - - def connect(self) -> bool: - self._fh = connect_ipc(self.ipc_path, timeout=self.timeout) - return self._fh is not None - - @property - def fh(self): - if self._fh is None: - raise RuntimeError("Not connected") - return self._fh - - def send(self, command: list[Any]) -> int: - self._req_id = (self._req_id + 1) % 1000000 - req_id = self._req_id - self.fh.write(_json_line({"command": command, "request_id": req_id})) - self.fh.flush() - return req_id - - def set_property(self, name: str, value: Any) -> int: - return self.send(["set_property", name, value]) - - def observe_property(self, obs_id: int, name: str, fmt: str = "string") -> int: - # mpv requires an explicit format argument. - return self.send(["observe_property", obs_id, name, fmt]) - - def read_message(self) -> Optional[Dict[str, Any]]: - raw = self.fh.readline() - if raw == b"": - return {"event": "__eof__"} - if not raw: - return None - try: - return json.loads(raw.decode("utf-8", errors="replace")) - except Exception: - return None + def _run_pipeline(pipeline_text: str, *, seeds: Any = None) -> Dict[str, Any]: # Import after sys.path fix. from TUI.pipeline_runner import PipelineExecutor # noqa: WPS433 + def _table_to_payload(table: Any) -> Optional[Dict[str, Any]]: + if table is None: + return None + try: + title = getattr(table, "title", "") + except Exception: + title = "" + + rows_payload = [] + try: + rows = getattr(table, "rows", None) + except Exception: + rows = None + if isinstance(rows, list): + for r in rows: + cols_payload = [] + try: + cols = getattr(r, "columns", None) + except Exception: + cols = None + if isinstance(cols, list): + for c in cols: + try: + cols_payload.append( + { + "name": getattr(c, "name", ""), + "value": getattr(c, "value", ""), + } + ) + except Exception: + continue + + sel_args = None + try: + sel = getattr(r, "selection_args", None) + if isinstance(sel, list): + sel_args = [str(x) for x in sel] + except Exception: + sel_args = None + + rows_payload.append({"columns": cols_payload, "selection_args": sel_args}) + + # Only return JSON-serializable data (Lua only needs title + rows). 
+ return {"title": str(title or ""), "rows": rows_payload} + executor = PipelineExecutor() result = executor.run_pipeline(pipeline_text, seeds=seeds) + + table_payload = None + try: + table_payload = _table_to_payload(getattr(result, "result_table", None)) + except Exception: + table_payload = None + return { "success": bool(result.success), "stdout": result.stdout or "", "stderr": result.stderr or "", "error": result.error, + "table": table_payload, } +def _run_op(op: str, data: Any) -> Dict[str, Any]: + """Run a helper-only operation. + + These are NOT cmdlets and are not available via CLI pipelines. They exist + solely so MPV Lua can query lightweight metadata (e.g., autocomplete lists) + without inventing user-facing commands. + """ + op_name = str(op or "").strip().lower() + + # Provide store backend choices using the same source as CLI/Typer autocomplete. + if op_name in {"store-choices", "store_choices", "get-store-choices", "get_store_choices"}: + # Preferred: call the same choice function used by the CLI completer. + try: + from CLI import get_store_choices # noqa: WPS433 + + backends = get_store_choices() + choices = sorted({str(n) for n in (backends or []) if str(n).strip()}) + except Exception: + # Fallback: direct Store registry enumeration using loaded config. + try: + cfg = load_config() or {} + except Exception: + cfg = {} + try: + from Store import Store # noqa: WPS433 + + storage = Store(cfg, suppress_debug=True) + backends = storage.list_backends() or [] + choices = sorted({str(n) for n in backends if str(n).strip()}) + except Exception as exc: + return { + "success": False, + "stdout": "", + "stderr": "", + "error": f"{type(exc).__name__}: {exc}", + "table": None, + "choices": [], + } + + return { + "success": True, + "stdout": "", + "stderr": "", + "error": None, + "table": None, + "choices": choices, + } + + # Provide yt-dlp format list for a URL (for MPV "Change format" menu). + # Returns a ResultTable-like payload so the Lua UI can render without running cmdlets. + if op_name in {"ytdlp-formats", "ytdlp_formats", "ytdl-formats", "ytdl_formats"}: + try: + url = None + if isinstance(data, dict): + url = data.get("url") + url = str(url or "").strip() + if not url: + return { + "success": False, + "stdout": "", + "stderr": "", + "error": "Missing url", + "table": None, + } + + # Fast gate: only for streaming URLs yt-dlp knows about. + try: + from SYS.download import is_url_supported_by_ytdlp # noqa: WPS433 + + if not is_url_supported_by_ytdlp(url): + return { + "success": False, + "stdout": "", + "stderr": "", + "error": "URL not supported by yt-dlp", + "table": None, + } + except Exception: + # If probing support fails, still attempt extraction and let yt-dlp decide. + pass + + try: + import yt_dlp # type: ignore + except Exception as exc: + return { + "success": False, + "stdout": "", + "stderr": "", + "error": f"yt-dlp module not available: {type(exc).__name__}: {exc}", + "table": None, + } + + cookiefile = None + try: + from tool.ytdlp import YtDlpTool # noqa: WPS433 + + cfg = load_config() or {} + cookie_path = YtDlpTool(cfg).resolve_cookiefile() + if cookie_path is not None: + cookiefile = str(cookie_path) + except Exception: + cookiefile = None + + ydl_opts: Dict[str, Any] = { + "quiet": True, + "no_warnings": True, + "socket_timeout": 20, + "retries": 2, + "skip_download": True, + # Avoid accidentally expanding huge playlists on load. 
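+                # (noplaylist: resolve just the single video when a watch URL
+                # also carries a playlist parameter.)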
+ "noplaylist": True, + "noprogress": True, + } + if cookiefile: + ydl_opts["cookiefile"] = cookiefile + + def _format_bytes(n: Any) -> str: + try: + v = float(n) + except Exception: + return "" + if v <= 0: + return "" + units = ["B", "KB", "MB", "GB", "TB"] + i = 0 + while v >= 1024 and i < len(units) - 1: + v /= 1024.0 + i += 1 + if i == 0: + return f"{int(v)} {units[i]}" + return f"{v:.1f} {units[i]}" + + with yt_dlp.YoutubeDL(ydl_opts) as ydl: # type: ignore[attr-defined] + info = ydl.extract_info(url, download=False) + + if not isinstance(info, dict): + return { + "success": False, + "stdout": "", + "stderr": "", + "error": "yt-dlp returned non-dict info", + "table": None, + } + + formats = info.get("formats") + if not isinstance(formats, list) or not formats: + return { + "success": True, + "stdout": "", + "stderr": "", + "error": None, + "table": {"title": "Formats", "rows": []}, + } + + rows = [] + for fmt in formats: + if not isinstance(fmt, dict): + continue + format_id = str(fmt.get("format_id") or "").strip() + if not format_id: + continue + + # Prefer human-ish resolution. + resolution = str(fmt.get("resolution") or "").strip() + if not resolution: + w = fmt.get("width") + h = fmt.get("height") + try: + if w and h: + resolution = f"{int(w)}x{int(h)}" + elif h: + resolution = f"{int(h)}p" + except Exception: + resolution = "" + + ext = str(fmt.get("ext") or "").strip() + size = _format_bytes(fmt.get("filesize") or fmt.get("filesize_approx")) + + # Build selection args compatible with MPV Lua picker. + selection_args = ["-format", format_id] + + rows.append( + { + "columns": [ + {"name": "ID", "value": format_id}, + {"name": "Resolution", "value": resolution or ""}, + {"name": "Ext", "value": ext or ""}, + {"name": "Size", "value": size or ""}, + ], + "selection_args": selection_args, + } + ) + + return { + "success": True, + "stdout": "", + "stderr": "", + "error": None, + "table": {"title": "Formats", "rows": rows}, + } + except Exception as exc: + return { + "success": False, + "stdout": "", + "stderr": "", + "error": f"{type(exc).__name__}: {exc}", + "table": None, + } + + return { + "success": False, + "stdout": "", + "stderr": "", + "error": f"Unknown op: {op_name}", + "table": None, + } + + +def _helper_log_path() -> Path: + try: + d = _repo_root() / "Log" + d.mkdir(parents=True, exist_ok=True) + return d / "medeia-mpv-helper.log" + except Exception: + return Path(tempfile.gettempdir()) / "medeia-mpv-helper.log" + + +def _append_helper_log(text: str) -> None: + payload = (text or "").rstrip() + if not payload: + return + try: + path = _helper_log_path() + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "a", encoding="utf-8", errors="replace") as fh: + fh.write(payload + "\n") + except Exception: + return + + +def _acquire_ipc_lock(ipc_path: str) -> Optional[Any]: + """Best-effort singleton lock per IPC path. + + Multiple helpers subscribing to mpv log-message events causes duplicated + log output. Use a tiny file lock to ensure one helper per mpv instance. + """ + try: + safe = re.sub(r"[^a-zA-Z0-9_.-]+", "_", str(ipc_path or "")) + if not safe: + safe = "mpv" + # Keep lock files out of the repo's Log/ directory to avoid clutter. 
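+        # Example (assumption: default Windows pipe path): ipc
+        # '\\.\pipe\mpv-medeia-macina' sanitizes to the lock file
+        # '<TEMP>/medeia-mpv-helper/medeia-mpv-helper-_._pipe_mpv-medeia-macina.lock'.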
+ lock_dir = Path(tempfile.gettempdir()) / "medeia-mpv-helper" + lock_dir.mkdir(parents=True, exist_ok=True) + lock_path = lock_dir / f"medeia-mpv-helper-{safe}.lock" + fh = open(lock_path, "a+", encoding="utf-8", errors="replace") + + if os.name == "nt": + try: + import msvcrt # type: ignore + + fh.seek(0) + msvcrt.locking(fh.fileno(), msvcrt.LK_NBLCK, 1) + except Exception: + try: + fh.close() + except Exception: + pass + return None + else: + try: + import fcntl # type: ignore + + fcntl.flock(fh.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) + except Exception: + try: + fh.close() + except Exception: + pass + return None + + return fh + except Exception: + return None + + def _parse_request(data: Any) -> Optional[Dict[str, Any]]: if data is None: return None @@ -131,14 +429,87 @@ def _parse_request(data: Any) -> Optional[Dict[str, Any]]: def main(argv: Optional[list[str]] = None) -> int: parser = argparse.ArgumentParser(prog="mpv-pipeline-helper") parser.add_argument("--ipc", required=True, help="mpv --input-ipc-server path") - parser.add_argument("--timeout", type=float, default=5.0) + parser.add_argument("--timeout", type=float, default=15.0) args = parser.parse_args(argv) + # Load config once and configure logging similar to CLI.pipeline. + try: + cfg = load_config() or {} + except Exception: + cfg = {} + + try: + debug_enabled = bool(isinstance(cfg, dict) and cfg.get("debug", False)) + set_debug(debug_enabled) + + if debug_enabled: + logging.basicConfig( + level=logging.DEBUG, + format='[%(name)s] %(levelname)s: %(message)s', + stream=sys.stderr, + ) + for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"): + try: + logging.getLogger(noisy).setLevel(logging.WARNING) + except Exception: + pass + except Exception: + pass + # Ensure all in-process cmdlets that talk to MPV pick up the exact IPC server # path used by this helper (which comes from the running MPV instance). os.environ["MEDEIA_MPV_IPC"] = str(args.ipc) - error_log_dir = Path(tempfile.gettempdir()) + # Ensure single helper instance per ipc. + _lock_fh = _acquire_ipc_lock(str(args.ipc)) + if _lock_fh is None: + _append_helper_log(f"[helper] another instance already holds lock for ipc={args.ipc}; exiting") + return 0 + + try: + _append_helper_log(f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}") + debug(f"[mpv-helper] logging to: {_helper_log_path()}") + except Exception: + pass + + # Route SYS.logger output into the helper log file so diagnostics are not + # lost in mpv's console/terminal output. + try: + class _HelperLogStream: + def __init__(self) -> None: + self._pending = "" + + def write(self, s: str) -> int: + if not s: + return 0 + text = self._pending + str(s) + lines = text.splitlines(keepends=True) + if lines and not lines[-1].endswith(("\n", "\r")): + self._pending = lines[-1] + lines = lines[:-1] + else: + self._pending = "" + for line in lines: + payload = line.rstrip("\r\n") + if payload: + _append_helper_log("[py] " + payload) + return len(s) + + def flush(self) -> None: + if self._pending: + _append_helper_log("[py] " + self._pending.rstrip("\r\n")) + self._pending = "" + + set_thread_stream(_HelperLogStream()) + except Exception: + pass + + # Prefer a stable repo-local log folder for discoverability. 
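The locking calls above are the standard non-blocking primitives on each platform. The same single-instance pattern as a self-contained sketch (try_lock is an illustrative name, not a function from this diff):

import os
from pathlib import Path
from typing import IO, Optional

def try_lock(lock_path: Path) -> Optional[IO[str]]:
    """Return an open, locked handle, or None if another process holds the lock."""
    fh = open(lock_path, "a+", encoding="utf-8")
    try:
        if os.name == "nt":
            import msvcrt
            fh.seek(0)
            msvcrt.locking(fh.fileno(), msvcrt.LK_NBLCK, 1)  # lock 1 byte, non-blocking
        else:
            import fcntl
            fcntl.flock(fh.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)  # exclusive, non-blocking
        return fh  # caller must keep the handle alive for the lock's lifetime
    except OSError:
        fh.close()
        return None

The returned handle is the lock: the OS releases it when the handle is closed or the process exits, which is why main() keeps it in _lock_fh.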
+ error_log_dir = _repo_root() / "Log" + try: + error_log_dir.mkdir(parents=True, exist_ok=True) + except Exception: + error_log_dir = Path(tempfile.gettempdir()) last_error_log = error_log_dir / "medeia-mpv-pipeline-last-error.log" def _write_error_log(text: str, *, req_id: str) -> Optional[str]: @@ -164,33 +535,131 @@ def main(argv: Optional[list[str]] = None) -> int: return str(stamped) if stamped else str(last_error_log) - wire = MPVWire(args.ipc, timeout=float(args.timeout)) - if not wire.connect(): - return 2 + # Connect to mpv's JSON IPC. On Windows, the pipe can exist but reject opens + # briefly during startup; also mpv may create the IPC server slightly after + # the Lua script launches us. Retry until timeout. + connect_deadline = time.time() + max(0.5, float(args.timeout)) + last_connect_error: Optional[str] = None - # Mark ready ASAP. + client = MPVIPCClient(socket_path=args.ipc, timeout=0.5, silent=True) + while True: + try: + if client.connect(): + break + except Exception as exc: + last_connect_error = f"{type(exc).__name__}: {exc}" + + if time.time() > connect_deadline: + _append_helper_log(f"[helper] failed to connect ipc={args.ipc} error={last_connect_error or 'timeout'}") + return 2 + + # Keep trying. + time.sleep(0.10) + + # Mark ready ASAP and keep it fresh. + # Use a unix timestamp so the Lua side can treat it as a heartbeat. + last_ready_ts: float = 0.0 + + def _touch_ready() -> None: + nonlocal last_ready_ts + now = time.time() + # Throttle updates to reduce IPC chatter. + if (now - last_ready_ts) < 0.75: + return + try: + client.send_command_no_wait(["set_property", READY_PROP, str(int(now))]) + last_ready_ts = now + except Exception: + return + + _touch_ready() + + # Mirror mpv's own log messages into our helper log file so debugging does + # not depend on the mpv on-screen console or mpv's log-file. try: - wire.set_property(READY_PROP, "1") + level = "debug" if debug_enabled else "warn" + client.send_command_no_wait(["request_log_messages", level]) + _append_helper_log(f"[helper] requested mpv log messages level={level}") except Exception: pass + # De-dup/throttle mpv log-message lines (mpv and yt-dlp can be very chatty). + last_mpv_line: Optional[str] = None + last_mpv_count: int = 0 + last_mpv_ts: float = 0.0 + + def _flush_mpv_repeat() -> None: + nonlocal last_mpv_line, last_mpv_count + if last_mpv_line and last_mpv_count > 1: + _append_helper_log(f"[mpv] (previous line repeated {last_mpv_count}x)") + last_mpv_line = None + last_mpv_count = 0 + # Observe request property changes. try: - wire.observe_property(OBS_ID_REQUEST, REQUEST_PROP, "string") + client.send_command_no_wait(["observe_property", OBS_ID_REQUEST, REQUEST_PROP, "string"]) except Exception: return 3 last_seen_id: Optional[str] = None + try: + _append_helper_log(f"[helper] connected to ipc={args.ipc}") + except Exception: + pass + while True: - msg = wire.read_message() + msg = client.read_message(timeout=0.25) if msg is None: - time.sleep(0.05) + # Keep READY fresh even when idle (Lua may clear it on timeouts). + _touch_ready() + time.sleep(0.02) continue if msg.get("event") == "__eof__": + try: + _flush_mpv_repeat() + except Exception: + pass return 0 + if msg.get("event") == "log-message": + try: + level = str(msg.get("level") or "") + prefix = str(msg.get("prefix") or "") + text = str(msg.get("text") or "").rstrip() + + if not text: + continue + + # Filter excessive noise unless debug is enabled. 
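The connect loop above reduces to "retry until a deadline, swallowing transient errors". As a generic sketch (connect_with_deadline is an illustrative name):

import time
from typing import Callable

def connect_with_deadline(connect: Callable[[], bool], timeout: float, poll: float = 0.1) -> bool:
    """Retry a connect callable until it succeeds or the deadline passes."""
    deadline = time.time() + max(0.5, timeout)
    while True:
        try:
            if connect():
                return True
        except Exception:
            pass  # e.g. a named pipe that exists but is not accepting opens yet
        if time.time() > deadline:
            return False
        time.sleep(poll)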
+ if not debug_enabled: + lower_prefix = prefix.lower() + if "quic" in lower_prefix and "DEBUG:" in text: + continue + # Suppress progress-bar style lines (keep true errors). + if ("ETA" in text or "%" in text) and ("ERROR:" not in text and "WARNING:" not in text): + # Typical yt-dlp progress bar line. + if text.lstrip().startswith("["): + continue + + line = f"[mpv {level}] {prefix} {text}".strip() + + now = time.time() + if last_mpv_line == line and (now - last_mpv_ts) < 2.0: + last_mpv_count += 1 + last_mpv_ts = now + continue + + _flush_mpv_repeat() + last_mpv_line = line + last_mpv_count = 1 + last_mpv_ts = now + _append_helper_log(line) + except Exception: + pass + continue + if msg.get("event") != "property-change": continue @@ -202,10 +671,12 @@ def main(argv: Optional[list[str]] = None) -> int: continue req_id = str(req.get("id") or "") + op = str(req.get("op") or "").strip() + data = req.get("data") pipeline_text = str(req.get("pipeline") or "").strip() seeds = req.get("seeds") - if not req_id or not pipeline_text: + if not req_id: continue if last_seen_id == req_id: @@ -213,14 +684,29 @@ def main(argv: Optional[list[str]] = None) -> int: last_seen_id = req_id try: - run = _run_pipeline(pipeline_text, seeds=seeds) + label = pipeline_text if pipeline_text else (op and ("op=" + op) or "(empty)") + _append_helper_log(f"\n[request {req_id}] {label}") + except Exception: + pass + + try: + if op: + run = _run_op(op, data) + else: + if not pipeline_text: + continue + run = _run_pipeline(pipeline_text, seeds=seeds) + resp = { "id": req_id, "success": bool(run.get("success")), "stdout": run.get("stdout", ""), "stderr": run.get("stderr", ""), "error": run.get("error"), + "table": run.get("table"), } + if "choices" in run: + resp["choices"] = run.get("choices") except Exception as exc: resp = { "id": req_id, @@ -228,8 +714,20 @@ def main(argv: Optional[list[str]] = None) -> int: "stdout": "", "stderr": "", "error": f"{type(exc).__name__}: {exc}", + "table": None, } + # Persist helper output for debugging MPV menu interactions. + try: + if resp.get("stdout"): + _append_helper_log("[stdout]\n" + str(resp.get("stdout"))) + if resp.get("stderr"): + _append_helper_log("[stderr]\n" + str(resp.get("stderr"))) + if resp.get("error"): + _append_helper_log("[error]\n" + str(resp.get("error"))) + except Exception: + pass + if not resp.get("success"): details = "" if resp.get("error"): @@ -241,7 +739,9 @@ def main(argv: Optional[list[str]] = None) -> int: resp["log_path"] = log_path try: - wire.set_property(RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)) + # IMPORTANT: don't wait for a response here; waiting would consume + # async events and can drop/skip property-change notifications. + client.send_command_no_wait(["set_property", RESPONSE_PROP, json.dumps(resp, ensure_ascii=False)]) except Exception: # If posting results fails, there's nothing more useful to do. pass diff --git a/Provider/alldebrid.py b/Provider/alldebrid.py index 5bec6ca..5af08ab 100644 --- a/Provider/alldebrid.py +++ b/Provider/alldebrid.py @@ -1,9 +1,11 @@ from __future__ import annotations +from pathlib import Path import sys from typing import Any, Dict, Iterable, List, Optional from ProviderCore.base import SearchProvider, SearchResult +from ProviderCore.download import sanitize_filename from SYS.logger import log @@ -66,6 +68,89 @@ class AllDebrid(SearchProvider): # Consider "available" when configured; actual API connectivity can vary. 
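The last_mpv_* bookkeeping implements a small squash-repeats-within-a-window policy. The same idea as a standalone class (RepeatSquasher is a hypothetical name; emit is any line sink):

import time
from typing import Callable, Optional

class RepeatSquasher:
    """Collapse identical consecutive lines seen within a short time window."""

    def __init__(self, window: float = 2.0) -> None:
        self.window = window
        self.last: Optional[str] = None
        self.count = 0
        self.ts = 0.0

    def feed(self, line: str, emit: Callable[[str], None]) -> None:
        now = time.time()
        if line == self.last and (now - self.ts) < self.window:
            self.count += 1
            self.ts = now
            return
        self.flush(emit)
        self.last, self.count, self.ts = line, 1, now
        emit(line)

    def flush(self, emit: Callable[[str], None]) -> None:
        if self.last is not None and self.count > 1:
            emit(f"(previous line repeated {self.count}x)")
        self.last, self.count = None, 0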
return bool(_get_debrid_api_key(self.config or {})) + def download(self, result: SearchResult, output_dir: Path) -> Optional[Path]: + """Download an AllDebrid SearchResult into output_dir. + + AllDebrid magnet file listings often provide links that require an API + "unlock" step to produce a true direct-download URL. Without unlocking, + callers may download a small HTML/redirect page instead of file bytes. + + This is used by the download-file cmdlet when a provider item is piped. + """ + try: + api_key = _get_debrid_api_key(self.config or {}) + if not api_key: + return None + + target = str(getattr(result, "path", "") or "").strip() + if not target.startswith(("http://", "https://")): + return None + + try: + from API.alldebrid import AllDebridClient + + client = AllDebridClient(api_key) + except Exception as exc: + log(f"[alldebrid] Failed to init client: {exc}", file=sys.stderr) + return None + + # Quiet mode when download-file is mid-pipeline. + quiet = bool(self.config.get("_quiet_background_output")) if isinstance(self.config, dict) else False + + unlocked_url = target + try: + unlocked = client.unlock_link(target) + if isinstance(unlocked, str) and unlocked.strip().startswith(("http://", "https://")): + unlocked_url = unlocked.strip() + except Exception as exc: + # Fall back to the raw link, but warn. + log(f"[alldebrid] Failed to unlock link: {exc}", file=sys.stderr) + + # Prefer provider title as the output filename. + suggested = sanitize_filename(str(getattr(result, "title", "") or "").strip()) + suggested_name = suggested if suggested else None + + try: + from SYS.download import _download_direct_file + + dl_res = _download_direct_file( + unlocked_url, + Path(output_dir), + quiet=quiet, + suggested_filename=suggested_name, + ) + downloaded_path = getattr(dl_res, "path", None) + if downloaded_path is None: + return None + downloaded_path = Path(str(downloaded_path)) + + # Guard: if we got an HTML error/redirect page, treat as failure. + try: + if downloaded_path.exists(): + size = downloaded_path.stat().st_size + if size > 0 and size <= 250_000 and downloaded_path.suffix.lower() not in (".html", ".htm"): + head = downloaded_path.read_bytes()[:512] + try: + text = head.decode("utf-8", errors="ignore").lower() + except Exception: + text = "" + if " Iterable[Dict[str, Any]]: """Flatten AllDebrid magnet file tree into file dicts. 
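The guard at the end of download() exists because a failed unlock often yields a small HTML login/redirect page instead of file bytes. A minimal standalone sketch of that kind of check (function name and thresholds are illustrative, not taken from the diff):

from pathlib import Path

def looks_like_html_page(path: Path, max_size: int = 250_000) -> bool:
    """Heuristic: a small file whose first bytes parse as HTML is probably an error page."""
    try:
        size = path.stat().st_size
    except OSError:
        return False
    if size == 0 or size > max_size:
        return False
    head = path.read_bytes()[:512].lstrip().lower()
    return head.startswith((b"<!doctype", b"<html")) or b"<html" in head

On a hit, the provider can delete the file and return None so the caller surfaces a real failure instead of storing junk.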
diff --git a/Provider/soulseek.py b/Provider/soulseek.py index 61ce508..8fa375b 100644 --- a/Provider/soulseek.py +++ b/Provider/soulseek.py @@ -13,6 +13,7 @@ from typing import Any, Dict, List, Optional from ProviderCore.base import SearchProvider, SearchResult from SYS.logger import log, debug +from models import ProgressBar _SOULSEEK_NOISE_SUBSTRINGS = ( @@ -502,58 +503,145 @@ async def download_soulseek_file( raise RuntimeError("Soulseek credentials not configured (set provider=soulseek username/password)") settings = Settings(credentials=CredentialsSettings(username=login_user, password=login_pass)) - client = SoulSeekClient(settings) - with _suppress_aioslsk_noise(): - try: - await client.start() - await client.login() - debug(f"[soulseek] Logged in as {login_user}") - - debug(f"[soulseek] Requesting download from {username}: {filename}") - - transfer = await client.transfers.add(Transfer(username, filename, TransferDirection.DOWNLOAD)) - transfer.local_path = str(output_path) - await client.transfers.queue(transfer) - - start_time = time.time() - last_log_time = 0.0 - while not transfer.is_finalized(): - if time.time() - start_time > timeout: - log(f"[soulseek] Download timeout after {timeout}s", file=sys.stderr) - return None - - if time.time() - last_log_time >= 5.0 and transfer.bytes_transfered > 0: - progress = (transfer.bytes_transfered / transfer.filesize * 100) if transfer.filesize else 0 - debug( - f"[soulseek] Progress: {progress:.1f}% " - f"({transfer.bytes_transfered}/{transfer.filesize})" - ) - last_log_time = time.time() - - await asyncio.sleep(1) - - if transfer.state.VALUE == TransferState.COMPLETE and transfer.local_path: - downloaded_path = Path(transfer.local_path) - if downloaded_path.exists(): - debug(f"[soulseek] Download complete: {downloaded_path}") - return downloaded_path - - log(f"[soulseek] Transfer completed but file missing: {downloaded_path}", file=sys.stderr) - return None - - log( - f"[soulseek] Download failed: state={transfer.state.VALUE} " - f"bytes={transfer.bytes_transfered}/{transfer.filesize}", - file=sys.stderr, - ) - return None - - finally: + async def _attempt_once(attempt_num: int) -> tuple[Optional[Path], Any, int, float]: + client = SoulSeekClient(settings) + with _suppress_aioslsk_noise(): try: - await client.stop() - except Exception: - pass + await client.start() + await client.login() + debug(f"[soulseek] Logged in as {login_user}") + + log( + f"[soulseek] Download attempt {attempt_num}: {username} :: {local_filename}", + file=sys.stderr, + ) + debug(f"[soulseek] Requesting download from {username}: {filename}") + + transfer = await client.transfers.add(Transfer(username, filename, TransferDirection.DOWNLOAD)) + transfer.local_path = str(output_path) + await client.transfers.queue(transfer) + + start_time = time.time() + last_progress_time = start_time + progress_bar = ProgressBar() + + while not transfer.is_finalized(): + elapsed = time.time() - start_time + if elapsed > timeout: + log(f"[soulseek] Download timeout after {timeout}s", file=sys.stderr) + bytes_done = int(getattr(transfer, "bytes_transfered", 0) or 0) + state_val = getattr(getattr(transfer, "state", None), "VALUE", None) + try: + if getattr(sys.stderr, "isatty", lambda: False)(): + sys.stderr.write("\r" + (" " * 140) + "\r") + sys.stderr.flush() + except Exception: + pass + return None, state_val, bytes_done, elapsed + + bytes_done = int(getattr(transfer, "bytes_transfered", 0) or 0) + total_bytes = int(getattr(transfer, "filesize", 0) or 0) + now = time.time() + if 
now - last_progress_time >= 0.5: + percent = (bytes_done / total_bytes) * 100.0 if total_bytes > 0 else 0.0 + speed = bytes_done / elapsed if elapsed > 0 else 0.0 + eta_str: Optional[str] = None + if total_bytes > 0 and speed > 0: + try: + eta_seconds = max(0.0, float(total_bytes - bytes_done) / float(speed)) + minutes, seconds = divmod(int(eta_seconds), 60) + hours, minutes = divmod(minutes, 60) + eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}" + except Exception: + eta_str = None + + speed_str = progress_bar.format_bytes(speed) + "/s" + progress_line = progress_bar.format_progress( + percent_str=f"{percent:.1f}%", + downloaded=bytes_done, + total=total_bytes if total_bytes > 0 else None, + speed_str=speed_str, + eta_str=eta_str, + ) + + try: + if getattr(sys.stderr, "isatty", lambda: False)(): + sys.stderr.write("\r" + progress_line + " ") + sys.stderr.flush() + else: + log(progress_line, file=sys.stderr) + except Exception: + pass + + last_progress_time = now + + await asyncio.sleep(1) + + final_state = getattr(getattr(transfer, "state", None), "VALUE", None) + downloaded_path = Path(transfer.local_path) if getattr(transfer, "local_path", None) else output_path + final_elapsed = time.time() - start_time + + # Clear in-place progress bar. + try: + if getattr(sys.stderr, "isatty", lambda: False)(): + sys.stderr.write("\r" + (" " * 140) + "\r") + sys.stderr.flush() + except Exception: + pass + + # If a file was written, treat it as success even if state is odd. + try: + if downloaded_path.exists() and downloaded_path.stat().st_size > 0: + if final_state != TransferState.COMPLETE: + log( + f"[soulseek] Transfer finalized as {final_state}, but file exists ({downloaded_path.stat().st_size} bytes). Keeping file.", + file=sys.stderr, + ) + return downloaded_path, final_state, int(downloaded_path.stat().st_size), final_elapsed + except Exception: + pass + + if final_state == TransferState.COMPLETE and downloaded_path.exists(): + debug(f"[soulseek] Download complete: {downloaded_path}") + return downloaded_path, final_state, int(downloaded_path.stat().st_size), final_elapsed + + fail_bytes = int(getattr(transfer, "bytes_transfered", 0) or 0) + fail_total = int(getattr(transfer, "filesize", 0) or 0) + reason = getattr(transfer, "reason", None) + log( + f"[soulseek] Download failed: state={final_state} bytes={fail_bytes}/{fail_total} reason={reason}", + file=sys.stderr, + ) + + # Clean up 0-byte placeholder. + try: + if downloaded_path.exists() and downloaded_path.stat().st_size == 0: + downloaded_path.unlink(missing_ok=True) + except Exception: + pass + return None, final_state, fail_bytes, final_elapsed + + finally: + try: + await client.stop() + except Exception: + pass + + # Retry a couple times only for fast 0-byte failures (common transient case). + max_attempts = 3 + for attempt in range(1, max_attempts + 1): + result_path, final_state, bytes_done, elapsed = await _attempt_once(attempt) + if result_path: + return result_path + + should_retry = (bytes_done == 0) and (elapsed < 15.0) + if attempt < max_attempts and should_retry: + log(f"[soulseek] Retrying after fast failure (state={final_state})", file=sys.stderr) + await asyncio.sleep(2) + continue + break + return None except ImportError: log("[soulseek] aioslsk not installed. 
Install with: pip install aioslsk", file=sys.stderr) diff --git a/SYS/download.py b/SYS/download.py index 3ba1831..990c340 100644 --- a/SYS/download.py +++ b/SYS/download.py @@ -510,6 +510,7 @@ def _download_direct_file( output_dir: Path, debug_logger: Optional[DebugLogger] = None, quiet: bool = False, + suggested_filename: Optional[str] = None, ) -> DownloadMediaResult: """Download a direct file (PDF, image, document, etc.) without yt-dlp.""" ensure_directory(output_dir) @@ -517,6 +518,44 @@ def _download_direct_file( from urllib.parse import unquote, urlparse, parse_qs import re + def _sanitize_filename(name: str) -> str: + # Windows-safe filename sanitization. + # Keep it simple: strip path parts, drop invalid chars, collapse whitespace. + text = str(name or "").strip() + if not text: + return "" + # Remove any path components + text = text.replace("/", "\\") + text = text.split("\\")[-1] + + invalid = set('<>:"/\\|?*') + cleaned_chars: List[str] = [] + for ch in text: + o = ord(ch) + if o < 32: + cleaned_chars.append(" ") + continue + if ch in invalid: + cleaned_chars.append(" ") + continue + cleaned_chars.append(ch) + cleaned = " ".join("".join(cleaned_chars).split()).strip() + # Avoid trailing dots/spaces on Windows + cleaned = cleaned.rstrip(" .") + return cleaned + + def _unique_path(path: Path) -> Path: + if not path.exists(): + return path + stem = path.stem + suffix = path.suffix + parent = path.parent + for i in range(1, 10_000): + candidate = parent / f"{stem} ({i}){suffix}" + if not candidate.exists(): + return candidate + return parent / f"{stem} ({int(time.time())}){suffix}" + # Extract filename from URL parsed_url = urlparse(url) url_path = parsed_url.path @@ -560,11 +599,29 @@ def _download_direct_file( if not quiet: log(f"Could not get filename from headers: {e}", file=sys.stderr) - # Fallback if we still don't have a good filename + # Apply suggested filename (from provider title) if given. + suggested = _sanitize_filename(suggested_filename) if suggested_filename else "" + if suggested: + # Preserve extension from suggested name if present; otherwise borrow from detected filename. + suggested_path = Path(suggested) + if suggested_path.suffix: + filename = suggested + else: + detected_ext = "" + try: + detected_ext = Path(str(filename)).suffix + except Exception: + detected_ext = "" + if detected_ext: + filename = suggested + detected_ext + else: + filename = suggested + + # Final fallback if we still don't have a good filename if not filename or "." 
not in filename: filename = "downloaded_file.bin" - file_path = output_dir / filename + file_path = _unique_path(output_dir / filename) progress_bar = ProgressBar() if not quiet: @@ -581,32 +638,57 @@ def _download_direct_file( total_bytes[0] = content_length now = time.time() - if now - last_progress_time[0] >= 0.5 and total_bytes[0] > 0: - elapsed = now - start_time - percent = (bytes_downloaded / content_length) * 100 if content_length > 0 else 0 - speed = bytes_downloaded / elapsed if elapsed > 0 else 0 - eta_seconds = (content_length - bytes_downloaded) / speed if speed > 0 else 0 + if now - last_progress_time[0] < 0.5: + return - speed_str = progress_bar.format_bytes(speed) + "/s" - minutes, seconds = divmod(int(eta_seconds), 60) - hours, minutes = divmod(minutes, 60) - eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}" + elapsed = now - start_time + percent = (bytes_downloaded / content_length) * 100 if content_length > 0 else 0 + speed = bytes_downloaded / elapsed if elapsed > 0 else 0 + eta_str: Optional[str] = None + if content_length > 0 and speed > 0: + try: + eta_seconds = max(0.0, float(content_length - bytes_downloaded) / float(speed)) + minutes, seconds = divmod(int(eta_seconds), 60) + hours, minutes = divmod(minutes, 60) + eta_str = f"{hours:02d}:{minutes:02d}:{seconds:02d}" + except Exception: + eta_str = None - progress_line = progress_bar.format_progress( - percent_str=f"{percent:.1f}%", - downloaded=bytes_downloaded, - total=content_length, - speed_str=speed_str, - eta_str=eta_str, - ) - if not quiet: - debug(progress_line) - last_progress_time[0] = now + speed_str = progress_bar.format_bytes(speed) + "/s" + + progress_line = progress_bar.format_progress( + percent_str=f"{percent:.1f}%", + downloaded=bytes_downloaded, + total=content_length if content_length > 0 else None, + speed_str=speed_str, + eta_str=eta_str, + ) + + if not quiet: + try: + if getattr(sys.stderr, "isatty", lambda: False)(): + sys.stderr.write("\r" + progress_line + " ") + sys.stderr.flush() + else: + # Non-interactive: print occasional progress lines. + log(progress_line, file=sys.stderr) + except Exception: + pass + + last_progress_time[0] = now with HTTPClient(timeout=30.0) as client: client.download(url, str(file_path), progress_callback=progress_callback) elapsed = time.time() - start_time + # Clear in-place progress bar. + if not quiet: + try: + if getattr(sys.stderr, "isatty", lambda: False)(): + sys.stderr.write("\r" + (" " * 140) + "\r") + sys.stderr.flush() + except Exception: + pass avg_speed_str = progress_bar.format_bytes(downloaded_bytes[0] / elapsed if elapsed > 0 else 0) + "/s" if not quiet: debug(f"✓ Downloaded in {elapsed:.1f}s at {avg_speed_str}") diff --git a/SYS/tasks.py b/SYS/tasks.py index 8a49945..926d8bf 100644 --- a/SYS/tasks.py +++ b/SYS/tasks.py @@ -26,7 +26,9 @@ def connect_ipc(path: str, timeout: float = 5.0) -> IO[bytes] | None: return None time.sleep(0.05) except OSError as exc: # Pipe busy - if exc.errno not in (errno.ENOENT, errno.EPIPE, errno.EBUSY): + # Windows named pipes can intermittently raise EINVAL while the pipe exists + # but is not ready/accepting connections yet. 
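Both the soulseek and direct-download progress paths now use the same terminal trick: carriage-return overwrites on a TTY, occasional plain lines otherwise. Distilled into two helpers (names are illustrative):

import sys

def show_progress(line: str) -> None:
    """Overwrite the current line on a TTY; emit plain lines when piped or logged."""
    if getattr(sys.stderr, "isatty", lambda: False)():
        sys.stderr.write("\r" + line + " ")
        sys.stderr.flush()
    else:
        print(line, file=sys.stderr)

def clear_progress(width: int = 140) -> None:
    """Blank the in-place progress line before printing normal output."""
    if getattr(sys.stderr, "isatty", lambda: False)():
        sys.stderr.write("\r" + " " * width + "\r")
        sys.stderr.flush()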
+ if exc.errno not in (errno.ENOENT, errno.EPIPE, errno.EBUSY, errno.EINVAL): raise if time.time() > deadline: return None diff --git a/cmdlet/delete_tag.py b/cmdlet/delete_tag.py index 2ab2450..0141627 100644 --- a/cmdlet/delete_tag.py +++ b/cmdlet/delete_tag.py @@ -94,15 +94,22 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: log(f"Cmdlet: {CMDLET.name}\nSummary: {CMDLET.summary}\nUsage: {CMDLET.usage}") return 0 - # Check if we have a piped TagItem with no args (i.e., from @1 | delete-tag) - has_piped_tag = (result and hasattr(result, '__class__') and - result.__class__.__name__ == 'TagItem' and - hasattr(result, 'tag_name')) - - # Check if we have a piped list of TagItems (from @N selection) - has_piped_tag_list = (isinstance(result, list) and result and - hasattr(result[0], '__class__') and - result[0].__class__.__name__ == 'TagItem') + def _looks_like_tag_row(obj: Any) -> bool: + if obj is None: + return False + # TagItem (direct) or PipeObject/dict emitted from get-tag table rows. + try: + if hasattr(obj, '__class__') and obj.__class__.__name__ == 'TagItem' and hasattr(obj, 'tag_name'): + return True + except Exception: + pass + try: + return bool(get_field(obj, 'tag_name')) + except Exception: + return False + + has_piped_tag = _looks_like_tag_row(result) + has_piped_tag_list = isinstance(result, list) and bool(result) and _looks_like_tag_row(result[0]) if not args and not has_piped_tag and not has_piped_tag_list: log("Requires at least one tag argument") @@ -195,9 +202,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: # If we have TagItems and no args, we are deleting the tags themselves # If we have Files (or other objects) and args, we are deleting tags FROM those files - # Check if we are in "delete selected tags" mode (TagItems) - is_tag_item_mode = (items_to_process and hasattr(items_to_process[0], '__class__') and - items_to_process[0].__class__.__name__ == 'TagItem') + # Check if we are in "delete selected tags" mode (tag rows) + is_tag_item_mode = bool(items_to_process) and _looks_like_tag_row(items_to_process[0]) if is_tag_item_mode: # Collect all tags to delete from the TagItems @@ -248,8 +254,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: ) item_store = override_store or get_field(item, "store") - if hasattr(item, '__class__') and item.__class__.__name__ == 'TagItem': - # It's a TagItem + if _looks_like_tag_row(item): + # It's a tag row (TagItem or PipeObject/dict with tag_name) if tags_arg: # User provided tags to delete FROM this file (ignoring the tag name in the item?) # Or maybe they want to delete the tag in the item AND the args? diff --git a/cmdlet/download_file.py b/cmdlet/download_file.py index 8953dc5..10ea115 100644 --- a/cmdlet/download_file.py +++ b/cmdlet/download_file.py @@ -36,11 +36,13 @@ class Download_File(Cmdlet): super().__init__( name="download-file", summary="Download files via HTTP or provider handlers", - usage="download-file [options] OR @N | download-file [options]", + usage="download-file [-path DIR] [options] OR @N | download-file [-path DIR] [options]", alias=["dl-file", "download-http"], arg=[ - CmdletArg(name="output", type="string", alias="o", description="Output directory (overrides defaults)"), SharedArgs.URL, + SharedArgs.PATH, + # Prefer -path for output directory to match other cmdlets; keep -output for backwards compatibility. 
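For context, the retry policy that widened errno set feeds looks roughly like this (open_pipe_with_retry is a sketch, not connect_ipc itself):

import errno
import time
from typing import IO, Optional

TRANSIENT_ERRNOS = {errno.ENOENT, errno.EPIPE, errno.EBUSY, errno.EINVAL}

def open_pipe_with_retry(path: str, timeout: float = 5.0) -> Optional[IO[bytes]]:
    """Keep retrying opens that fail with transient errnos until the deadline."""
    deadline = time.time() + timeout
    while True:
        try:
            return open(path, "r+b", buffering=0)
        except OSError as exc:
            if exc.errno not in TRANSIENT_ERRNOS:
                raise
            if time.time() > deadline:
                return None
            time.sleep(0.05)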
+ CmdletArg(name="-output", type="string", alias="o", description="(deprecated) Output directory (use -path instead)"), ], detail=["Download files directly via HTTP without yt-dlp processing.", "For streaming sites, use download-media."], @@ -50,10 +52,6 @@ class Download_File(Cmdlet): def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: """Main execution method.""" - stage_ctx = pipeline_context.get_stage_context() - in_pipeline = stage_ctx is not None and getattr(stage_ctx, "total_stages", 1) > 1 - if in_pipeline and isinstance(config, dict): - config["_quiet_background_output"] = True return self._run_impl(result, args, config) def _run_impl(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: @@ -169,7 +167,47 @@ class Download_File(Cmdlet): log(f"Error processing {url}: {e}", file=sys.stderr) # 2) Provider item downloads (piped results) + # Expand provider "folder" rows into their contained files when possible (e.g., AllDebrid magnets). + expanded_items: List[Any] = [] for item in piped_items: + try: + table = get_field(item, "table") + media_kind = get_field(item, "media_kind") + full_metadata = get_field(item, "full_metadata") + target = get_field(item, "path") or get_field(item, "url") + + if str(table or "").lower() == "alldebrid" and str(media_kind or "").lower() == "folder": + magnet_id = None + if isinstance(full_metadata, dict): + magnet_id = full_metadata.get("magnet_id") + if magnet_id is None and isinstance(target, str) and target.lower().startswith("alldebrid:magnet:"): + try: + magnet_id = int(target.split(":")[-1]) + except Exception: + magnet_id = None + + if magnet_id is not None and get_search_provider is not None: + provider = get_search_provider("alldebrid", config) + if provider is not None: + try: + files = provider.search("*", limit=10_000, filters={"view": "files", "magnet_id": int(magnet_id)}) + except Exception: + files = [] + + # If the magnet isn't ready, provider.search returns a single not-ready folder row. + if files and len(files) == 1 and getattr(files[0], "media_kind", "") == "folder": + detail = getattr(files[0], "detail", "") + log(f"[download-file] AllDebrid magnet {magnet_id} not ready ({detail or 'unknown'})", file=sys.stderr) + else: + for sr in files: + expanded_items.append(sr.to_dict() if hasattr(sr, "to_dict") else sr) + continue + + expanded_items.append(item) + except Exception: + expanded_items.append(item) + + for item in expanded_items: try: table = get_field(item, "table") title = get_field(item, "title") @@ -226,8 +264,12 @@ class Download_File(Cmdlet): from cmdlet.search_provider import CMDLET as _SEARCH_PROVIDER_CMDLET # Use plain title text (LibGen mirrors can be finicky with fielded query prefixes). 
fallback_query = title_text + exec_fn = getattr(_SEARCH_PROVIDER_CMDLET, "exec", None) + if not callable(exec_fn): + log("[download-file] search-provider cmdlet unavailable; cannot run LibGen fallback search", file=sys.stderr) + continue - ret = _SEARCH_PROVIDER_CMDLET.exec( + ret = exec_fn( None, ["-provider", "libgen", "-query", fallback_query], config, @@ -243,7 +285,10 @@ class Download_File(Cmdlet): except Exception: pass - return int(ret) + try: + return int(ret) # type: ignore[arg-type] + except Exception: + return 1 except Exception: pass @@ -259,7 +304,14 @@ class Download_File(Cmdlet): log("[download-file] Refusing to download LibGen landing page (expected provider to resolve file link)", file=sys.stderr) continue debug(f"[download-file] Provider item looks like direct URL, downloading: {target}") - result_obj = _download_direct_file(target, final_output_dir, quiet=quiet_mode) + # Use provider title as filename hint so multiple items don't overwrite as downloaded_file.bin + suggested_name = str(title).strip() if title is not None else None + result_obj = _download_direct_file( + target, + final_output_dir, + quiet=quiet_mode, + suggested_filename=suggested_name, + ) file_path = None if hasattr(result_obj, "path"): file_path = getattr(result_obj, "path") @@ -301,7 +353,7 @@ class Download_File(Cmdlet): def _resolve_output_dir(self, parsed: Dict[str, Any], config: Dict[str, Any]) -> Optional[Path]: """Resolve the output directory from storage location or config.""" - output_dir_arg = parsed.get("output") + output_dir_arg = parsed.get("path") or parsed.get("output") if output_dir_arg: try: out_path = Path(str(output_dir_arg)).expanduser() diff --git a/cmdlet/download_media.py b/cmdlet/download_media.py index 2e0fa86..2e01f34 100644 --- a/cmdlet/download_media.py +++ b/cmdlet/download_media.py @@ -12,6 +12,7 @@ Focused cmdlet for video/audio downloads from yt-dlp-supported sites: from __future__ import annotations import sys +import os from pathlib import Path from typing import Any, Dict, List, Optional, Sequence @@ -430,10 +431,29 @@ def _build_ytdlp_options(opts: DownloadOptions) -> Dict[str, Any]: "fragment_retries": 10, "http_chunk_size": 10_485_760, "restrictfilenames": True, - # Always show a progress indicator; do not tie it to debug logging. - "progress_hooks": [_progress_callback], } + # Prefer the bundled ffmpeg shipped with the repo (used for merges/remux/postproc). + try: + repo_root = Path(__file__).resolve().parents[1] + bundled_ffmpeg_dir = repo_root / "MPV" / "ffmpeg" / "bin" + if bundled_ffmpeg_dir.exists(): + base_options.setdefault("ffmpeg_location", str(bundled_ffmpeg_dir)) + except Exception: + pass + + # On Windows, AV/indexers can transiently lock files at the end of a download. + # yt-dlp uses file_access_retries for renames (e.g. .part -> final). Default is low. + try: + if os.name == "nt": + base_options.setdefault("file_access_retries", 40) + except Exception: + pass + + # Avoid writing progress bars when running in quiet/background mode (e.g. mpv detached pipelines). 
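The three yt-dlp option tweaks above (bundled ffmpeg, Windows rename retries, quiet-aware progress hooks) compose as follows; build_ydl_opts is an illustrative wrapper, and the keys are standard yt-dlp options:

import os
from pathlib import Path
from typing import Any, Callable, Dict

def build_ydl_opts(quiet: bool, repo_root: Path, hook: Callable[[Dict[str, Any]], None]) -> Dict[str, Any]:
    opts: Dict[str, Any] = {"restrictfilenames": True}
    ffmpeg_dir = repo_root / "MPV" / "ffmpeg" / "bin"
    if ffmpeg_dir.exists():
        opts["ffmpeg_location"] = str(ffmpeg_dir)  # bundled ffmpeg for merge/remux
    if os.name == "nt":
        opts["file_access_retries"] = 40  # ride out transient AV/indexer locks on rename
    if not quiet:
        opts["progress_hooks"] = [hook]  # no progress output in background pipelines
    return opts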
+ if not getattr(opts, "quiet", False): + base_options["progress_hooks"] = [_progress_callback] + if opts.cookies_path and opts.cookies_path.is_file(): base_options["cookiefile"] = str(opts.cookies_path) diff --git a/cmdlet/get_tag.py b/cmdlet/get_tag.py index 467bc52..73f9910 100644 --- a/cmdlet/get_tag.py +++ b/cmdlet/get_tag.py @@ -12,6 +12,8 @@ from __future__ import annotations import sys +from SYS.logger import log, debug + try: from Provider.openlibrary import OpenLibrary _ol_scrape_isbn_metadata = OpenLibrary.scrape_isbn_metadata @@ -94,7 +96,7 @@ def _emit_tags_as_table( file_hash: Optional[str], store: str = "hydrus", service_name: Optional[str] = None, - config: Dict[str, Any] = None, + config: Optional[Dict[str, Any]] = None, item_title: Optional[str] = None, path: Optional[str] = None, subject: Optional[Any] = None, @@ -107,11 +109,10 @@ def _emit_tags_as_table( from result_table import ResultTable # Create ResultTable with just tag column (no title) - table_title = "Tag" + # Keep the title stable and avoid including hash fragments. + table_title = "tag" if item_title: - table_title = f"Tag: {item_title}" - if file_hash: - table_title += f" [{file_hash[:8]}]" + table_title = f"tag: {item_title}" table = ResultTable(table_title, max_columns=1) table.set_source_command("get-tag", []) @@ -140,6 +141,28 @@ def _emit_tags_as_table( except AttributeError: ctx.set_last_result_table(table, tag_items, subject) # Note: CLI will handle displaying the table via ResultTable formatting + + +def _filter_scraped_tags(tags: List[str]) -> List[str]: + """Filter out tags we don't want to import from scraping.""" + blocked = {"title", "artist", "source"} + out: List[str] = [] + seen: set[str] = set() + for t in tags: + if not t: + continue + s = str(t).strip() + if not s: + continue + ns = s.split(":", 1)[0].strip().lower() if ":" in s else "" + if ns in blocked: + continue + key = s.lower() + if key in seen: + continue + seen.add(key) + out.append(s) + return out def _summarize_tags(tags_list: List[str], limit: int = 8) -> str: """Create a summary of tags for display.""" shown = [t for t in tags_list[:limit] if t] @@ -865,14 +888,32 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: log(f"Unknown metadata provider: {scrape_url}", file=sys.stderr) return 1 - # Prefer identifier tags (ISBN/OLID/etc.) when available; fallback to title/filename + # Prefer identifier tags (ISBN/OLID/etc.) when available; fallback to title/filename. + # IMPORTANT: do not rely on `result.tag` for this because it can be stale (cached on + # the piped PipeObject). Always prefer the current store-backed tags when possible. identifier_tags: List[str] = [] - result_tags = get_field(result, "tag", None) - if isinstance(result_tags, list): - identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))] - - # Try local sidecar if no tags present on result + file_hash_for_scrape = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None)) + store_for_scrape = get_field(result, "store", None) + if file_hash_for_scrape and store_for_scrape: + try: + from Store import Store + storage = Store(config) + backend = storage[str(store_for_scrape)] + current_tags, _src = backend.get_tag(file_hash_for_scrape, config=config) + if isinstance(current_tags, (list, tuple, set)) and current_tags: + identifier_tags = [str(t) for t in current_tags if isinstance(t, (str, bytes))] + except Exception: + # Fall back to whatever is present on the piped result if store lookup fails. 
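Assuming _filter_scraped_tags behaves as defined above (title/artist/source namespaces blocked, case-insensitive dedupe, input order preserved), a quick worked example:

tags = ["title:Foo", "genre:rock", "Genre:Rock", "source:itunes", "", "year:1999"]
assert _filter_scraped_tags(tags) == ["genre:rock", "year:1999"]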
+ pass + + # Fall back to tags carried on the result (may be stale). if not identifier_tags: + result_tags = get_field(result, "tag", None) + if isinstance(result_tags, list): + identifier_tags = [str(t) for t in result_tags if isinstance(t, (str, bytes))] + + # As a last resort, try local sidecar only when the item is not store-backed. + if not identifier_tags and (not file_hash_for_scrape or not store_for_scrape): file_path = get_field(result, "target", None) or get_field(result, "path", None) or get_field(result, "filename", None) if isinstance(file_path, str) and file_path and not file_path.lower().startswith(("http://", "https://")): try: @@ -939,8 +980,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: selection_payload = [] hash_for_payload = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None)) store_for_payload = get_field(result, "store", None) + # Preserve a consistent path field when present so selecting a metadata row + # keeps referring to the original file. + path_for_payload = get_field(result, "path", None) or get_field(result, "target", None) or get_field(result, "filename", None) for idx, item in enumerate(items): - tags = provider.to_tags(item) + tags = _filter_scraped_tags(provider.to_tags(item)) row = table.add_row() row.add_column("Title", item.get("title", "")) row.add_column("Artist", item.get("artist", "")) @@ -955,6 +999,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: "year": item.get("year"), "hash": hash_for_payload, "store": store_for_payload, + "path": path_for_payload, "extra": { "tag": tags, "provider": provider.name, @@ -967,7 +1012,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: ctx.set_current_stage_table(table) # Preserve items for @ selection and downstream pipes without emitting duplicates ctx.set_last_result_items_only(selection_payload) - print(table) return 0 # If -scrape was requested but no URL, that's an error @@ -978,6 +1022,70 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: # Handle @N selection which creates a list - extract the first item if isinstance(result, list) and len(result) > 0: result = result[0] + + # If the current result already carries a tag list (e.g. a selected metadata + # row from get-tag -scrape itunes), APPLY those tags to the file in the store. + result_provider = get_field(result, "provider", None) + result_tags = get_field(result, "tag", None) + if result_provider and isinstance(result_tags, list) and result_tags: + file_hash = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash", None)) + store_name = get_field(result, "store", None) + subject_path = get_field(result, "path", None) or get_field(result, "target", None) or get_field(result, "filename", None) + if not file_hash or not store_name: + log("Selected metadata row is missing hash/store; cannot apply tags", file=sys.stderr) + _emit_tags_as_table( + tags_list=[str(t) for t in result_tags if t is not None], + file_hash=file_hash, + store=str(store_name or "local"), + service_name=None, + config=config, + item_title=str(get_field(result, "title", None) or result_provider), + path=str(subject_path) if subject_path else None, + subject=result, + ) + return 0 + + # Apply tags to the store backend (no sidecar writing here). 
+ apply_tags = _filter_scraped_tags([str(t) for t in result_tags if t is not None]) + if not apply_tags: + log("No applicable scraped tags to apply (title:/artist:/source: are skipped)", file=sys.stderr) + return 0 + try: + from Store import Store + storage = Store(config) + backend = storage[str(store_name)] + ok = bool(backend.add_tag(file_hash, apply_tags, config=config)) + if not ok: + log(f"Failed to apply tags to store '{store_name}'", file=sys.stderr) + except Exception as exc: + log(f"Failed to apply tags: {exc}", file=sys.stderr) + return 1 + + # Show updated tags after applying. + try: + updated_tags, _src = backend.get_tag(file_hash, config=config) + except Exception: + updated_tags = apply_tags + if not updated_tags: + updated_tags = apply_tags + + _emit_tags_as_table( + tags_list=list(updated_tags), + file_hash=file_hash, + store=str(store_name), + service_name=None, + config=config, + item_title=str(get_field(result, "title", None) or get_field(result, "name", None) or str(result_provider)), + path=str(subject_path) if subject_path else None, + subject={ + "hash": file_hash, + "store": str(store_name), + "path": str(subject_path) if subject_path else None, + "title": get_field(result, "title", None) or get_field(result, "name", None), + "extra": {"applied_provider": str(result_provider)}, + }, + ) + return 0 hash_from_result = normalize_hash(get_field(result, "hash", None)) file_hash = hash_override or hash_from_result @@ -1022,6 +1130,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: # Build a subject payload representing the file whose tags are being shown subject_store = get_field(result, "store", None) or store_name + subject_path = get_field(result, "path", None) or get_field(result, "target", None) or get_field(result, "filename", None) subject_payload: Dict[str, Any] = { "tag": list(current), "title": item_title, @@ -1034,12 +1143,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: } if file_hash: subject_payload["hash"] = file_hash - if local_path: + if subject_path: try: - path_text = str(local_path) - subject_payload.update({ - "path": path_text, - }) + subject_payload["path"] = str(subject_path) except Exception: pass @@ -1050,7 +1156,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: service_name=service_name if source == "hydrus" else None, config=config, item_title=item_title, - path=str(local_path) if local_path else None, + path=str(subject_path) if subject_path else None, subject=subject_payload, ) @@ -1116,55 +1222,7 @@ class Get_Tag(Cmdlet): def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: """Execute get-tag cmdlet.""" - # Parse arguments - parsed = parse_cmdlet_args(args, self) - - # Get hash and store from parsed args or result - hash_override = parsed.get("hash") - file_hash = normalize_hash(hash_override) or normalize_hash(get_field(result, "hash")) - store_name = parsed.get("store") or get_field(result, "store") - - if not file_hash: - log("No hash available in result", file=sys.stderr) - return 1 - - if not store_name: - log("No store specified in result", file=sys.stderr) - return 1 - - # Get tags using storage backend - try: - from Store import Store - storage_obj = Store(config) - backend = storage_obj[store_name] - current, source = backend.get_tag(file_hash, config=config) - - if not current: - log("No tags found", file=sys.stderr) - return 1 - - # Build table and emit - item_title = get_field(result, "title") or file_hash[:16] - 
_emit_tags_as_table( - tags_list=current, - file_hash=file_hash, - store=store_name, - service_name="", - config=config, - item_title=item_title, - path=None, - subject=result, - ) - return 0 - - except KeyError: - log(f"Store '{store_name}' not found", file=sys.stderr) - return 1 - except Exception as exc: - log(f"Failed to get tags: {exc}", file=sys.stderr) - import traceback - traceback.print_exc(file=sys.stderr) - return 1 + return _run(result, args, config) # Create and register the cmdlet diff --git a/cmdnat/pipe.py b/cmdnat/pipe.py index e62373e..2c1020f 100644 --- a/cmdnat/pipe.py +++ b/cmdnat/pipe.py @@ -22,6 +22,77 @@ from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url _ALLDEBRID_UNLOCK_CACHE: Dict[str, str] = {} +def _repo_root() -> Path: + try: + return Path(__file__).resolve().parent.parent + except Exception: + return Path(os.getcwd()) + + +def _repo_log_dir() -> Path: + d = _repo_root() / "Log" + try: + d.mkdir(parents=True, exist_ok=True) + except Exception: + pass + return d + + +def _helper_log_file() -> Path: + return _repo_log_dir() / "medeia-mpv-helper.log" + + +def _lua_log_file() -> Path: + return _repo_log_dir() / "medeia-mpv-lua.log" + + +def _try_enable_mpv_file_logging(mpv_log_path: str, *, attempts: int = 3) -> bool: + """Best-effort enable mpv log-file + verbose level on a running instance. + + Note: mpv may not honor changing log-file at runtime on all builds/platforms. + We still try; if it fails, callers can fall back to restart-on-demand. + """ + if not isinstance(mpv_log_path, str) or not mpv_log_path.strip(): + return False + mpv_log_path = mpv_log_path.strip() + + try: + Path(mpv_log_path).parent.mkdir(parents=True, exist_ok=True) + with open(mpv_log_path, "a", encoding="utf-8", errors="replace"): + pass + except Exception: + pass + + ok = False + for _ in range(max(1, int(attempts))): + try: + # Try to set log-file and verbose level. + r1 = _send_ipc_command({"command": ["set_property", "options/log-file", mpv_log_path]}) + r2 = _send_ipc_command({"command": ["set_property", "options/msg-level", "all=v"]}) + ok = bool((r1 and r1.get("error") == "success") or (r2 and r2.get("error") == "success")) + + # Emit a predictable line so the file isn't empty if logging is active. + _send_ipc_command({"command": ["print-text", f"medeia: log enabled -> {mpv_log_path}"]}, silent=True) + except Exception: + ok = False + + # If mpv has opened the log file, it should have content shortly. + try: + p = Path(mpv_log_path) + if p.exists() and p.is_file() and p.stat().st_size > 0: + return True + except Exception: + pass + + try: + import time + time.sleep(0.15) + except Exception: + break + + return bool(ok) + + def _get_alldebrid_api_key(config: Optional[Dict[str, Any]]) -> Optional[str]: try: if not isinstance(config, dict): @@ -838,10 +909,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: set_debug(True) set_thread_stream(sys.stdout) try: - tmp_dir = Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".") + log_dir = _repo_log_dir() + mpv_log_path = str((log_dir / "medeia-mpv.log").resolve()) except Exception: - tmp_dir = Path(".") - mpv_log_path = str((tmp_dir / "medeia-mpv.log").resolve()) + mpv_log_path = str((Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".") / "medeia-mpv.log").resolve()) # Ensure file exists early so we can tail it even if mpv writes later. 
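_try_enable_mpv_file_logging drives mpv's JSON IPC wire format: one JSON object per line, with a command array. A minimal POSIX-socket sketch of that exchange (mpv_ipc_command is an illustrative name; Windows uses a named pipe instead, and a robust client matches replies by request_id because async events share the same stream):

import json
import socket

def mpv_ipc_command(sock_path: str, *command: str) -> dict:
    """Send one JSON IPC command to mpv over a Unix socket and read one reply line."""
    payload = json.dumps({"command": list(command)}) + "\n"
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
        s.connect(sock_path)
        s.sendall(payload.encode("utf-8"))
        return json.loads(s.makefile("r", encoding="utf-8").readline())

# e.g. mpv_ipc_command("/tmp/mpv.sock", "set_property", "options/log-file", "/tmp/mpv.log")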
try: Path(mpv_log_path).parent.mkdir(parents=True, exist_ok=True) @@ -851,6 +922,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: pass debug(f"MPV log file: {mpv_log_path}") + # Try to enable mpv file logging on the currently running instance. + # (If mpv wasn't started with --log-file, this may not work everywhere.) + try: + _try_enable_mpv_file_logging(mpv_log_path, attempts=3) + except Exception: + pass + # If mpv is already running, set log options live via IPC. try: mpv_live = MPV() @@ -899,6 +977,24 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: load_mode = parsed.get("load") current_mode = parsed.get("current") + # Pure log mode: `.pipe -log` should not run any playlist actions and + # should not print the playlist table. It should only enable/tail logs + # (handled in the `finally` block). + only_log = bool( + log_requested + and not url_arg + and index_arg is None + and not clear_mode + and not list_mode + and not play_mode + and not pause_mode + and not save_mode + and not load_mode + and not current_mode + ) + if only_log: + return 0 + # Handle --current flag: emit currently playing item to pipeline if current_mode: items = _get_playlist() @@ -959,6 +1055,19 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: # Fallback: just list the playlist if we can't determine index list_mode = True + # If the user explicitly requested -play while queueing a URL, interpret that + # as "play the URL I just queued" (not merely "unpause whatever is currently playing"). + if play_mode and index_arg is None: + if mpv_started: + # MPV was just started; give it a moment, then play first item. + import time + time.sleep(0.5) + index_arg = "1" + else: + playlist = _get_playlist(silent=True) + if playlist and len(playlist) > 0: + index_arg = str(len(playlist)) + # Ensure lyric overlay is running (auto-discovery handled by MPV.lyric). try: mpv = MPV() @@ -1411,11 +1520,64 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: finally: if log_requested and isinstance(mpv_log_path, str) and mpv_log_path.strip(): try: - tail_lines = _tail_text_file(mpv_log_path, max_lines=160) + # Give mpv a short moment to flush logs, then print a tail that is easy to copy. + print(f"MPV log file: {mpv_log_path}") + + # Best-effort: re-try enabling file logging at the end too (mpv may have + # been unreachable at the start). + try: + _try_enable_mpv_file_logging(mpv_log_path, attempts=2) + except Exception: + pass + + tail_lines: List[str] = [] + for _ in range(8): + tail_lines = _tail_text_file(mpv_log_path, max_lines=200) + if tail_lines: + break + try: + import time + time.sleep(0.25) + except Exception: + break + if tail_lines: print("MPV log (tail):") for ln in tail_lines: print(ln) + else: + print("MPV log (tail): ") + print("Note: On some Windows builds, mpv cannot start writing to --log-file after launch.") + print("If you need full [main2] logs, restart mpv so it starts with --log-file.") + + # Also print the helper log tail (this captures Python helper output that won't + # necessarily show up in MPV's own log-file). 
+ try: + helper_path = _helper_log_file() + helper_tail = _tail_text_file(str(helper_path), max_lines=200) + print(f"Helper log file: {str(helper_path)}") + if helper_tail: + print("Helper log (tail):") + for ln in helper_tail: + print(ln) + else: + print("Helper log (tail): ") + except Exception: + pass + + # Also print the Lua-side log tail (mp.msg output isn't always written to mpv's log-file). + try: + lua_path = _lua_log_file() + lua_tail = _tail_text_file(str(lua_path), max_lines=200) + print(f"Lua log file: {str(lua_path)}") + if lua_tail: + print("Lua log (tail):") + for ln in lua_tail: + print(ln) + else: + print("Lua log (tail): ") + except Exception: + pass except Exception: pass try: @@ -1486,8 +1648,7 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_ debug("Timed out waiting for MPV IPC connection", file=sys.stderr) return - # Ensure Lua script is loaded (redundant when started with --script, but safe) - mpv.ensure_lua_loaded() + # main.lua is loaded at startup via --script; don't reload it here. # Ensure lyric overlay is running (auto-discovery handled by MPV.lyric). _ensure_lyric_overlay(mpv)
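_tail_text_file's body is not part of these hunks; assuming it simply returns up to the last N lines (and [] when the file is unreadable), a minimal deque-based equivalent:

from collections import deque
from typing import List

def tail_text_file(path: str, max_lines: int = 200) -> List[str]:
    """Return up to the last max_lines lines of a text file, [] if unreadable."""
    try:
        with open(path, "r", encoding="utf-8", errors="replace") as fh:
            return [ln.rstrip("\r\n") for ln in deque(fh, maxlen=max_lines)]
    except OSError:
        return []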