2026-01-07 05:09:59 -08:00
parent edc33f4528
commit f0799191ff
10 changed files with 956 additions and 353 deletions

View File

@@ -1139,47 +1139,80 @@ local function run_pipeline_via_ipc_response(pipeline_cmd, seeds, timeout_second
return _run_helper_request_response(req, timeout_seconds)
end
local function _store_names_key(names)
if type(names) ~= 'table' or #names == 0 then
return ''
end
local normalized = {}
for _, name in ipairs(names) do
normalized[#normalized + 1] = trim(tostring(name or ''))
end
return table.concat(normalized, '\0')
end
local function _run_pipeline_request_async(pipeline_cmd, seeds, timeout_seconds, cb)
cb = cb or function() end
pipeline_cmd = trim(tostring(pipeline_cmd or ''))
if pipeline_cmd == '' then
cb(nil, 'empty pipeline command')
return
end
ensure_mpv_ipc_server()
local req = { pipeline = pipeline_cmd }
if seeds then
req.seeds = seeds
end
_run_helper_request_async(req, timeout_seconds or 30, cb)
end
local function _refresh_store_cache(timeout_seconds, on_complete)
ensure_mpv_ipc_server()
-- First, try reading the pre-computed cached property (set by helper at startup).
-- This avoids a request/response timeout if observe_property isn't working.
local prev_count = (type(_cached_store_names) == 'table') and #_cached_store_names or 0
local prev_key = _store_names_key(_cached_store_names)
local cached_json = mp.get_property('user-data/medeia-store-choices-cached')
_lua_log('stores: cache_read cached_json=' .. tostring(cached_json) .. ' len=' .. tostring(cached_json and #cached_json or 0))
if cached_json and cached_json ~= '' then
local function handle_cached(resp)
if not resp or type(resp) ~= 'table' or type(resp.choices) ~= 'table' then
_lua_log('stores: cache_parse result missing choices table; resp_type=' .. tostring(type(resp)))
return false
end
local out = {}
for _, v in ipairs(resp.choices) do
local name = trim(tostring(v or ''))
if name ~= '' then
out[#out + 1] = name
end
end
_cached_store_names = out
_store_cache_loaded = true
local preview = ''
if #out > 0 then
preview = table.concat(out, ', ')
end
_lua_log('stores: loaded ' .. tostring(#out) .. ' stores from cache: ' .. tostring(preview))
if type(on_complete) == 'function' then
on_complete(true, _store_names_key(out) ~= prev_key)
end
return true
end
-- Try to parse as JSON (may fail if not valid JSON)
local ok, cached_resp = pcall(utils.parse_json, cached_json)
_lua_log('stores: cache_parse ok=' .. tostring(ok) .. ' resp_type=' .. tostring(type(cached_resp)))
-- Handle both cases: parsed object OR string (if JSON lib returns string)
if ok then
-- If parse returned a string, it might still be valid JSON; try parsing again
if type(cached_resp) == 'string' then
_lua_log('stores: cache_parse returned string, trying again...')
ok, cached_resp = pcall(utils.parse_json, cached_resp)
_lua_log('stores: cache_parse retry ok=' .. tostring(ok) .. ' resp_type=' .. tostring(type(cached_resp)))
end
if ok then
if handle_cached(cached_resp) then
return true
end
end
else
_lua_log('stores: cache_parse failed ok=' .. tostring(ok) .. ' resp=' .. tostring(cached_resp))
@@ -1188,38 +1221,44 @@ local function _refresh_store_cache(timeout_seconds)
_lua_log('stores: cache_empty cached_json=' .. tostring(cached_json))
end
-- Fallback: request fresh store-choices from helper (with timeout).
_lua_log('stores: requesting store-choices via helper (fallback)')
_run_helper_request_async({ op = 'store-choices' }, timeout_seconds or 1, function(resp, err)
local success = false
local changed = false
if resp and resp.success and type(resp.choices) == 'table' then
local out = {}
for _, v in ipairs(resp.choices) do
local name = trim(tostring(v or ''))
if name ~= '' then
out[#out + 1] = name
end
end
_cached_store_names = out
_store_cache_loaded = true
local preview = ''
if #out > 0 then
preview = table.concat(out, ', ')
end
_lua_log('stores: loaded ' .. tostring(#out) .. ' stores via helper request: ' .. tostring(preview))
success = true
changed = (#out ~= prev_count) or (_store_names_key(out) ~= prev_key)
else
_lua_log(
'stores: failed to load store choices via helper; success='
.. tostring(resp and resp.success or false)
.. ' choices_type='
.. tostring(resp and type(resp.choices) or 'nil')
.. ' stderr='
.. tostring(resp and resp.stderr or '')
.. ' error='
.. tostring(resp and resp.error or err or '')
)
end
if type(on_complete) == 'function' then
on_complete(success, changed)
end
end)
return false
end
local function _uosc_open_list_picker(menu_type, title, items)
@@ -1286,35 +1325,12 @@ local function _open_store_picker()
-- Best-effort refresh; retry briefly to avoid races where the helper isn't
-- ready/observing yet at the exact moment the menu opens.
local function attempt_refresh(tries_left)
_refresh_store_cache(1.2, function(success, changed)
if success and changed then
_lua_log('stores: reopening menu (store list changed)')
_uosc_open_list_picker(STORE_PICKER_MENU_TYPE, 'Store', build_items())
end
end)
if tries_left > 0 then
mp.add_timeout(0.25, function()
@@ -1524,13 +1540,11 @@ function FileState:fetch_formats(cb)
return
end
-- Only applies to plain URLs (not store hash URLs).
if _extract_store_hash(url) then
if cb then cb(false, 'store-hash url') end
return
end
-- Cache hit.
local cached = _get_cached_formats_table(url)
if type(cached) == 'table' then
self:set_formats(url, cached)
@@ -1538,7 +1552,6 @@ function FileState:fetch_formats(cb)
return
end
-- In-flight: register waiter.
if _formats_inflight[url] then
_formats_waiters[url] = _formats_waiters[url] or {}
if cb then table.insert(_formats_waiters[url], cb) end
@@ -1548,7 +1561,6 @@ function FileState:fetch_formats(cb)
_formats_waiters[url] = _formats_waiters[url] or {}
if cb then table.insert(_formats_waiters[url], cb) end
-- Async request so the UI never blocks.
_run_helper_request_async({ op = 'ytdlp-formats', data = { url = url } }, 90, function(resp, err)
_formats_inflight[url] = nil
@@ -1664,12 +1676,26 @@ local function _current_ytdl_format_string()
return nil
end
local function _run_pipeline_detached(pipeline_cmd, on_failure)
if not pipeline_cmd or pipeline_cmd == '' then
return false
end
ensure_mpv_ipc_server()
if not ensure_pipeline_helper_running() then
if type(on_failure) == 'function' then
on_failure(nil, 'helper not running')
end
return false
end
_run_helper_request_async({ op = 'run-detached', data = { pipeline = pipeline_cmd } }, 1.0, function(resp, err)
if resp and resp.success then
return
end
if type(on_failure) == 'function' then
on_failure(resp, err)
end
end)
return true
end
local function _open_save_location_picker_for_pending_download()
@@ -1709,13 +1735,11 @@ local function _open_save_location_picker_for_pending_download()
if type(_pending_download) ~= 'table' or not _pending_download.url or not _pending_download.format then
return
end
_refresh_store_cache(1.5, function(success, changed)
if success and changed then
_uosc_open_list_picker(DOWNLOAD_STORE_MENU_TYPE, 'Save location', build_items())
end
end)
end)
end end
@@ -1769,7 +1793,12 @@ local function _start_download_flow_for_current()
return
end
ensure_mpv_ipc_server()
local pipeline_cmd = 'get-file -store ' .. quote_pipeline_arg(store_hash.store) .. ' -query ' .. quote_pipeline_arg('hash:' .. store_hash.hash) .. ' -path ' .. quote_pipeline_arg(folder)
M.run_pipeline(pipeline_cmd, nil, function(_, err)
if err then
mp.osd_message('Download failed: ' .. tostring(err), 5)
end
end)
mp.osd_message('Download started', 2)
return
end
@@ -1994,9 +2023,18 @@ mp.register_script_message('medios-download-pick-store', function(json)
local pipeline_cmd = 'download-file -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt)
.. ' | add-file -store ' .. quote_pipeline_arg(store)
-- Fall back to synchronous execution if detached failed.
local function run_pipeline_direct()
M.run_pipeline(pipeline_cmd, nil, function(_, err)
if err then
mp.osd_message('Download failed: ' .. tostring(err), 5)
end
end)
end
if not _run_pipeline_detached(pipeline_cmd, function()
run_pipeline_direct()
end) then
run_pipeline_direct()
end
mp.osd_message('Download started', 3)
_pending_download = nil
@@ -2022,8 +2060,18 @@ mp.register_script_message('medios-download-pick-path', function()
local pipeline_cmd = 'download-file -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt)
.. ' | add-file -path ' .. quote_pipeline_arg(folder)
local function run_pipeline_direct()
M.run_pipeline(pipeline_cmd, nil, function(_, err)
if err then
mp.osd_message('Download failed: ' .. tostring(err), 5)
end
end)
end
if not _run_pipeline_detached(pipeline_cmd, function()
run_pipeline_direct()
end) then
run_pipeline_direct()
end
mp.osd_message('Download started', 3)
_pending_download = nil
@@ -2197,84 +2245,96 @@ local function _call_mpv_api(request)
end
-- Run a Medeia pipeline command via the Python pipeline helper (IPC request/response).
-- Calls the callback with stdout on success or error message on failure.
function M.run_pipeline(pipeline_cmd, seeds, cb)
cb = cb or function() end
pipeline_cmd = trim(tostring(pipeline_cmd or ''))
if pipeline_cmd == '' then
cb(nil, 'empty pipeline command')
return
end
ensure_mpv_ipc_server()
_run_pipeline_request_async(pipeline_cmd, seeds, 30, function(resp, err)
if resp and resp.success then
cb(resp.stdout or '', nil)
return
end
local details = err or ''
if details == '' and type(resp) == 'table' then
if resp.error and tostring(resp.error) ~= '' then
details = tostring(resp.error)
elseif resp.stderr and tostring(resp.stderr) ~= '' then
details = tostring(resp.stderr)
end
end
if details == '' then
details = 'unknown'
end
_lua_log('pipeline failed cmd=' .. tostring(pipeline_cmd) .. ' err=' .. details)
cb(nil, details)
end)
end
-- Helper to run pipeline and parse JSON output
function M.run_pipeline_json(pipeline_cmd, seeds, cb)
cb = cb or function() end
-- Append | output-json if not present
if not pipeline_cmd:match('output%-json$') then
pipeline_cmd = pipeline_cmd .. ' | output-json'
end
M.run_pipeline(pipeline_cmd, seeds, function(output, err)
if output then
local ok, data = pcall(utils.parse_json, output)
if ok then
cb(data, nil)
return
end
_lua_log('Failed to parse JSON: ' .. output)
cb(nil, 'malformed JSON response')
return
end
cb(nil, err)
end)
end
-- Command: Get info for current file
function M.get_file_info()
local path = mp.get_property('path')
if not path then
return
end
-- We can pass the path as a seed item
local seed = {{path = path}}
M.run_pipeline_json('get-metadata', seed, function(data, err)
if data then
_lua_log('Metadata: ' .. utils.format_json(data))
mp.osd_message('Metadata loaded (check console)', 3)
return
end
if err then
mp.osd_message('Failed to load metadata: ' .. tostring(err), 3)
end
end)
end
-- Command: Delete current file
function M.delete_current_file()
local path = mp.get_property('path')
if not path then
return
end
local seed = {{path = path}}
M.run_pipeline('delete-file', seed, function(_, err)
if err then
mp.osd_message('Delete failed: ' .. tostring(err), 3)
return
end
mp.osd_message('File deleted', 3)
mp.command('playlist-next')
end)
end
-- Command: Load a URL via pipeline (Ctrl+Enter in prompt)
@@ -2619,14 +2679,18 @@ mp.register_script_message('medios-load-url-event', function(json)
end
ensure_mpv_ipc_server()
local pipeline_cmd = '.mpv -url ' .. quote_pipeline_arg(url) .. ' -play'
M.run_pipeline(pipeline_cmd, nil, function(_, err)
if err then
mp.osd_message('Load URL failed: ' .. tostring(err), 3)
return
end
if ensure_uosc_loaded() then
mp.commandv('script-message-to', 'uosc', 'close-menu', LOAD_URL_MENU_TYPE)
else
_lua_log('menu: uosc not available; cannot close-menu')
end
end)
end)
-- Menu integration with UOSC
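
Editor's note: the Lua changes above replace every blocking _run_helper_request_response call with _run_helper_request_async plus a callback, so store-cache refreshes, downloads, and URL loads no longer stall mpv's event loop. For orientation, here is a minimal, hypothetical Python sketch of the request/response contract the script appears to rely on. The helper itself is not part of this diff; the "medeia" CLI name and the example store names are assumptions inferred from the fields the Lua code sends and reads.

    import subprocess

    def handle_request(req: dict) -> dict:
        # Sketch only: ops and fields inferred from what the Lua script sends/reads.
        op = req.get("op")
        if op == "store-choices":
            # The script expects a 'choices' list of store names.
            return {"success": True, "choices": ["local", "hydrus"]}
        if op == "run-detached":
            # Fire-and-forget; "medeia" is a stand-in for the real pipeline CLI.
            subprocess.Popen(["medeia", req["data"]["pipeline"]])
            return {"success": True}
        if "pipeline" in req:
            proc = subprocess.run(["medeia", req["pipeline"]], capture_output=True, text=True)
            return {
                "success": proc.returncode == 0,
                "stdout": proc.stdout,
                "stderr": proc.stderr,
                "error": "" if proc.returncode == 0 else f"exit {proc.returncode}",
            }
        return {"success": False, "error": f"unknown op: {op}"}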

View File

@@ -1,2 +1,2 @@
# Medeia MPV script options
store=rpi

View File

@@ -10,14 +10,34 @@ import time
import sys
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Tuple
from urllib.parse import urlparse
from API.hifi import HifiApiClient
from ProviderCore.base import Provider, SearchResult, parse_inline_query_arguments
from Provider.tidal_shared import (
build_track_tags,
coerce_duration_seconds,
extract_artists,
stringify,
)
from SYS import pipeline as pipeline_context
from SYS.logger import debug, log
URL_API = (
"https://tidal-api.binimum.org",
"https://triton.squid.wtf",
"https://wolf.qqdl.site",
"https://maus.qqdl.site",
"https://vogel.qqdl.site",
"https://katze.qqdl.site",
"https://hund.qqdl.site",
"https://tidal.kinoplus.online",
"https://tidal-api.binimum.org", "https://tidal-api.binimum.org",
) )
_KEY_TO_PARAM: Dict[str, str] = { _KEY_TO_PARAM: Dict[str, str] = {
"album": "al", "album": "al",
"artist": "a", "artist": "a",
@@ -49,6 +69,20 @@ class HIFI(Provider):
TABLE_AUTO_STAGES = {
"hifi.track": ["download-file"],
}
QUERY_ARG_CHOICES = {
"album": (),
"artist": (),
"playlist": (),
"track": (),
"title": (),
"video": (),
}
INLINE_QUERY_FIELD_CHOICES = QUERY_ARG_CHOICES
URL_DOMAINS = (
"tidal.com",
"listen.tidal.com",
)
URL = URL_DOMAINS
"""Provider that targets the HiFi-RestAPI (Tidal proxy) search endpoint. """Provider that targets the HiFi-RestAPI (Tidal proxy) search endpoint.
The CLI can supply a list of fail-over URLs via ``provider.hifi.api_urls`` or The CLI can supply a list of fail-over URLs via ``provider.hifi.api_urls`` or
@@ -65,6 +99,14 @@ class HIFI(Provider):
self.api_timeout = 10.0
self.api_clients = [HifiApiClient(base_url=url, timeout=self.api_timeout) for url in self.api_urls]
def extract_query_arguments(self, query: str) -> Tuple[str, Dict[str, Any]]:
normalized, parsed = parse_inline_query_arguments(query)
filtered: Dict[str, Any] = {}
for key, value in parsed.items():
if key in self.QUERY_ARG_CHOICES:
filtered[key] = value
return normalized, filtered
def validate(self) -> bool:
return bool(self.api_urls)
@@ -77,8 +119,16 @@ class HIFI(Provider):
) -> List[SearchResult]:
if limit <= 0:
return []
normalized_query, inline_args = self.extract_query_arguments(query)
raw_query = str(query or "").strip()
search_query = normalized_query or raw_query
if not search_query and inline_args:
search_query = " ".join(f"{k}:{v}" for k, v in inline_args.items())
if not search_query:
return []
view = self._determine_view(search_query, inline_args)
params = self._build_search_params(search_query)
if not params:
return []
@@ -126,6 +176,18 @@ class HIFI(Provider):
return "album" return "album"
return "track" return "track"
def _determine_view(self, query: str, inline_args: Dict[str, Any]) -> str:
if inline_args:
if "artist" in inline_args:
return "artist"
if "album" in inline_args:
return "album"
if "track" in inline_args or "title" in inline_args:
return "track"
if "video" in inline_args or "playlist" in inline_args:
return "track"
return self._get_view_from_query(query)
@staticmethod
def _safe_filename(value: Any, *, fallback: str = "hifi") -> str:
text = str(value or "").strip()
@@ -169,6 +231,56 @@ class HIFI(Provider):
return None
return num if num > 0 else None
def _parse_tidal_url(self, url: str) -> Tuple[str, Optional[int]]:
try:
parsed = urlparse(str(url))
except Exception:
return "", None
parts = [segment for segment in (parsed.path or "").split("/") if segment]
if not parts:
return "", None
idx = 0
if parts[0].lower() == "browse":
idx = 1
if idx >= len(parts):
return "", None
view = parts[idx].lower()
if view not in {"album", "track"}:
return "", None
for segment in parts[idx + 1:]:
identifier = self._parse_int(segment)
if identifier is not None:
return view, identifier
return view, None
def _track_detail_to_result(self, detail: Optional[Dict[str, Any]], track_id: int) -> SearchResult:
if isinstance(detail, dict):
candidate = self._item_to_result(detail)
if candidate is not None:
try:
candidate.full_metadata = dict(detail)
except Exception:
pass
return candidate
title = f"Track {track_id}"
if isinstance(detail, dict):
title = self._stringify(detail.get("title")) or title
return SearchResult(
table="hifi",
title=title,
path=f"hifi://track/{track_id}",
detail=f"id:{track_id}",
annotations=["tidal", "track"],
media_kind="audio",
full_metadata=dict(detail) if isinstance(detail, dict) else {},
)
def _extract_artist_selection_context(self, selected_items: List[Any]) -> List[Tuple[int, str]]:
contexts: List[Tuple[int, str]] = []
seen: set[int] = set()
@@ -589,6 +701,65 @@ class HIFI(Provider):
return results
def _present_album_tracks(
self,
track_results: List[SearchResult],
*,
album_id: Optional[int],
album_title: str,
artist_name: str,
) -> None:
if not track_results:
return
try:
from SYS.rich_display import stdout_console
from SYS.result_table import ResultTable
except Exception:
return
label = album_title or "Album"
if artist_name:
label = f"{artist_name} - {label}"
table = ResultTable(f"HIFI Tracks: {label}").set_preserve_order(True)
table.set_table("hifi.track")
try:
table.set_table_metadata(
{
"provider": "hifi",
"view": "track",
"album_id": album_id,
"album_title": album_title,
"artist_name": artist_name,
}
)
except Exception:
pass
results_payload: List[Dict[str, Any]] = []
for result in track_results:
table.add_result(result)
try:
results_payload.append(result.to_dict())
except Exception:
results_payload.append(
{
"table": getattr(result, "table", "hifi.track"),
"title": getattr(result, "title", ""),
"path": getattr(result, "path", ""),
}
)
pipeline_context.set_last_result_table(table, results_payload)
pipeline_context.set_current_stage_table(table)
try:
stdout_console().print()
stdout_console().print(table)
except Exception:
pass
def _album_item_to_result(self, album: Dict[str, Any], *, artist_name: str) -> Optional[SearchResult]:
if not isinstance(album, dict):
return None
@@ -1080,6 +1251,73 @@ class HIFI(Provider):
)
return materialized
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]:
view, identifier = self._parse_tidal_url(url)
if not view:
return False, None
if view == "track":
if not identifier or output_dir is None:
return False, None
try:
detail = self._fetch_track_details(identifier)
except Exception:
detail = None
result = self._track_detail_to_result(detail, identifier)
try:
downloaded = self.download(result, output_dir)
except Exception:
return False, None
if downloaded:
return True, downloaded
return False, None
if view == "album":
if not identifier:
return False, None
try:
track_results = self._tracks_for_album(
album_id=identifier,
album_title="",
artist_name="",
limit=200,
)
except Exception:
return False, None
if not track_results:
return False, None
album_title = ""
artist_name = ""
metadata = getattr(track_results[0], "full_metadata", None)
if isinstance(metadata, dict):
album_obj = metadata.get("album")
if isinstance(album_obj, dict):
album_title = self._stringify(album_obj.get("title"))
else:
album_title = self._stringify(album_obj or metadata.get("album"))
artists = self._extract_artists(metadata)
if artists:
artist_name = artists[0]
if not album_title:
album_title = f"Album {identifier}"
self._present_album_tracks(
track_results,
album_id=identifier,
album_title=album_title,
artist_name=artist_name,
)
return True, None
return False, None
def _get_api_client_for_base(self, base_url: str) -> Optional[HifiApiClient]:
base = base_url.rstrip("/")
for client in self.api_clients:
@@ -1180,7 +1418,7 @@ class HIFI(Provider):
urls.append(raw.strip())
cleaned = [u.rstrip("/") for u in urls if isinstance(u, str) and u.strip()]
if not cleaned:
cleaned = [URL_API[0]]
return cleaned
def _build_search_params(self, query: str) -> Dict[str, str]:
@@ -1342,58 +1580,15 @@ class HIFI(Provider):
@staticmethod
def _coerce_duration_seconds(value: Any) -> Optional[int]:
return coerce_duration_seconds(value)
@staticmethod
def _stringify(value: Any) -> str:
return stringify(value)
@staticmethod
def _extract_artists(item: Dict[str, Any]) -> List[str]:
return extract_artists(item)
def _item_to_result(self, item: Dict[str, Any]) -> Optional[SearchResult]:
if not isinstance(item, dict):
@@ -1619,52 +1814,7 @@ class HIFI(Provider):
return [(name, value) for name, value in values if value]
def _build_track_tags(self, metadata: Dict[str, Any]) -> set[str]:
return build_track_tags(metadata)
def selector(
self,
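
Editor's note: the new handle_url above routes tidal.com links by view: track URLs are downloaded directly, album URLs expand into a presented track table. The URL parsing tolerates an optional /browse/ path prefix and scans the remaining segments for a numeric id. A small standalone sketch of the expected behavior, reimplemented here for illustration (the provider's actual method is _parse_tidal_url):

    from urllib.parse import urlparse

    def parse_tidal_url(url: str):
        parts = [p for p in (urlparse(url).path or "").split("/") if p]
        if parts and parts[0].lower() == "browse":
            parts = parts[1:]                      # tolerate a /browse/ prefix
        if not parts or parts[0].lower() not in {"album", "track"}:
            return "", None                        # unsupported view
        for segment in parts[1:]:
            if segment.isdigit():
                return parts[0].lower(), int(segment)
        return parts[0].lower(), None

    assert parse_tidal_url("https://tidal.com/browse/track/12345") == ("track", 12345)
    assert parse_tidal_url("https://listen.tidal.com/album/678") == ("album", 678)
    assert parse_tidal_url("https://tidal.com/artist/42") == ("", None)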

View File

@@ -543,9 +543,72 @@ def adjust_output_dir_for_alldebrid(
class AllDebrid(Provider):
# Magnet URIs should be routed through this provider.
TABLE_AUTO_STAGES = {"alldebrid": ["download-file"]}
URL = ("magnet:",) URL = ("magnet:",)
URL_DOMAINS = () URL_DOMAINS = ()
@staticmethod
def _resolve_magnet_spec_from_result(result: Any) -> Optional[str]:
table = getattr(result, "table", None)
media_kind = getattr(result, "media_kind", None)
tags = getattr(result, "tag", None)
full_metadata = getattr(result, "full_metadata", None)
target = getattr(result, "path", None) or getattr(result, "url", None)
if not table or str(table).strip().lower() != "alldebrid":
return None
kind_val = str(media_kind or "").strip().lower()
is_folder = kind_val == "folder"
if not is_folder and isinstance(tags, (list, set)):
for tag in tags:
if str(tag or "").strip().lower() == "folder":
is_folder = True
break
if not is_folder:
return resolve_magnet_spec(str(target or "")) if isinstance(target, str) else None
metadata = full_metadata if isinstance(full_metadata, dict) else {}
candidates: List[str] = []
def _maybe_add(value: Any) -> None:
if isinstance(value, str):
cleaned = value.strip()
if cleaned:
candidates.append(cleaned)
magnet_block = metadata.get("magnet")
if isinstance(magnet_block, dict):
for inner in ("magnet", "magnet_link", "link", "url"):
_maybe_add(magnet_block.get(inner))
for inner in ("hash", "info_hash", "torrenthash", "magnethash"):
_maybe_add(magnet_block.get(inner))
else:
_maybe_add(magnet_block)
for extra in ("magnet_link", "magnet_url", "magnet_spec"):
_maybe_add(metadata.get(extra))
_maybe_add(metadata.get("hash"))
_maybe_add(metadata.get("info_hash"))
for candidate in candidates:
spec = resolve_magnet_spec(candidate)
if spec:
return spec
return resolve_magnet_spec(str(target)) if isinstance(target, str) else None
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]:
spec = resolve_magnet_spec(url)
if not spec:
return False, None
cfg = self.config if isinstance(self.config, dict) else {}
try:
prepare_magnet(spec, cfg)
return True, None
except Exception:
return False, None
@classmethod
def url_patterns(cls) -> Tuple[str, ...]:
# Combine static patterns with cached host domains.
@@ -744,11 +807,42 @@ class AllDebrid(Provider):
except Exception:
return None
def download_items(
self,
result: SearchResult,
output_dir: Path,
*,
emit: Callable[[Path, str, str, Dict[str, Any]], None],
progress: Any,
quiet_mode: bool,
path_from_result: Callable[[Any], Path],
config: Optional[Dict[str, Any]] = None,
) -> int:
spec = self._resolve_magnet_spec_from_result(result)
if not spec:
return 0
cfg = config if isinstance(config, dict) else (self.config or {})
def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
emit(path, file_url, relpath, metadata)
downloaded, _ = download_magnet(
spec,
str(getattr(result, "path", "") or ""),
output_dir,
cfg,
progress,
quiet_mode,
path_from_result,
_on_emit,
)
return downloaded
@staticmethod
def _flatten_files(items: Any,
*,
_prefix: Optional[List[str]] = None) -> Iterable[Dict[str, Any]]:
"""Flatten AllDebrid magnet file tree into file dicts, preserving relative paths.
API commonly returns:
@@ -784,9 +878,7 @@ class AllDebrid(Provider):
name = node.get("n") or node.get("name") name = node.get("n") or node.get("name")
link = node.get("l") or node.get("link") link = node.get("l") or node.get("link")
if isinstance(name, if isinstance(name, str) and name.strip() and isinstance(link, str) and link.strip():
str) and name.strip() and isinstance(link,
str) and link.strip():
rel_parts = prefix + [name.strip()] rel_parts = prefix + [name.strip()]
relpath = "/".join([p for p in rel_parts if p]) relpath = "/".join([p for p in rel_parts if p])
enriched = dict(node) enriched = dict(node)
@@ -932,6 +1024,19 @@ class AllDebrid(Provider):
except Exception:
size_bytes = None
metadata = {
"magnet": magnet_status,
"magnet_id": magnet_id,
"magnet_name": magnet_name,
"relpath": relpath,
"file": file_node,
"provider": "alldebrid",
"provider_view": "files",
}
if file_url:
metadata["_selection_args"] = ["-url", file_url]
metadata["_selection_action"] = ["download-file", "-url", file_url]
results.append(
SearchResult(
table="alldebrid",
@@ -952,15 +1057,7 @@ class AllDebrid(Provider):
("ID", ("ID",
str(magnet_id)), str(magnet_id)),
], ],
full_metadata={ full_metadata=metadata,
"magnet": magnet_status,
"magnet_id": magnet_id,
"magnet_name": magnet_name,
"relpath": relpath,
"file": file_node,
"provider": "alldebrid",
"provider_view": "files",
},
) )
) )
if len(results) >= max(1, limit): if len(results) >= max(1, limit):
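
Editor's note: the metadata dict built above now carries _selection_args and _selection_action alongside the magnet details, and the SearchResult.to_dict change in ProviderCore/base.py (later in this commit) exports _selection_action verbatim. A hypothetical serialized file result, with an illustrative link:

    # Hypothetical shape of a serialized AllDebrid file result after this commit;
    # the URL and file name are illustrative only.
    result_dict = {
        "table": "alldebrid",
        "title": "movie.mkv",
        "path": "https://alldebrid.example/dl/abc123",
        "_selection_args": ["-url", "https://alldebrid.example/dl/abc123"],
        "_selection_action": ["download-file", "-url", "https://alldebrid.example/dl/abc123"],
    }
    # Selecting this row can now replay: download-file -url <link>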

View File

@@ -11,6 +11,15 @@ import subprocess
from API.HTTP import HTTPClient
from ProviderCore.base import SearchResult
try:
from Provider.HIFI import HIFI
except ImportError: # pragma: no cover - optional
HIFI = None
from Provider.tidal_shared import (
build_track_tags,
extract_artists,
stringify,
)
try: # Optional dependency for IMDb scraping
from imdbinfo.services import search_title # type: ignore
except ImportError: # pragma: no cover - optional
@@ -1416,6 +1425,95 @@ except Exception:
# Registry ---------------------------------------------------------------
class TidalMetadataProvider(MetadataProvider):
"""Metadata provider that reuses the HIFI search provider for tidal info."""
@property
def name(self) -> str: # type: ignore[override]
return "tidal"
def __init__(self, config: Optional[Dict[str, Any]] = None) -> None:
if HIFI is None:
raise RuntimeError("HIFI provider unavailable for tidal metadata")
super().__init__(config)
self._provider = HIFI(self.config)
def search(self, query: str, limit: int = 10) -> List[Dict[str, Any]]:
normalized = str(query or "").strip()
if not normalized:
return []
try:
results = self._provider.search(normalized, limit=limit)
except Exception as exc:
debug(f"[tidal-meta] search failed for '{normalized}': {exc}")
return []
items: List[Dict[str, Any]] = []
for result in results:
metadata = getattr(result, "full_metadata", {}) or {}
if not isinstance(metadata, dict):
metadata = {}
title = stringify(metadata.get("title") or result.title)
if not title:
continue
artists = extract_artists(metadata)
artist_display = ", ".join(artists) if artists else stringify(metadata.get("artist"))
album_obj = metadata.get("album")
album = ""
if isinstance(album_obj, dict):
album = stringify(album_obj.get("title"))
else:
album = stringify(metadata.get("album"))
year = stringify(metadata.get("releaseDate") or metadata.get("year") or metadata.get("date"))
track_id = self._provider._parse_track_id(metadata.get("trackId") or metadata.get("id"))
lyrics_data = None
if track_id is not None:
try:
lyrics_data = self._provider._fetch_track_lyrics(track_id)
except Exception as exc:
debug(f"[tidal-meta] lyrics lookup failed for {track_id}: {exc}")
lyrics = None
if isinstance(lyrics_data, dict):
lyrics = stringify(lyrics_data.get("lyrics") or lyrics_data.get("text"))
subtitles = stringify(lyrics_data.get("subtitles"))
if subtitles:
metadata.setdefault("_tidal_lyrics", {})["subtitles"] = subtitles
tags = sorted(build_track_tags(metadata))
items.append({
"title": title,
"artist": artist_display,
"album": album,
"year": year,
"lyrics": lyrics,
"tags": tags,
"provider": self.name,
"path": getattr(result, "path", ""),
"track_id": track_id,
"full_metadata": metadata,
})
return items
def to_tags(self, item: Dict[str, Any]) -> List[str]:
tags: List[str] = []
for value in item.get("tags", []):
value_text = stringify(value)
if value_text:
normalized = value_text.lower()
if normalized in {"tidal", "lossless"}:
continue
if normalized.startswith("quality:lossless"):
continue
tags.append(value_text)
return tags
_METADATA_PROVIDERS: Dict[str,
Type[MetadataProvider]] = {
"itunes": ITunesProvider,
@@ -1426,6 +1524,7 @@ _METADATA_PROVIDERS: Dict[str,
"musicbrainz": MusicBrainzMetadataProvider, "musicbrainz": MusicBrainzMetadataProvider,
"imdb": ImdbMetadataProvider, "imdb": ImdbMetadataProvider,
"ytdlp": YtdlpMetadataProvider, "ytdlp": YtdlpMetadataProvider,
"tidal": TidalMetadataProvider,
}
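
Editor's note: with the registry entry in place, "tidal" resolves like any other metadata provider. A minimal usage sketch (assumes the optional HIFI import succeeded and that a default config is acceptable; the query is illustrative):

    provider = _METADATA_PROVIDERS["tidal"]()   # TidalMetadataProvider
    for item in provider.search("daft punk discovery", limit=5):
        print(item["title"], item["artist"], provider.to_tags(item))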

Provider/tidal_shared.py (new file, 109 lines)
View File

@@ -0,0 +1,109 @@
from __future__ import annotations
from typing import Any, Dict, List, Optional, Set
def stringify(value: Any) -> str:
text = str(value or "").strip()
return text
def extract_artists(item: Dict[str, Any]) -> List[str]:
names: List[str] = []
artists = item.get("artists")
if isinstance(artists, list):
for artist in artists:
if isinstance(artist, dict):
name = stringify(artist.get("name"))
if name and name not in names:
names.append(name)
if not names:
primary = item.get("artist")
if isinstance(primary, dict):
name = stringify(primary.get("name"))
if name:
names.append(name)
return names
def build_track_tags(metadata: Dict[str, Any]) -> Set[str]:
tags: Set[str] = {"tidal"}
audio_quality = stringify(metadata.get("audioQuality"))
if audio_quality:
tags.add(f"quality:{audio_quality.lower()}")
media_md = metadata.get("mediaMetadata")
if isinstance(media_md, dict):
tag_values = media_md.get("tags") or []
for tag in tag_values:
if isinstance(tag, str):
candidate = tag.strip()
if candidate:
tags.add(candidate.lower())
title_text = stringify(metadata.get("title"))
if title_text:
tags.add(f"title:{title_text}")
artists = extract_artists(metadata)
for artist in artists:
artist_clean = stringify(artist)
if artist_clean:
tags.add(f"artist:{artist_clean}")
album_title = ""
album_obj = metadata.get("album")
if isinstance(album_obj, dict):
album_title = stringify(album_obj.get("title"))
else:
album_title = stringify(metadata.get("album"))
if album_title:
tags.add(f"album:{album_title}")
track_no_val = metadata.get("trackNumber") or metadata.get("track_number")
if track_no_val is not None:
try:
track_int = int(track_no_val)
if track_int > 0:
tags.add(f"track:{track_int}")
except Exception:
track_text = stringify(track_no_val)
if track_text:
tags.add(f"track:{track_text}")
return tags
def coerce_duration_seconds(value: Any) -> Optional[int]:
candidates = [value]
try:
if isinstance(value, dict):
for key in (
"duration",
"durationSeconds",
"duration_sec",
"duration_ms",
"durationMillis",
):
if key in value:
candidates.append(value.get(key))
except Exception:
pass
for cand in candidates:
try:
if cand is None:
continue
text = str(cand).strip()
if text.lower().endswith("ms"):
text = text[:-2].strip()
num = float(text)
if num <= 0:
continue
if num > 10_000:
num = num / 1000.0
return int(round(num))
except Exception:
continue
return None
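
Editor's note: coerce_duration_seconds strips a trailing "ms" suffix, then treats any numeric value above 10_000 as milliseconds. A few expected input/output pairs, derived directly from the code above:

    from Provider.tidal_shared import coerce_duration_seconds

    assert coerce_duration_seconds(254) == 254             # plain seconds
    assert coerce_duration_seconds("254000ms") == 254      # "ms" stripped, then treated as milliseconds
    assert coerce_duration_seconds(254000) == 254          # large bare number treated as milliseconds
    assert coerce_duration_seconds({"duration_ms": 91000}) == 91
    assert coerce_duration_seconds(None) is None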

View File

@@ -5,7 +5,7 @@ import re
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple, Callable
@dataclass
@@ -62,6 +62,22 @@ class SearchResult:
if selection_args:
out["_selection_args"] = selection_args
try:
selection_action = getattr(self, "selection_action", None)
except Exception:
selection_action = None
if selection_action is None:
try:
fm = getattr(self, "full_metadata", None)
if isinstance(fm, dict):
selection_action = fm.get("_selection_action") or fm.get("selection_action")
except Exception:
selection_action = None
if selection_action:
normalized = [str(x) for x in selection_action if x is not None]
if normalized:
out["_selection_action"] = normalized
return out
@@ -167,6 +183,35 @@ class Provider(ABC):
return None
def download_items(
self,
result: SearchResult,
output_dir: Path,
*,
emit: Callable[[Path, str, str, Dict[str, Any]], None],
progress: Any,
quiet_mode: bool,
path_from_result: Callable[[Any], Path],
config: Optional[Dict[str, Any]] = None,
) -> int:
"""Optional multi-item download hook (default no-op)."""
_ = result
_ = output_dir
_ = emit
_ = progress
_ = quiet_mode
_ = path_from_result
_ = config
return 0
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]:
"""Optional provider override to parse and act on URLs."""
_ = url
_ = output_dir
return False, None
def upload(self, file_path: str, **kwargs: Any) -> str:
"""Upload a file and return a URL or identifier."""
raise NotImplementedError(f"Provider '{self.name}' does not support upload")
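
Editor's note: both new hooks default to no-ops, so existing providers are unaffected; a provider opts in by overriding them, as AllDebrid does in this commit. A minimal hypothetical subclass (the scheme and file name are made up; other required Provider methods are omitted for brevity):

    from pathlib import Path
    from typing import Optional, Tuple
    from ProviderCore.base import Provider

    class ExampleProvider(Provider):
        # Hypothetical provider opting into the new URL hook.
        def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]:
            if not url.startswith("example://"):
                return False, None          # decline; caller falls through to generic handling
            target = (output_dir or Path(".")) / "payload.bin"
            # ... fetch the payload into target here ...
            return True, target             # handled, and a concrete file was produced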

View File

@@ -135,6 +135,8 @@ class HydrusNetwork(Store):
instance_name=self.NAME
)
self._service_key_cache: Dict[str, Optional[str]] = {}
# Best-effort total count (used for startup diagnostics). Avoid heavy payloads.
# Some Hydrus setups appear to return no count via the CBOR client for this endpoint,
# so prefer a direct JSON request with a short timeout.
@@ -143,6 +145,30 @@ class HydrusNetwork(Store):
except Exception:
pass
def _get_service_key(self, service_name: str, *, refresh: bool = False) -> Optional[str]:
"""Resolve (and cache) the Hydrus service key for the given service name."""
normalized = str(service_name or "my tags").strip()
if not normalized:
normalized = "my tags"
cache_key = normalized.lower()
if not refresh and cache_key in self._service_key_cache:
return self._service_key_cache[cache_key]
client = self._client
if client is None:
self._service_key_cache[cache_key] = None
return None
try:
from API import HydrusNetwork as hydrus_wrapper
resolved = hydrus_wrapper.get_tag_service_key(client, normalized)
except Exception:
resolved = None
self._service_key_cache[cache_key] = resolved
return resolved
def get_total_count(self, *, refresh: bool = False) -> Optional[int]:
"""Best-effort total file count for this Hydrus instance.
@@ -1404,8 +1430,6 @@ class HydrusNetwork(Store):
where source is always "hydrus" where source is always "hydrus"
""" """
try: try:
from API import HydrusNetwork as hydrus_wrapper
file_hash = str(file_identifier or "").strip().lower()
if len(file_hash) != 64 or not all(ch in "0123456789abcdef"
for ch in file_hash):
@@ -1441,9 +1465,8 @@ class HydrusNetwork(Store):
)
return [], "unknown"
# Extract tags using service name
service_name = kwargs.get("service_name") or "my tags"
service_key = self._get_service_key(service_name)
# Extract tags from metadata
tags = self._extract_tags_from_hydrus_meta(meta, service_key, service_name)
@@ -1495,14 +1518,7 @@ class HydrusNetwork(Store):
return True
service_key: Optional[str] = None
service_key = self._get_service_key(service_name)
mutate_success = False
if service_key:
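
Editor's note: _get_service_key memoizes per instance, keyed by the lowercased service name, and caches negative results too, so repeated tag reads and mutations stop re-resolving the key through the Hydrus API. Expected behavior, assuming a constructed store (the setup line is illustrative):

    store = HydrusNetwork(config)                      # illustrative construction
    key_a = store._get_service_key("My Tags")          # resolves once via the API, cached under "my tags"
    key_b = store._get_service_key("my tags")          # cache hit, no second API round-trip
    assert key_a == key_b
    store._get_service_key("my tags", refresh=True)    # bypasses the cache and re-resolves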

View File

@@ -17,7 +17,6 @@ from contextlib import AbstractContextManager, nullcontext
import requests
from API.alldebrid import is_magnet_link
from Provider import internetarchive as ia_provider
from Provider import alldebrid as ad_provider
from Provider import openlibrary as ol_provider
@@ -279,15 +278,34 @@ class Download_File(Cmdlet):
except Exception:
pass
provider_for_url = None
if provider_name and get_provider is not None:
provider_for_url = get_provider(provider_name, config)
if provider_for_url is not None:
try:
handled, handled_path = provider_for_url.handle_url(
str(url),
output_dir=final_output_dir,
)
except Exception as exc:
raise DownloadError(str(exc))
if handled:
if handled_path:
downloaded_path = Path(handled_path)
self._emit_local_file(
downloaded_path=downloaded_path,
source=str(url),
title_hint=downloaded_path.stem,
tags_hint=None,
media_kind_hint="file",
full_metadata=None,
provider_hint=str(provider_name),
progress=progress,
config=config,
)
downloaded_count += 1
continue
if provider_name and get_provider is not None and SearchResult is not None:
# OpenLibrary URLs should be handled by the OpenLibrary provider.
@@ -841,16 +859,16 @@ class Download_File(Cmdlet):
return expanded_items
def _process_provider_items(self,
*,
piped_items: Sequence[Any],
final_output_dir: Path,
config: Dict[str,
Any],
quiet_mode: bool,
registry: Dict[str,
Any],
progress: PipelineProgress,
) -> tuple[int, int]:
downloaded_count = 0
queued_magnet_submissions = 0
get_search_provider = registry.get("get_search_provider")
@@ -916,9 +934,10 @@ class Download_File(Cmdlet):
downloaded_path: Optional[Path] = None
attempted_provider_download = False
provider_sr = None
provider_obj = None
if table and get_search_provider and SearchResult:
provider_obj = get_search_provider(str(table), config)
if provider_obj is not None:
attempted_provider_download = True
sr = SearchResult(
table=str(table),
@@ -944,9 +963,53 @@ class Download_File(Cmdlet):
except Exception:
output_dir = final_output_dir
downloaded_path = provider_obj.download(sr, output_dir)
provider_sr = sr
if downloaded_path is None:
download_items = getattr(provider_obj, "download_items", None)
if callable(download_items):
def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
title_hint = metadata.get("name") or relpath or title
self._emit_local_file(
downloaded_path=path,
source=file_url or target,
title_hint=title_hint,
tags_hint=tags_list,
media_kind_hint="file",
full_metadata=metadata,
progress=progress,
config=config,
provider_hint=str(table) if table else None,
)
try:
downloaded_extra = download_items(
sr,
output_dir,
emit=_on_emit,
progress=progress,
quiet_mode=quiet_mode,
path_from_result=self._path_from_download_result,
config=config,
)
except TypeError:
downloaded_extra = download_items(
sr,
output_dir,
emit=_on_emit,
progress=progress,
quiet_mode=quiet_mode,
path_from_result=self._path_from_download_result,
)
except Exception:
downloaded_extra = 0
if downloaded_extra:
downloaded_count += int(downloaded_extra)
continue
# OpenLibrary: if provider download failed, do NOT try to download the OpenLibrary page HTML.
if (downloaded_path is None and attempted_provider_download
and str(table or "").lower() == "openlibrary"):
@@ -1044,45 +1107,6 @@ class Download_File(Cmdlet):
continue
# Magnet targets (e.g., torrent provider results) -> submit/download via AllDebrid
if downloaded_path is None and isinstance(target, str) and is_magnet_link(str(target)):
magnet_spec = ad_provider.resolve_magnet_spec(str(target))
if magnet_spec:
def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
title_hint = metadata.get("name") or relpath or title
self._emit_local_file(
downloaded_path=path,
source=file_url or target,
title_hint=title_hint,
tags_hint=None,
media_kind_hint="file",
full_metadata=metadata,
progress=progress,
config=config,
provider_hint="alldebrid",
)
downloaded, magnet_id = ad_provider.download_magnet(
magnet_spec,
str(target),
final_output_dir,
config,
progress,
quiet_mode,
self._path_from_download_result,
_on_emit,
)
if downloaded > 0:
downloaded_count += downloaded
continue
# If queued but not yet ready, skip the generic unsupported-target error.
if magnet_id is not None:
queued_magnet_submissions += 1
continue
# Fallback: if we have a direct HTTP URL, download it directly
if (downloaded_path is None and isinstance(target,
str)
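
Editor's note: the net effect of this file's changes is that download-file no longer special-cases magnet links; whichever provider is resolved for a URL gets first refusal via handle_url, and AllDebrid's override (earlier in this commit) reproduces the old magnet path. A simplified restatement of the new dispatch as a standalone function (the names are stand-ins for the cmdlet's locals):

    def dispatch_url(url, provider_for_url, final_output_dir, emit_local_file):
        """Give the resolved provider first refusal via handle_url (simplified)."""
        if provider_for_url is None:
            return False
        handled, handled_path = provider_for_url.handle_url(str(url), output_dir=final_output_dir)
        if handled and handled_path:
            emit_local_file(handled_path)   # stand-in for self._emit_local_file(...)
        return handled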

View File

@@ -3,11 +3,10 @@ typer>=0.9.0
rich>=13.7.0
prompt-toolkit>=3.0.0
textual>=0.30.0
pip-system-certs
# Media processing and downloading
yt-dlp[default]
requests>=2.31.0
httpx>=0.25.0
# Ensure requests can detect encodings and ship certificates