fdsfjlk
This commit is contained in:
166
CLI.py
166
CLI.py
@@ -636,6 +636,7 @@ if (
|
||||
and Completion is not None
|
||||
and Completer is not None
|
||||
and Document is not None
|
||||
and Lexer is not None
|
||||
):
|
||||
CompletionType = cast(Any, Completion)
|
||||
|
||||
@@ -934,7 +935,11 @@ def _create_cmdlet_cli():
|
||||
prompt_text = "🜂🜄🜁🜃|"
|
||||
|
||||
# Prepare startup table (always attempt; fall back gracefully if import fails)
|
||||
startup_table = ResultTable("*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********") if RESULT_TABLE_AVAILABLE else None
|
||||
startup_table = None
|
||||
if RESULT_TABLE_AVAILABLE and ResultTable is not None:
|
||||
startup_table = ResultTable(
|
||||
"*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"
|
||||
)
|
||||
if startup_table:
|
||||
startup_table.set_no_choice(True).set_preserve_order(True)
|
||||
|
||||
@@ -1173,7 +1178,7 @@ def _create_cmdlet_cli():
|
||||
|
||||
api_key = _get_debrid_api_key(config)
|
||||
if not api_key:
|
||||
_add_startup_check("DISABLED", display, prov, "Not configured")
|
||||
_add_startup_check("DISABLED", display, provider=prov, detail="Not configured")
|
||||
else:
|
||||
from API.alldebrid import AllDebridClient
|
||||
|
||||
@@ -1347,7 +1352,7 @@ def _create_cmdlet_cli():
|
||||
except Exception:
|
||||
pass # Silently ignore if config loading fails
|
||||
|
||||
if PROMPT_TOOLKIT_AVAILABLE and PromptSession is not None and CmdletCompleter is not None:
|
||||
if PROMPT_TOOLKIT_AVAILABLE and PromptSession is not None and CmdletCompleter is not None and Style is not None:
|
||||
completer = CmdletCompleter()
|
||||
|
||||
# Define style for syntax highlighting
|
||||
@@ -1363,7 +1368,7 @@ def _create_cmdlet_cli():
|
||||
# Toolbar state for background notifications
|
||||
class ToolbarState:
|
||||
text = ""
|
||||
last_update_time = 0
|
||||
last_update_time: float = 0.0
|
||||
clear_timer: Optional[threading.Timer] = None
|
||||
|
||||
toolbar_state = ToolbarState()
|
||||
@@ -1677,6 +1682,112 @@ def _execute_pipeline(tokens: list):
|
||||
if isinstance(config, dict):
|
||||
# Request terminal-only background updates for this pipeline session
|
||||
config['_quiet_background_output'] = True
|
||||
|
||||
def _maybe_run_class_selector(selected_items: list, *, stage_is_last: bool) -> bool:
|
||||
"""Allow providers/stores to override `@N` selection semantics."""
|
||||
if not stage_is_last:
|
||||
return False
|
||||
|
||||
# Gather potential keys from table + selected rows.
|
||||
candidates: list[str] = []
|
||||
seen: set[str] = set()
|
||||
|
||||
def _add(value) -> None:
|
||||
try:
|
||||
text = str(value or '').strip().lower()
|
||||
except Exception:
|
||||
return
|
||||
if not text or text in seen:
|
||||
return
|
||||
seen.add(text)
|
||||
candidates.append(text)
|
||||
|
||||
try:
|
||||
current_table = ctx.get_current_stage_table() or ctx.get_last_result_table()
|
||||
_add(current_table.table if current_table and hasattr(current_table, 'table') else None)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
for item in selected_items or []:
|
||||
if isinstance(item, dict):
|
||||
_add(item.get('provider'))
|
||||
_add(item.get('store'))
|
||||
_add(item.get('table'))
|
||||
else:
|
||||
_add(getattr(item, 'provider', None))
|
||||
_add(getattr(item, 'store', None))
|
||||
_add(getattr(item, 'table', None))
|
||||
|
||||
# Provider selector
|
||||
try:
|
||||
from ProviderCore.registry import get_provider as _get_provider
|
||||
except Exception:
|
||||
_get_provider = None
|
||||
|
||||
if _get_provider is not None:
|
||||
for key in candidates:
|
||||
try:
|
||||
provider = _get_provider(key, config)
|
||||
except Exception:
|
||||
continue
|
||||
try:
|
||||
handled = bool(provider.selector(selected_items, ctx=ctx, stage_is_last=True))
|
||||
except TypeError:
|
||||
# Backwards-compat: selector(selected_items)
|
||||
handled = bool(provider.selector(selected_items))
|
||||
except Exception as exc:
|
||||
print(f"{key} selector failed: {exc}\n")
|
||||
return True
|
||||
if handled:
|
||||
return True
|
||||
|
||||
# Store selector
|
||||
store_keys: list[str] = []
|
||||
for item in selected_items or []:
|
||||
if isinstance(item, dict):
|
||||
v = item.get('store')
|
||||
else:
|
||||
v = getattr(item, 'store', None)
|
||||
try:
|
||||
name = str(v or '').strip()
|
||||
except Exception:
|
||||
name = ''
|
||||
if name:
|
||||
store_keys.append(name)
|
||||
|
||||
if store_keys:
|
||||
try:
|
||||
from Store.registry import Store as _StoreRegistry
|
||||
store_registry = _StoreRegistry(config, suppress_debug=True)
|
||||
try:
|
||||
_backend_names = list(store_registry.list_backends())
|
||||
except Exception:
|
||||
_backend_names = []
|
||||
_backend_by_lower = {str(n).lower(): str(n) for n in _backend_names if str(n).strip()}
|
||||
for name in store_keys:
|
||||
resolved_name = name
|
||||
if not store_registry.is_available(resolved_name):
|
||||
try:
|
||||
resolved_name = _backend_by_lower.get(str(name).lower(), name)
|
||||
except Exception:
|
||||
resolved_name = name
|
||||
if not store_registry.is_available(resolved_name):
|
||||
continue
|
||||
backend = store_registry[resolved_name]
|
||||
selector = getattr(backend, 'selector', None)
|
||||
if selector is None:
|
||||
continue
|
||||
try:
|
||||
handled = bool(selector(selected_items, ctx=ctx, stage_is_last=True))
|
||||
except TypeError:
|
||||
handled = bool(selector(selected_items))
|
||||
if handled:
|
||||
return True
|
||||
except Exception:
|
||||
# Store init failure should not break normal selection.
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
# Check if the first stage has @ selection - if so, apply it before pipeline execution
|
||||
first_stage_tokens = stages[0] if stages else []
|
||||
@@ -1827,6 +1938,10 @@ def _execute_pipeline(tokens: list):
|
||||
try:
|
||||
filtered = [resolved_items[i] for i in first_stage_selection_indices if 0 <= i < len(resolved_items)]
|
||||
if filtered:
|
||||
# Allow providers/stores to override selection behavior (e.g., Matrix room picker).
|
||||
if _maybe_run_class_selector(filtered, stage_is_last=(not stages)):
|
||||
return
|
||||
|
||||
# Convert filtered items to PipeObjects for consistent pipeline handling
|
||||
from cmdlet._shared import coerce_to_pipe_object
|
||||
filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
|
||||
@@ -2011,17 +2126,21 @@ def _execute_pipeline(tokens: list):
|
||||
# If not expanding, use as filter
|
||||
if not should_expand_to_command:
|
||||
# This is a selection stage - filter piped results
|
||||
# Prefer selecting from the active result context even when nothing is piped.
|
||||
# Some cmdlets present a selectable table and rely on @N afterwards.
|
||||
if piped_result is None:
|
||||
print(f"No piped results to select from with {cmd_name}\n")
|
||||
pipeline_status = "failed"
|
||||
pipeline_error = f"Selection {cmd_name} without upstream results"
|
||||
return
|
||||
|
||||
# Normalize piped_result to always be a list for indexing
|
||||
if isinstance(piped_result, dict) or not isinstance(piped_result, (list, tuple)):
|
||||
piped_result_list = [piped_result]
|
||||
piped_result_list = ctx.get_last_result_items()
|
||||
if not piped_result_list:
|
||||
print(f"No piped results to select from with {cmd_name}\n")
|
||||
pipeline_status = "failed"
|
||||
pipeline_error = f"Selection {cmd_name} without upstream results"
|
||||
return
|
||||
else:
|
||||
piped_result_list = piped_result
|
||||
# Normalize piped_result to always be a list for indexing
|
||||
if isinstance(piped_result, dict) or not isinstance(piped_result, (list, tuple)):
|
||||
piped_result_list = [piped_result]
|
||||
else:
|
||||
piped_result_list = piped_result
|
||||
|
||||
# Get indices to select
|
||||
if is_select_all:
|
||||
@@ -2038,12 +2157,29 @@ def _execute_pipeline(tokens: list):
|
||||
stage_table = ctx.get_display_table()
|
||||
if not stage_table:
|
||||
stage_table = ctx.get_last_result_table()
|
||||
resolved_list = _resolve_items_for_selection(stage_table, list(piped_result_list))
|
||||
_debug_selection("pipeline-stage", selection_indices, stage_table, piped_result_list, resolved_list)
|
||||
# Prefer selecting from the displayed table's items if available.
|
||||
# This matters when a cmdlet shows a selectable overlay table but does not emit
|
||||
# items downstream (e.g., add-file -provider matrix shows rooms, but the piped
|
||||
# value is still the original file).
|
||||
selection_base = list(piped_result_list)
|
||||
try:
|
||||
table_rows = len(stage_table.rows) if stage_table and hasattr(stage_table, 'rows') and stage_table.rows else None
|
||||
last_items = ctx.get_last_result_items()
|
||||
if last_items and table_rows is not None and len(last_items) == table_rows:
|
||||
selection_base = list(last_items)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
resolved_list = _resolve_items_for_selection(stage_table, selection_base)
|
||||
_debug_selection("pipeline-stage", selection_indices, stage_table, selection_base, resolved_list)
|
||||
|
||||
try:
|
||||
filtered = [resolved_list[i] for i in selection_indices if 0 <= i < len(resolved_list)]
|
||||
if filtered:
|
||||
# Allow providers/stores to override selection behavior (e.g., Matrix room picker).
|
||||
if _maybe_run_class_selector(filtered, stage_is_last=(stage_index + 1 >= len(stages))):
|
||||
return
|
||||
|
||||
# Convert filtered items to PipeObjects for consistent pipeline handling
|
||||
from cmdlet._shared import coerce_to_pipe_object
|
||||
filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
"""Medeia-Macina package - Media management system."""
|
||||
__version__ = "0.1.0"
|
||||
@@ -1,13 +0,0 @@
|
||||
"""Entry point wrapper for Medeia-Macina CLI."""
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add the parent directory to sys.path so we can import CLI
|
||||
root_dir = Path(__file__).parent.parent
|
||||
if str(root_dir) not in sys.path:
|
||||
sys.path.insert(0, str(root_dir))
|
||||
|
||||
from CLI import main
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
BIN
Log/medeia_macina/telegram.session
Normal file
BIN
Log/medeia_macina/telegram.session
Normal file
Binary file not shown.
380
MPV/LUA/main.lua
380
MPV/LUA/main.lua
@@ -268,6 +268,9 @@ local _cached_store_names = {}
|
||||
local _store_cache_loaded = false
|
||||
|
||||
local _pipeline_helper_started = false
|
||||
local _last_ipc_error = ''
|
||||
local _last_ipc_last_req_json = ''
|
||||
local _last_ipc_last_resp_json = ''
|
||||
|
||||
local function _is_pipeline_helper_ready()
|
||||
local ready = mp.get_property_native(PIPELINE_READY_PROP)
|
||||
@@ -431,8 +434,10 @@ end
|
||||
local ensure_pipeline_helper_running
|
||||
|
||||
local function _run_helper_request_response(req, timeout_seconds)
|
||||
_last_ipc_error = ''
|
||||
if not ensure_pipeline_helper_running() then
|
||||
_lua_log('ipc: helper not running; cannot execute request')
|
||||
_last_ipc_error = 'helper not running'
|
||||
return nil
|
||||
end
|
||||
|
||||
@@ -445,7 +450,9 @@ local function _run_helper_request_response(req, timeout_seconds)
|
||||
mp.wait_event(0.05)
|
||||
end
|
||||
if not _is_pipeline_helper_ready() then
|
||||
_lua_log('ipc: helper not ready; ready=' .. tostring(mp.get_property_native(PIPELINE_READY_PROP)))
|
||||
local rv = tostring(mp.get_property_native(PIPELINE_READY_PROP))
|
||||
_lua_log('ipc: helper not ready; ready=' .. rv)
|
||||
_last_ipc_error = 'helper not ready (ready=' .. rv .. ')'
|
||||
_pipeline_helper_started = false
|
||||
return nil
|
||||
end
|
||||
@@ -471,13 +478,21 @@ local function _run_helper_request_response(req, timeout_seconds)
|
||||
end
|
||||
_lua_log('ipc: send request id=' .. tostring(id) .. ' ' .. label)
|
||||
|
||||
local req_json = utils.format_json(req)
|
||||
_last_ipc_last_req_json = req_json
|
||||
mp.set_property(PIPELINE_RESP_PROP, '')
|
||||
mp.set_property(PIPELINE_REQ_PROP, utils.format_json(req))
|
||||
mp.set_property(PIPELINE_REQ_PROP, req_json)
|
||||
-- Read-back for debugging: confirms MPV accepted the property write.
|
||||
local echoed = mp.get_property(PIPELINE_REQ_PROP) or ''
|
||||
if echoed == '' then
|
||||
_lua_log('ipc: WARNING request property echoed empty after set')
|
||||
end
|
||||
|
||||
local deadline = mp.get_time() + (timeout_seconds or 5)
|
||||
while mp.get_time() < deadline do
|
||||
local resp_json = mp.get_property(PIPELINE_RESP_PROP)
|
||||
if resp_json and resp_json ~= '' then
|
||||
_last_ipc_last_resp_json = resp_json
|
||||
local ok, resp = pcall(utils.parse_json, resp_json)
|
||||
if ok and resp and resp.id == id then
|
||||
_lua_log('ipc: got response id=' .. tostring(id) .. ' success=' .. tostring(resp.success))
|
||||
@@ -488,6 +503,7 @@ local function _run_helper_request_response(req, timeout_seconds)
|
||||
end
|
||||
|
||||
_lua_log('ipc: timeout waiting response; ' .. label)
|
||||
_last_ipc_error = 'timeout waiting response (' .. label .. ')'
|
||||
_pipeline_helper_started = false
|
||||
return nil
|
||||
end
|
||||
@@ -593,12 +609,39 @@ end)
|
||||
local _pending_download = nil
|
||||
local _pending_format_change = nil
|
||||
|
||||
-- Per-file state (class-like) for format caching.
|
||||
local FileState = {}
|
||||
FileState.__index = FileState
|
||||
|
||||
function FileState.new()
|
||||
return setmetatable({
|
||||
url = nil,
|
||||
formats = nil,
|
||||
formats_table = nil, -- back-compat alias
|
||||
}, FileState)
|
||||
end
|
||||
|
||||
function FileState:has_formats()
|
||||
return type(self.formats) == 'table'
|
||||
and type(self.formats.rows) == 'table'
|
||||
and #self.formats.rows > 0
|
||||
end
|
||||
|
||||
function FileState:set_formats(url, tbl)
|
||||
self.url = url
|
||||
self.formats = tbl
|
||||
self.formats_table = tbl
|
||||
end
|
||||
|
||||
M.file = M.file or FileState.new()
|
||||
|
||||
-- Cache yt-dlp format lists per URL so Change Format is instant.
|
||||
M.file = M.file or {}
|
||||
M.file.formats_table = nil
|
||||
M.file.url = nil
|
||||
local _formats_cache = {}
|
||||
local _formats_inflight = {}
|
||||
local _formats_waiters = {}
|
||||
|
||||
local _ipc_async_busy = false
|
||||
local _ipc_async_queue = {}
|
||||
|
||||
local function _is_http_url(u)
|
||||
if type(u) ~= 'string' then
|
||||
@@ -615,8 +658,13 @@ local function _cache_formats_for_url(url, tbl)
|
||||
return
|
||||
end
|
||||
_formats_cache[url] = { table = tbl, ts = mp.get_time() }
|
||||
M.file.url = url
|
||||
M.file.formats_table = tbl
|
||||
if type(M.file) == 'table' and M.file.set_formats then
|
||||
M.file:set_formats(url, tbl)
|
||||
else
|
||||
M.file.url = url
|
||||
M.file.formats = tbl
|
||||
M.file.formats_table = tbl
|
||||
end
|
||||
end
|
||||
|
||||
local function _get_cached_formats_table(url)
|
||||
@@ -630,42 +678,175 @@ local function _get_cached_formats_table(url)
|
||||
return nil
|
||||
end
|
||||
|
||||
local function _prefetch_formats_for_url(url)
|
||||
url = tostring(url or '')
|
||||
local function _run_helper_request_async(req, timeout_seconds, cb)
|
||||
cb = cb or function() end
|
||||
|
||||
if _ipc_async_busy then
|
||||
_ipc_async_queue[#_ipc_async_queue + 1] = { req = req, timeout = timeout_seconds, cb = cb }
|
||||
return
|
||||
end
|
||||
_ipc_async_busy = true
|
||||
|
||||
local function done(resp, err)
|
||||
_ipc_async_busy = false
|
||||
cb(resp, err)
|
||||
|
||||
if #_ipc_async_queue > 0 then
|
||||
local next_job = table.remove(_ipc_async_queue, 1)
|
||||
-- Schedule next job slightly later to let mpv deliver any pending events.
|
||||
mp.add_timeout(0.01, function()
|
||||
_run_helper_request_async(next_job.req, next_job.timeout, next_job.cb)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
if type(req) ~= 'table' then
|
||||
done(nil, 'invalid request')
|
||||
return
|
||||
end
|
||||
|
||||
ensure_mpv_ipc_server()
|
||||
if not ensure_pipeline_helper_running() then
|
||||
done(nil, 'helper not running')
|
||||
return
|
||||
end
|
||||
|
||||
-- Assign id.
|
||||
local id = tostring(req.id or '')
|
||||
if id == '' then
|
||||
id = tostring(math.floor(mp.get_time() * 1000)) .. '-' .. tostring(math.random(100000, 999999))
|
||||
req.id = id
|
||||
end
|
||||
|
||||
local label = ''
|
||||
if req.op then
|
||||
label = 'op=' .. tostring(req.op)
|
||||
elseif req.pipeline then
|
||||
label = 'cmd=' .. tostring(req.pipeline)
|
||||
else
|
||||
label = '(unknown)'
|
||||
end
|
||||
|
||||
-- Wait for helper READY without blocking the UI.
|
||||
local ready_deadline = mp.get_time() + 3.0
|
||||
local ready_timer
|
||||
ready_timer = mp.add_periodic_timer(0.05, function()
|
||||
if _is_pipeline_helper_ready() then
|
||||
ready_timer:kill()
|
||||
|
||||
_lua_log('ipc-async: send request id=' .. tostring(id) .. ' ' .. label)
|
||||
local req_json = utils.format_json(req)
|
||||
_last_ipc_last_req_json = req_json
|
||||
|
||||
mp.set_property(PIPELINE_RESP_PROP, '')
|
||||
mp.set_property(PIPELINE_REQ_PROP, req_json)
|
||||
|
||||
local deadline = mp.get_time() + (timeout_seconds or 5)
|
||||
local poll_timer
|
||||
poll_timer = mp.add_periodic_timer(0.05, function()
|
||||
if mp.get_time() >= deadline then
|
||||
poll_timer:kill()
|
||||
done(nil, 'timeout waiting response (' .. label .. ')')
|
||||
return
|
||||
end
|
||||
|
||||
local resp_json = mp.get_property(PIPELINE_RESP_PROP)
|
||||
if resp_json and resp_json ~= '' then
|
||||
_last_ipc_last_resp_json = resp_json
|
||||
local ok, resp = pcall(utils.parse_json, resp_json)
|
||||
if ok and resp and resp.id == id then
|
||||
poll_timer:kill()
|
||||
_lua_log('ipc-async: got response id=' .. tostring(id) .. ' success=' .. tostring(resp.success))
|
||||
done(resp, nil)
|
||||
end
|
||||
end
|
||||
end)
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
if mp.get_time() >= ready_deadline then
|
||||
ready_timer:kill()
|
||||
done(nil, 'helper not ready')
|
||||
return
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
function FileState:fetch_formats(cb)
|
||||
local url = tostring(self.url or '')
|
||||
if url == '' or not _is_http_url(url) then
|
||||
if cb then cb(false, 'not a url') end
|
||||
return
|
||||
end
|
||||
|
||||
-- Only applies to plain URLs (not store hash URLs).
|
||||
if _extract_store_hash(url) then
|
||||
if cb then cb(false, 'store-hash url') end
|
||||
return
|
||||
end
|
||||
|
||||
if _get_cached_formats_table(url) then
|
||||
-- Cache hit.
|
||||
local cached = _get_cached_formats_table(url)
|
||||
if type(cached) == 'table' then
|
||||
self:set_formats(url, cached)
|
||||
if cb then cb(true, nil) end
|
||||
return
|
||||
end
|
||||
|
||||
-- In-flight: register waiter.
|
||||
if _formats_inflight[url] then
|
||||
_formats_waiters[url] = _formats_waiters[url] or {}
|
||||
if cb then table.insert(_formats_waiters[url], cb) end
|
||||
return
|
||||
end
|
||||
_formats_inflight[url] = true
|
||||
_formats_waiters[url] = _formats_waiters[url] or {}
|
||||
if cb then table.insert(_formats_waiters[url], cb) end
|
||||
|
||||
mp.add_timeout(0.01, function()
|
||||
if _get_cached_formats_table(url) then
|
||||
_formats_inflight[url] = nil
|
||||
return
|
||||
end
|
||||
|
||||
ensure_mpv_ipc_server()
|
||||
local resp = _run_helper_request_response({ op = 'ytdlp-formats', data = { url = url } }, 20)
|
||||
-- Async request so the UI never blocks.
|
||||
_run_helper_request_async({ op = 'ytdlp-formats', data = { url = url } }, 90, function(resp, err)
|
||||
_formats_inflight[url] = nil
|
||||
|
||||
local ok = false
|
||||
local reason = err
|
||||
if resp and resp.success and type(resp.table) == 'table' then
|
||||
ok = true
|
||||
reason = nil
|
||||
self:set_formats(url, resp.table)
|
||||
_cache_formats_for_url(url, resp.table)
|
||||
_lua_log('formats: cached ' .. tostring((resp.table.rows and #resp.table.rows) or 0) .. ' rows for url')
|
||||
else
|
||||
if type(resp) == 'table' then
|
||||
if resp.error and tostring(resp.error) ~= '' then
|
||||
reason = tostring(resp.error)
|
||||
elseif resp.stderr and tostring(resp.stderr) ~= '' then
|
||||
reason = tostring(resp.stderr)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local waiters = _formats_waiters[url] or {}
|
||||
_formats_waiters[url] = nil
|
||||
for _, fn in ipairs(waiters) do
|
||||
pcall(fn, ok, reason)
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
local function _prefetch_formats_for_url(url)
|
||||
url = tostring(url or '')
|
||||
if url == '' or not _is_http_url(url) then
|
||||
return
|
||||
end
|
||||
if type(M.file) == 'table' then
|
||||
M.file.url = url
|
||||
if M.file.fetch_formats then
|
||||
M.file:fetch_formats(nil)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local function _open_loading_formats_menu(title)
|
||||
_uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, title or 'Pick format', {
|
||||
{
|
||||
@@ -676,6 +857,34 @@ local function _open_loading_formats_menu(title)
|
||||
})
|
||||
end
|
||||
|
||||
local function _debug_dump_formatted_formats(url, tbl, items)
|
||||
local row_count = 0
|
||||
if type(tbl) == 'table' and type(tbl.rows) == 'table' then
|
||||
row_count = #tbl.rows
|
||||
end
|
||||
local item_count = 0
|
||||
if type(items) == 'table' then
|
||||
item_count = #items
|
||||
end
|
||||
|
||||
_lua_log('formats-dump: url=' .. tostring(url or '') .. ' rows=' .. tostring(row_count) .. ' menu_items=' .. tostring(item_count))
|
||||
|
||||
-- Dump the formatted picker items (first 30) so we can confirm the
|
||||
-- list is being built and looks sane.
|
||||
if type(items) == 'table' then
|
||||
local limit = 30
|
||||
for i = 1, math.min(#items, limit) do
|
||||
local it = items[i] or {}
|
||||
local title = tostring(it.title or '')
|
||||
local hint = tostring(it.hint or '')
|
||||
_lua_log('formats-item[' .. tostring(i) .. ']: ' .. title .. (hint ~= '' and (' | ' .. hint) or ''))
|
||||
end
|
||||
if #items > limit then
|
||||
_lua_log('formats-dump: (truncated; total=' .. tostring(#items) .. ')')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local function _current_ytdl_format_string()
|
||||
-- Preferred: mpv exposes the active ytdl format string.
|
||||
local fmt = trim(tostring(mp.get_property_native('ytdl-format') or ''))
|
||||
@@ -857,8 +1066,18 @@ mp.register_script_message('medios-change-format-current', function()
|
||||
|
||||
local url = tostring(target)
|
||||
|
||||
-- Ensure file state is tracking the current URL.
|
||||
if type(M.file) == 'table' then
|
||||
M.file.url = url
|
||||
end
|
||||
|
||||
-- If formats were already prefetched for this URL, open instantly.
|
||||
local cached_tbl = _get_cached_formats_table(url)
|
||||
local cached_tbl = nil
|
||||
if type(M.file) == 'table' and type(M.file.formats) == 'table' then
|
||||
cached_tbl = M.file.formats
|
||||
else
|
||||
cached_tbl = _get_cached_formats_table(url)
|
||||
end
|
||||
if type(cached_tbl) == 'table' and type(cached_tbl.rows) == 'table' and #cached_tbl.rows > 0 then
|
||||
_pending_format_change = { url = url, token = 'cached', formats_table = cached_tbl }
|
||||
|
||||
@@ -890,6 +1109,7 @@ mp.register_script_message('medios-change-format-current', function()
|
||||
}
|
||||
end
|
||||
|
||||
_debug_dump_formatted_formats(url, cached_tbl, items)
|
||||
_uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', items)
|
||||
return
|
||||
end
|
||||
@@ -898,77 +1118,69 @@ mp.register_script_message('medios-change-format-current', function()
|
||||
_pending_format_change = { url = url, token = token }
|
||||
_open_loading_formats_menu('Change format')
|
||||
|
||||
mp.add_timeout(0.05, function()
|
||||
if type(_pending_format_change) ~= 'table' or _pending_format_change.token ~= token then
|
||||
return
|
||||
end
|
||||
|
||||
ensure_mpv_ipc_server()
|
||||
_lua_log('change-format: requesting formats via helper op for url')
|
||||
|
||||
local resp = _run_helper_request_response({ op = 'ytdlp-formats', data = { url = url } }, 30)
|
||||
if type(_pending_format_change) ~= 'table' or _pending_format_change.token ~= token then
|
||||
return
|
||||
end
|
||||
|
||||
if not resp or not resp.success or type(resp.table) ~= 'table' then
|
||||
local err = ''
|
||||
if type(resp) == 'table' then
|
||||
if resp.error and tostring(resp.error) ~= '' then err = tostring(resp.error) end
|
||||
if resp.stderr and tostring(resp.stderr) ~= '' then
|
||||
err = (err ~= '' and (err .. ' | ') or '') .. tostring(resp.stderr)
|
||||
-- Non-blocking: ask the per-file state to fetch formats in the background.
|
||||
if type(M.file) == 'table' and M.file.fetch_formats then
|
||||
_lua_log('change-format: formats not cached yet; fetching in background')
|
||||
M.file:fetch_formats(function(ok, err)
|
||||
if type(_pending_format_change) ~= 'table' or _pending_format_change.token ~= token then
|
||||
return
|
||||
end
|
||||
if not ok then
|
||||
local msg2 = tostring(err or '')
|
||||
if msg2 == '' then
|
||||
msg2 = 'unknown'
|
||||
end
|
||||
_lua_log('change-format: formats failed: ' .. msg2)
|
||||
mp.osd_message('Failed to load format list: ' .. msg2, 7)
|
||||
_uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', {
|
||||
{
|
||||
title = 'Failed to load format list',
|
||||
hint = msg2,
|
||||
value = { 'script-message-to', mp.get_script_name(), 'medios-nop', '{}' },
|
||||
},
|
||||
})
|
||||
return
|
||||
end
|
||||
_lua_log('change-format: formats failed: ' .. (err ~= '' and err or '(no details)'))
|
||||
mp.osd_message('Failed to load format list', 5)
|
||||
_uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', {
|
||||
{
|
||||
title = 'Failed to load format list',
|
||||
hint = 'Check logs (medeia-mpv-lua.log / medeia-mpv-helper.log)',
|
||||
value = { 'script-message-to', mp.get_script_name(), 'medios-nop', '{}' },
|
||||
},
|
||||
})
|
||||
return
|
||||
end
|
||||
|
||||
local tbl = resp.table
|
||||
if type(tbl.rows) ~= 'table' or #tbl.rows == 0 then
|
||||
mp.osd_message('No formats available', 4)
|
||||
return
|
||||
end
|
||||
|
||||
local items = {}
|
||||
for idx, row in ipairs(tbl.rows) do
|
||||
local cols = row.columns or {}
|
||||
local id_val = ''
|
||||
local res_val = ''
|
||||
local ext_val = ''
|
||||
local size_val = ''
|
||||
for _, c in ipairs(cols) do
|
||||
if c.name == 'ID' then id_val = tostring(c.value or '') end
|
||||
if c.name == 'Resolution' then res_val = tostring(c.value or '') end
|
||||
if c.name == 'Ext' then ext_val = tostring(c.value or '') end
|
||||
if c.name == 'Size' then size_val = tostring(c.value or '') end
|
||||
local tbl = (type(M.file.formats) == 'table') and M.file.formats or _get_cached_formats_table(url)
|
||||
if type(tbl) ~= 'table' or type(tbl.rows) ~= 'table' or #tbl.rows == 0 then
|
||||
mp.osd_message('No formats available', 4)
|
||||
return
|
||||
end
|
||||
local label = id_val ~= '' and id_val or ('Format ' .. tostring(idx))
|
||||
local hint_parts = {}
|
||||
if res_val ~= '' and res_val ~= 'N/A' then table.insert(hint_parts, res_val) end
|
||||
if ext_val ~= '' then table.insert(hint_parts, ext_val) end
|
||||
if size_val ~= '' and size_val ~= 'N/A' then table.insert(hint_parts, size_val) end
|
||||
local hint = table.concat(hint_parts, ' | ')
|
||||
|
||||
local payload = { index = idx }
|
||||
items[#items + 1] = {
|
||||
title = label,
|
||||
hint = hint,
|
||||
value = { 'script-message-to', mp.get_script_name(), 'medios-change-format-pick', utils.format_json(payload) },
|
||||
}
|
||||
end
|
||||
local items = {}
|
||||
for idx, row in ipairs(tbl.rows) do
|
||||
local cols = row.columns or {}
|
||||
local id_val = ''
|
||||
local res_val = ''
|
||||
local ext_val = ''
|
||||
local size_val = ''
|
||||
for _, c in ipairs(cols) do
|
||||
if c.name == 'ID' then id_val = tostring(c.value or '') end
|
||||
if c.name == 'Resolution' then res_val = tostring(c.value or '') end
|
||||
if c.name == 'Ext' then ext_val = tostring(c.value or '') end
|
||||
if c.name == 'Size' then size_val = tostring(c.value or '') end
|
||||
end
|
||||
local label = id_val ~= '' and id_val or ('Format ' .. tostring(idx))
|
||||
local hint_parts = {}
|
||||
if res_val ~= '' and res_val ~= 'N/A' then table.insert(hint_parts, res_val) end
|
||||
if ext_val ~= '' then table.insert(hint_parts, ext_val) end
|
||||
if size_val ~= '' and size_val ~= 'N/A' then table.insert(hint_parts, size_val) end
|
||||
local hint = table.concat(hint_parts, ' | ')
|
||||
|
||||
_pending_format_change.formats_table = tbl
|
||||
_cache_formats_for_url(url, tbl)
|
||||
_uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', items)
|
||||
end)
|
||||
local payload = { index = idx }
|
||||
items[#items + 1] = {
|
||||
title = label,
|
||||
hint = hint,
|
||||
value = { 'script-message-to', mp.get_script_name(), 'medios-change-format-pick', utils.format_json(payload) },
|
||||
}
|
||||
end
|
||||
|
||||
_pending_format_change.formats_table = tbl
|
||||
_debug_dump_formatted_formats(url, tbl, items)
|
||||
_uosc_open_list_picker(DOWNLOAD_FORMAT_MENU_TYPE, 'Change format', items)
|
||||
end)
|
||||
end
|
||||
end)
|
||||
|
||||
-- Prefetch formats for yt-dlp-supported URLs on load so Change Format is instant.
|
||||
|
||||
@@ -575,6 +575,46 @@ class MPV:
|
||||
debug("Starting MPV")
|
||||
subprocess.Popen(cmd, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs)
|
||||
|
||||
# Start the persistent pipeline helper eagerly so MPV Lua can issue
|
||||
# non-blocking requests (e.g., format list prefetch) without needing
|
||||
# to spawn the helper on-demand from inside mpv.
|
||||
try:
|
||||
helper_path = (repo_root / "MPV" / "pipeline_helper.py").resolve()
|
||||
if helper_path.exists():
|
||||
py = sys.executable or "python"
|
||||
helper_cmd = [
|
||||
py,
|
||||
str(helper_path),
|
||||
"--ipc",
|
||||
str(self.ipc_path),
|
||||
"--timeout",
|
||||
"30",
|
||||
]
|
||||
|
||||
helper_kwargs: Dict[str, Any] = {}
|
||||
if platform.system() == "Windows":
|
||||
flags = 0
|
||||
try:
|
||||
flags |= int(getattr(subprocess, "DETACHED_PROCESS", 0x00000008))
|
||||
except Exception:
|
||||
flags |= 0x00000008
|
||||
try:
|
||||
flags |= int(getattr(subprocess, "CREATE_NO_WINDOW", 0x08000000))
|
||||
except Exception:
|
||||
flags |= 0x08000000
|
||||
helper_kwargs["creationflags"] = flags
|
||||
helper_kwargs.update({k: v for k, v in _windows_hidden_subprocess_kwargs().items() if k != "creationflags"})
|
||||
|
||||
subprocess.Popen(
|
||||
helper_cmd,
|
||||
stdin=subprocess.DEVNULL,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
**helper_kwargs,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def get_ipc_pipe_path() -> str:
|
||||
"""Get the fixed IPC pipe/socket path for persistent MPV connection.
|
||||
|
||||
@@ -29,6 +29,7 @@ import tempfile
|
||||
import time
|
||||
import logging
|
||||
import re
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
@@ -259,6 +260,53 @@ def _run_op(op: str, data: Any) -> Dict[str, Any]:
|
||||
with yt_dlp.YoutubeDL(ydl_opts) as ydl: # type: ignore[attr-defined]
|
||||
info = ydl.extract_info(url, download=False)
|
||||
|
||||
# Debug: dump a short summary of the format list to the helper log.
|
||||
try:
|
||||
formats_any = info.get("formats") if isinstance(info, dict) else None
|
||||
count = len(formats_any) if isinstance(formats_any, list) else 0
|
||||
_append_helper_log(f"[ytdlp-formats] extracted formats count={count} url={url}")
|
||||
|
||||
if isinstance(formats_any, list) and formats_any:
|
||||
limit = 60
|
||||
for i, f in enumerate(formats_any[:limit], start=1):
|
||||
if not isinstance(f, dict):
|
||||
continue
|
||||
fid = str(f.get("format_id") or "")
|
||||
ext = str(f.get("ext") or "")
|
||||
note = f.get("format_note") or f.get("format") or ""
|
||||
vcodec = str(f.get("vcodec") or "")
|
||||
acodec = str(f.get("acodec") or "")
|
||||
size = f.get("filesize") or f.get("filesize_approx")
|
||||
res = str(f.get("resolution") or "")
|
||||
if not res:
|
||||
try:
|
||||
w = f.get("width")
|
||||
h = f.get("height")
|
||||
if w and h:
|
||||
res = f"{int(w)}x{int(h)}"
|
||||
elif h:
|
||||
res = f"{int(h)}p"
|
||||
except Exception:
|
||||
res = ""
|
||||
_append_helper_log(
|
||||
f"[ytdlp-format {i:02d}] id={fid} ext={ext} res={res} note={note} codecs={vcodec}/{acodec} size={size}"
|
||||
)
|
||||
if count > limit:
|
||||
_append_helper_log(f"[ytdlp-formats] (truncated; total={count})")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Optional: dump the full extracted JSON for inspection.
|
||||
try:
|
||||
dump = os.environ.get("MEDEIA_MPV_YTDLP_DUMP", "").strip()
|
||||
if dump and dump != "0" and isinstance(info, dict):
|
||||
h = hashlib.sha1(url.encode("utf-8", errors="replace")).hexdigest()[:10]
|
||||
out_path = _repo_root() / "Log" / f"ytdlp-probe-{h}.json"
|
||||
out_path.write_text(json.dumps(info, ensure_ascii=False, indent=2), encoding="utf-8", errors="replace")
|
||||
_append_helper_log(f"[ytdlp-formats] wrote probe json: {out_path}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not isinstance(info, dict):
|
||||
return {
|
||||
"success": False,
|
||||
@@ -577,7 +625,9 @@ def main(argv: Optional[list[str]] = None) -> int:
|
||||
# Mirror mpv's own log messages into our helper log file so debugging does
|
||||
# not depend on the mpv on-screen console or mpv's log-file.
|
||||
try:
|
||||
level = "debug" if debug_enabled else "warn"
|
||||
# IMPORTANT: mpv debug logs can be extremely chatty (especially ytdl_hook)
|
||||
# and can starve request handling. Default to warn unless explicitly overridden.
|
||||
level = os.environ.get("MEDEIA_MPV_HELPER_MPVLOG", "").strip() or "warn"
|
||||
client.send_command_no_wait(["request_log_messages", level])
|
||||
_append_helper_log(f"[helper] requested mpv log messages level={level}")
|
||||
except Exception:
|
||||
@@ -666,8 +716,17 @@ def main(argv: Optional[list[str]] = None) -> int:
|
||||
if msg.get("id") != OBS_ID_REQUEST:
|
||||
continue
|
||||
|
||||
req = _parse_request(msg.get("data"))
|
||||
raw = msg.get("data")
|
||||
req = _parse_request(raw)
|
||||
if not req:
|
||||
try:
|
||||
if isinstance(raw, str) and raw.strip():
|
||||
snippet = raw.strip().replace("\r", "").replace("\n", " ")
|
||||
if len(snippet) > 220:
|
||||
snippet = snippet[:220] + "…"
|
||||
_append_helper_log(f"[request-raw] could not parse request json: {snippet}")
|
||||
except Exception:
|
||||
pass
|
||||
continue
|
||||
|
||||
req_id = str(req.get("id") or "")
|
||||
|
||||
@@ -4,7 +4,7 @@ from pathlib import Path
|
||||
import sys
|
||||
from typing import Any, Dict, Iterable, List, Optional
|
||||
|
||||
from ProviderCore.base import SearchProvider, SearchResult
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from ProviderCore.download import sanitize_filename
|
||||
from SYS.logger import log
|
||||
|
||||
@@ -53,7 +53,7 @@ def _get_debrid_api_key(config: Dict[str, Any]) -> Optional[str]:
|
||||
return None
|
||||
|
||||
|
||||
class AllDebrid(SearchProvider):
|
||||
class AllDebrid(Provider):
|
||||
"""Search provider for AllDebrid account content.
|
||||
|
||||
This provider lists and searches the files/magnets already present in the
|
||||
|
||||
@@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
import sys
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ProviderCore.base import SearchProvider, SearchResult
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from SYS.logger import log, debug
|
||||
|
||||
try:
|
||||
@@ -12,7 +12,7 @@ except ImportError: # pragma: no cover
|
||||
sync_playwright = None
|
||||
|
||||
|
||||
class Bandcamp(SearchProvider):
|
||||
class Bandcamp(Provider):
|
||||
"""Search provider for Bandcamp."""
|
||||
|
||||
def search(
|
||||
|
||||
@@ -9,7 +9,7 @@ from pathlib import Path
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple
|
||||
from urllib.parse import urljoin, urlparse, unquote
|
||||
|
||||
from ProviderCore.base import SearchProvider, SearchResult
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from ProviderCore.download import sanitize_filename
|
||||
from SYS.logger import log
|
||||
from models import ProgressBar
|
||||
@@ -22,7 +22,7 @@ except ImportError:
|
||||
lxml_html = None
|
||||
|
||||
|
||||
class Libgen(SearchProvider):
|
||||
class Libgen(Provider):
|
||||
"""Search provider for Library Genesis books."""
|
||||
|
||||
def search(
|
||||
|
||||
@@ -9,7 +9,7 @@ from urllib.parse import quote
|
||||
|
||||
import requests
|
||||
|
||||
from ProviderCore.base import FileProvider
|
||||
from ProviderCore.base import Provider
|
||||
|
||||
|
||||
_MATRIX_INIT_CHECK_CACHE: Dict[str, Tuple[bool, Optional[str]]] = {}
|
||||
@@ -50,7 +50,7 @@ def _matrix_health_check(*, homeserver: str, access_token: Optional[str]) -> Tup
|
||||
return False, str(exc)
|
||||
|
||||
|
||||
class Matrix(FileProvider):
|
||||
class Matrix(Provider):
|
||||
"""File provider for Matrix (Element) chat rooms."""
|
||||
|
||||
def __init__(self, config: Optional[Dict[str, Any]] = None):
|
||||
@@ -208,3 +208,82 @@ class Matrix(FileProvider):
|
||||
if not room_id:
|
||||
raise Exception("Matrix room_id missing")
|
||||
return self.upload_to_room(file_path, str(room_id))
|
||||
|
||||
def selector(self, selected_items: List[Any], *, ctx: Any, stage_is_last: bool = True, **_kwargs: Any) -> bool:
|
||||
"""Handle Matrix room selection via `@N`.
|
||||
|
||||
If the CLI has a pending upload stash, selecting a room triggers an upload.
|
||||
"""
|
||||
if not stage_is_last:
|
||||
return False
|
||||
|
||||
pending = None
|
||||
try:
|
||||
pending = ctx.load_value('matrix_pending_uploads', default=None)
|
||||
except Exception:
|
||||
pending = None
|
||||
|
||||
pending_list = list(pending) if isinstance(pending, list) else []
|
||||
if not pending_list:
|
||||
return False
|
||||
|
||||
room_ids: List[str] = []
|
||||
for item in selected_items or []:
|
||||
rid = None
|
||||
if isinstance(item, dict):
|
||||
rid = item.get('room_id') or item.get('id')
|
||||
else:
|
||||
rid = getattr(item, 'room_id', None) or getattr(item, 'id', None)
|
||||
if rid and str(rid).strip():
|
||||
room_ids.append(str(rid).strip())
|
||||
|
||||
if not room_ids:
|
||||
print("No Matrix room selected\n")
|
||||
return True
|
||||
|
||||
any_failed = False
|
||||
for room_id in room_ids:
|
||||
for payload in pending_list:
|
||||
try:
|
||||
file_path = ''
|
||||
delete_after = False
|
||||
if isinstance(payload, dict):
|
||||
file_path = str(payload.get('path') or '')
|
||||
delete_after = bool(payload.get('delete_after', False))
|
||||
else:
|
||||
file_path = str(getattr(payload, 'path', '') or '')
|
||||
if not file_path:
|
||||
any_failed = True
|
||||
continue
|
||||
|
||||
media_path = Path(file_path)
|
||||
if not media_path.exists():
|
||||
any_failed = True
|
||||
print(f"Matrix upload file missing: {file_path}")
|
||||
continue
|
||||
|
||||
link = self.upload_to_room(str(media_path), str(room_id))
|
||||
if link:
|
||||
print(link)
|
||||
|
||||
if delete_after:
|
||||
try:
|
||||
media_path.unlink(missing_ok=True) # type: ignore[arg-type]
|
||||
except TypeError:
|
||||
try:
|
||||
if media_path.exists():
|
||||
media_path.unlink()
|
||||
except Exception:
|
||||
pass
|
||||
except Exception as exc:
|
||||
any_failed = True
|
||||
print(f"Matrix upload failed: {exc}")
|
||||
|
||||
try:
|
||||
ctx.store_value('matrix_pending_uploads', [])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if any_failed:
|
||||
print("\nOne or more Matrix uploads failed\n")
|
||||
return True
|
||||
|
||||
@@ -16,7 +16,7 @@ from typing import Any, Dict, List, Optional, Tuple
|
||||
import requests
|
||||
|
||||
from API.HTTP import HTTPClient
|
||||
from ProviderCore.base import SearchProvider, SearchResult
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from ProviderCore.download import download_file, sanitize_filename
|
||||
from cli_syntax import get_field, get_free_text, parse_query
|
||||
from SYS.logger import debug, log
|
||||
@@ -183,7 +183,7 @@ def _resolve_archive_id(session: requests.Session, edition_id: str, ia_candidate
|
||||
return ""
|
||||
|
||||
|
||||
class OpenLibrary(SearchProvider):
|
||||
class OpenLibrary(Provider):
|
||||
"""Search provider for OpenLibrary books + Archive.org direct/borrow download."""
|
||||
|
||||
def __init__(self, config: Optional[Dict[str, Any]] = None):
|
||||
|
||||
@@ -11,7 +11,7 @@ import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ProviderCore.base import SearchProvider, SearchResult
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from SYS.logger import log, debug
|
||||
from models import ProgressBar
|
||||
|
||||
@@ -153,7 +153,7 @@ def _suppress_aioslsk_noise() -> Any:
|
||||
sys.stdout, sys.stderr = old_out, old_err
|
||||
|
||||
|
||||
class Soulseek(SearchProvider):
|
||||
class Soulseek(Provider):
|
||||
"""Search provider for Soulseek P2P network."""
|
||||
|
||||
MUSIC_EXTENSIONS = {
|
||||
|
||||
284
Provider/telegram.py
Normal file
284
Provider/telegram.py
Normal file
@@ -0,0 +1,284 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
|
||||
|
||||
def _looks_like_telegram_message_url(url: str) -> bool:
|
||||
try:
|
||||
parsed = urlparse(str(url))
|
||||
except Exception:
|
||||
return False
|
||||
host = (parsed.hostname or "").lower().strip()
|
||||
if host in {"t.me", "telegram.me"}:
|
||||
return True
|
||||
if host.endswith(".t.me"):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _parse_telegram_message_url(url: str) -> Tuple[str, int]:
|
||||
"""Parse a Telegram message URL into (entity, message_id).
|
||||
|
||||
Supported:
|
||||
- https://t.me/<username>/<msg_id>
|
||||
- https://t.me/s/<username>/<msg_id>
|
||||
- https://t.me/c/<internal_channel_id>/<msg_id>
|
||||
"""
|
||||
parsed = urlparse(str(url))
|
||||
path = (parsed.path or "").strip("/")
|
||||
if not path:
|
||||
raise ValueError(f"Invalid Telegram URL: {url}")
|
||||
|
||||
parts = [p for p in path.split("/") if p]
|
||||
if not parts:
|
||||
raise ValueError(f"Invalid Telegram URL: {url}")
|
||||
|
||||
# Strip preview prefix
|
||||
if parts and parts[0].lower() == "s":
|
||||
parts = parts[1:]
|
||||
|
||||
if len(parts) < 2:
|
||||
raise ValueError(f"Invalid Telegram URL (expected /<chat>/<msg>): {url}")
|
||||
|
||||
chat = parts[0]
|
||||
msg_raw = parts[1]
|
||||
|
||||
# t.me/c/<id>/<msg>
|
||||
if chat.lower() == "c":
|
||||
if len(parts) < 3:
|
||||
raise ValueError(f"Invalid Telegram /c/ URL: {url}")
|
||||
chat = f"c:{parts[1]}"
|
||||
msg_raw = parts[2]
|
||||
|
||||
m = re.fullmatch(r"\d+", str(msg_raw).strip())
|
||||
if not m:
|
||||
raise ValueError(f"Invalid Telegram message id in URL: {url}")
|
||||
|
||||
return str(chat), int(msg_raw)
|
||||
|
||||
|
||||
class Telegram(Provider):
|
||||
"""Telegram provider using Telethon.
|
||||
|
||||
Config:
|
||||
[provider=telegram]
|
||||
app_id=
|
||||
api_hash=
|
||||
"""
|
||||
|
||||
def __init__(self, config: Optional[Dict[str, Any]] = None):
|
||||
super().__init__(config)
|
||||
telegram_conf = self.config.get("provider", {}).get("telegram", {}) if isinstance(self.config, dict) else {}
|
||||
self._app_id = telegram_conf.get("app_id")
|
||||
self._api_hash = telegram_conf.get("api_hash")
|
||||
|
||||
def validate(self) -> bool:
|
||||
try:
|
||||
__import__("telethon")
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
try:
|
||||
app_id = int(self._app_id) if self._app_id not in (None, "") else None
|
||||
except Exception:
|
||||
app_id = None
|
||||
api_hash = str(self._api_hash).strip() if self._api_hash not in (None, "") else ""
|
||||
return bool(app_id and api_hash)
|
||||
|
||||
def _session_base_path(self) -> Path:
|
||||
root = Path(__file__).resolve().parents[1]
|
||||
session_dir = root / "Log" / "medeia_macina"
|
||||
try:
|
||||
session_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception:
|
||||
pass
|
||||
return session_dir / "telegram"
|
||||
|
||||
def _credentials(self) -> Tuple[int, str]:
|
||||
raw_app_id = self._app_id
|
||||
if raw_app_id in (None, ""):
|
||||
raise Exception("Telegram app_id missing")
|
||||
try:
|
||||
app_id = int(str(raw_app_id).strip())
|
||||
except Exception:
|
||||
raise Exception("Telegram app_id invalid")
|
||||
api_hash = str(self._api_hash or "").strip()
|
||||
if not api_hash:
|
||||
raise Exception("Telegram api_hash missing")
|
||||
return app_id, api_hash
|
||||
|
||||
def _ensure_event_loop(self) -> None:
|
||||
"""Telethon sync wrapper requires an event loop to exist in this thread."""
|
||||
try:
|
||||
asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
def _download_message_media_sync(self, *, url: str, output_dir: Path) -> Tuple[Path, Dict[str, Any]]:
|
||||
try:
|
||||
from telethon import errors
|
||||
from telethon.sync import TelegramClient
|
||||
from telethon.tl.types import PeerChannel
|
||||
except Exception as exc:
|
||||
raise Exception(f"Telethon not available: {exc}")
|
||||
|
||||
self._ensure_event_loop()
|
||||
loop = asyncio.get_event_loop()
|
||||
if getattr(loop, "is_running", lambda: False)():
|
||||
raise Exception("Telegram provider cannot run while an event loop is already running")
|
||||
|
||||
def _resolve(value):
|
||||
if asyncio.iscoroutine(value):
|
||||
return loop.run_until_complete(value)
|
||||
return value
|
||||
|
||||
app_id, api_hash = self._credentials()
|
||||
session_base = self._session_base_path()
|
||||
chat, message_id = _parse_telegram_message_url(url)
|
||||
|
||||
client = TelegramClient(str(session_base), app_id, api_hash)
|
||||
try:
|
||||
# This prompts on first run for phone/code and persists the session.
|
||||
_resolve(client.start())
|
||||
|
||||
if chat.startswith("c:"):
|
||||
channel_id = int(chat.split(":", 1)[1])
|
||||
entity = PeerChannel(channel_id)
|
||||
else:
|
||||
entity = chat
|
||||
if isinstance(entity, str) and entity and not entity.startswith("@"):
|
||||
entity = "@" + entity
|
||||
|
||||
# Use the list form to be robust across Telethon sync/async stubs.
|
||||
messages = _resolve(client.get_messages(entity, ids=[message_id]))
|
||||
message = None
|
||||
if isinstance(messages, (list, tuple)):
|
||||
message = messages[0] if messages else None
|
||||
else:
|
||||
try:
|
||||
# TotalList is list-like
|
||||
message = messages[0] # type: ignore[index]
|
||||
except Exception:
|
||||
message = None
|
||||
if not message:
|
||||
raise Exception("Telegram message not found")
|
||||
if not getattr(message, "media", None):
|
||||
raise Exception("Telegram message has no media")
|
||||
|
||||
chat_title = ""
|
||||
chat_username = ""
|
||||
chat_id = None
|
||||
try:
|
||||
chat_obj = getattr(message, "chat", None)
|
||||
if chat_obj is not None:
|
||||
maybe_title = getattr(chat_obj, "title", None)
|
||||
maybe_username = getattr(chat_obj, "username", None)
|
||||
maybe_id = getattr(chat_obj, "id", None)
|
||||
if isinstance(maybe_title, str):
|
||||
chat_title = maybe_title.strip()
|
||||
if isinstance(maybe_username, str):
|
||||
chat_username = maybe_username.strip()
|
||||
if maybe_id is not None:
|
||||
chat_id = int(maybe_id)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
caption = ""
|
||||
try:
|
||||
maybe_caption = getattr(message, "message", None)
|
||||
if isinstance(maybe_caption, str):
|
||||
caption = maybe_caption.strip()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
msg_id = None
|
||||
msg_date = None
|
||||
try:
|
||||
msg_id = int(getattr(message, "id", 0) or 0)
|
||||
except Exception:
|
||||
msg_id = None
|
||||
try:
|
||||
msg_date = getattr(message, "date", None)
|
||||
except Exception:
|
||||
msg_date = None
|
||||
|
||||
file_name = ""
|
||||
file_mime = ""
|
||||
file_size = None
|
||||
try:
|
||||
file_obj = getattr(message, "file", None)
|
||||
maybe_name = getattr(file_obj, "name", None)
|
||||
maybe_mime = getattr(file_obj, "mime_type", None)
|
||||
maybe_size = getattr(file_obj, "size", None)
|
||||
if isinstance(maybe_name, str):
|
||||
file_name = maybe_name.strip()
|
||||
if isinstance(maybe_mime, str):
|
||||
file_mime = maybe_mime.strip()
|
||||
if maybe_size is not None:
|
||||
file_size = int(maybe_size)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
downloaded = _resolve(client.download_media(message, file=str(output_dir)))
|
||||
if not downloaded:
|
||||
raise Exception("Telegram download returned no file")
|
||||
downloaded_path = Path(str(downloaded))
|
||||
date_iso = None
|
||||
try:
|
||||
if msg_date is not None and hasattr(msg_date, "isoformat"):
|
||||
date_iso = msg_date.isoformat() # type: ignore[union-attr]
|
||||
except Exception:
|
||||
date_iso = None
|
||||
|
||||
info: Dict[str, Any] = {
|
||||
"provider": "telegram",
|
||||
"source_url": url,
|
||||
"chat": {
|
||||
"key": chat,
|
||||
"title": chat_title,
|
||||
"username": chat_username,
|
||||
"id": chat_id,
|
||||
},
|
||||
"message": {
|
||||
"id": msg_id,
|
||||
"date": date_iso,
|
||||
"caption": caption,
|
||||
},
|
||||
"file": {
|
||||
"name": file_name,
|
||||
"mime_type": file_mime,
|
||||
"size": file_size,
|
||||
"downloaded_path": str(downloaded_path),
|
||||
},
|
||||
}
|
||||
return downloaded_path, info
|
||||
except errors.RPCError as exc:
|
||||
raise Exception(f"Telegram RPC error: {exc}")
|
||||
finally:
|
||||
try:
|
||||
_resolve(client.disconnect())
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def download_url(self, url: str, output_dir: Path) -> Tuple[Path, Dict[str, Any]]:
|
||||
"""Download a Telegram message URL and return (path, metadata)."""
|
||||
if not _looks_like_telegram_message_url(url):
|
||||
raise ValueError("Not a Telegram URL")
|
||||
return self._download_message_media_sync(url=url, output_dir=output_dir)
|
||||
|
||||
def download(self, result: SearchResult, output_dir: Path) -> Optional[Path]:
|
||||
url = str(getattr(result, "path", "") or "")
|
||||
if not url:
|
||||
return None
|
||||
if not _looks_like_telegram_message_url(url):
|
||||
return None
|
||||
|
||||
path, _info = self._download_message_media_sync(url=url, output_dir=output_dir)
|
||||
return path
|
||||
@@ -6,11 +6,11 @@ import subprocess
|
||||
import sys
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ProviderCore.base import SearchProvider, SearchResult
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from SYS.logger import log
|
||||
|
||||
|
||||
class YouTube(SearchProvider):
|
||||
class YouTube(Provider):
|
||||
"""Search provider for YouTube using yt-dlp."""
|
||||
|
||||
def search(
|
||||
|
||||
@@ -4,11 +4,11 @@ import os
|
||||
import sys
|
||||
from typing import Any
|
||||
|
||||
from ProviderCore.base import FileProvider
|
||||
from ProviderCore.base import Provider
|
||||
from SYS.logger import log
|
||||
|
||||
|
||||
class ZeroXZero(FileProvider):
|
||||
class ZeroXZero(Provider):
|
||||
"""File provider for 0x0.st."""
|
||||
|
||||
def upload(self, file_path: str, **kwargs: Any) -> str:
|
||||
|
||||
@@ -39,14 +39,26 @@ class SearchResult:
|
||||
}
|
||||
|
||||
|
||||
class SearchProvider(ABC):
|
||||
"""Base class for search providers."""
|
||||
class Provider(ABC):
|
||||
"""Unified provider base class.
|
||||
|
||||
This replaces the older split between "search providers" and "file providers".
|
||||
Concrete providers may implement any subset of:
|
||||
- search(query, ...)
|
||||
- download(result, output_dir)
|
||||
- upload(file_path, ...)
|
||||
- login(...)
|
||||
- validate()
|
||||
"""
|
||||
|
||||
def __init__(self, config: Optional[Dict[str, Any]] = None):
|
||||
self.config = config or {}
|
||||
self.name = self.__class__.__name__.lower()
|
||||
|
||||
@abstractmethod
|
||||
# Standard lifecycle/auth hook.
|
||||
def login(self, **_kwargs: Any) -> bool:
|
||||
return True
|
||||
|
||||
def search(
|
||||
self,
|
||||
query: str,
|
||||
@@ -55,30 +67,46 @@ class SearchProvider(ABC):
|
||||
**kwargs: Any,
|
||||
) -> List[SearchResult]:
|
||||
"""Search for items matching the query."""
|
||||
raise NotImplementedError(f"Provider '{self.name}' does not support search")
|
||||
|
||||
def download(self, result: SearchResult, output_dir: Path) -> Optional[Path]:
|
||||
"""Download an item from a search result."""
|
||||
|
||||
return None
|
||||
|
||||
def upload(self, file_path: str, **kwargs: Any) -> str:
|
||||
"""Upload a file and return a URL or identifier."""
|
||||
raise NotImplementedError(f"Provider '{self.name}' does not support upload")
|
||||
|
||||
def validate(self) -> bool:
|
||||
"""Check if provider is available and properly configured."""
|
||||
|
||||
return True
|
||||
|
||||
def selector(self, selected_items: List[Any], *, ctx: Any, stage_is_last: bool = True, **_kwargs: Any) -> bool:
|
||||
"""Optional hook for handling `@N` selection semantics.
|
||||
|
||||
class FileProvider(ABC):
|
||||
"""Base class for file upload providers."""
|
||||
The CLI can delegate selection behavior to a provider/store instead of
|
||||
applying the default selection filtering.
|
||||
|
||||
def __init__(self, config: Optional[Dict[str, Any]] = None):
|
||||
self.config = config or {}
|
||||
self.name = self.__class__.__name__.lower()
|
||||
Return True if the selection was handled and default behavior should be skipped.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def upload(self, file_path: str, **kwargs: Any) -> str:
|
||||
"""Upload a file and return the URL."""
|
||||
_ = selected_items
|
||||
_ = ctx
|
||||
_ = stage_is_last
|
||||
return False
|
||||
|
||||
def validate(self) -> bool:
|
||||
"""Check if provider is available/configured."""
|
||||
|
||||
return True
|
||||
class SearchProvider(Provider):
|
||||
"""Compatibility alias for older code.
|
||||
|
||||
Prefer inheriting from Provider directly.
|
||||
"""
|
||||
|
||||
|
||||
class FileProvider(Provider):
|
||||
"""Compatibility alias for older code.
|
||||
|
||||
Prefer inheriting from Provider directly.
|
||||
"""
|
||||
|
||||
@@ -11,33 +11,47 @@ import sys

from SYS.logger import log

from ProviderCore.base import FileProvider, SearchProvider, SearchResult
from ProviderCore.base import Provider, SearchProvider, FileProvider, SearchResult
from Provider.alldebrid import AllDebrid
from Provider.bandcamp import Bandcamp
from Provider.libgen import Libgen
from Provider.matrix import Matrix
from Provider.openlibrary import OpenLibrary
from Provider.soulseek import Soulseek, download_soulseek_file
from Provider.telegram import Telegram
from Provider.youtube import YouTube
from Provider.zeroxzero import ZeroXZero


_SEARCH_PROVIDERS: Dict[str, Type[SearchProvider]] = {
_PROVIDERS: Dict[str, Type[Provider]] = {
    # Search-capable providers
    "alldebrid": AllDebrid,
    "libgen": Libgen,
    "openlibrary": OpenLibrary,
    "soulseek": Soulseek,
    "bandcamp": Bandcamp,
    "youtube": YouTube,
    "telegram": Telegram,
    # Upload-capable providers
    "0x0": ZeroXZero,
    "matrix": Matrix,
}


def get_search_provider(name: str, config: Optional[Dict[str, Any]] = None) -> Optional[SearchProvider]:
    """Get a search provider by name."""
def _supports_search(provider: Provider) -> bool:
    return provider.__class__.search is not Provider.search

    provider_class = _SEARCH_PROVIDERS.get((name or "").lower())

def _supports_upload(provider: Provider) -> bool:
    return provider.__class__.upload is not Provider.upload


def get_provider(name: str, config: Optional[Dict[str, Any]] = None) -> Optional[Provider]:
    """Get a provider by name (unified registry)."""

    provider_class = _PROVIDERS.get((name or "").lower())
    if provider_class is None:
        log(f"[provider] Unknown search provider: {name}", file=sys.stderr)
        log(f"[provider] Unknown provider: {name}", file=sys.stderr)
        return None

    try:
@@ -51,11 +65,11 @@ def get_search_provider(name: str, config: Optional[Dict[str, Any]] = None) -> O
        return None


def list_search_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str, bool]:
    """List all search providers and their availability."""
def list_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str, bool]:
    """List all providers and their availability."""

    availability: Dict[str, bool] = {}
    for name, provider_class in _SEARCH_PROVIDERS.items():
    for name, provider_class in _PROVIDERS.items():
        try:
            provider = provider_class(config)
            availability[name] = provider.validate()
@@ -64,39 +78,51 @@ def list_search_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str,
    return availability


_FILE_PROVIDERS: Dict[str, Type[FileProvider]] = {
    "0x0": ZeroXZero,
    "matrix": Matrix,
}
def get_search_provider(name: str, config: Optional[Dict[str, Any]] = None) -> Optional[SearchProvider]:
    """Get a search-capable provider by name (compat API)."""

    provider = get_provider(name, config)
    if provider is None:
        return None
    if not _supports_search(provider):
        log(f"[provider] Provider '{name}' does not support search", file=sys.stderr)
        return None
    return provider  # type: ignore[return-value]


def list_search_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str, bool]:
    """List all search providers and their availability."""

    availability: Dict[str, bool] = {}
    for name, provider_class in _PROVIDERS.items():
        try:
            provider = provider_class(config)
            availability[name] = bool(provider.validate() and _supports_search(provider))
        except Exception:
            availability[name] = False
    return availability


def get_file_provider(name: str, config: Optional[Dict[str, Any]] = None) -> Optional[FileProvider]:
    """Get a file provider by name."""
    """Get an upload-capable provider by name (compat API)."""

    provider_class = _FILE_PROVIDERS.get((name or "").lower())
    if provider_class is None:
        log(f"[provider] Unknown file provider: {name}", file=sys.stderr)
    provider = get_provider(name, config)
    if provider is None:
        return None

    try:
        provider = provider_class(config)
        if not provider.validate():
            log(f"[provider] File provider '{name}' is not available", file=sys.stderr)
            return None
        return provider
    except Exception as exc:
        log(f"[provider] Error initializing file provider '{name}': {exc}", file=sys.stderr)
    if not _supports_upload(provider):
        log(f"[provider] Provider '{name}' does not support upload", file=sys.stderr)
        return None
    return provider  # type: ignore[return-value]


def list_file_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str, bool]:
    """List all file providers and their availability."""

    availability: Dict[str, bool] = {}
    for name, provider_class in _FILE_PROVIDERS.items():
    for name, provider_class in _PROVIDERS.items():
        try:
            provider = provider_class(config)
            availability[name] = provider.validate()
            availability[name] = bool(provider.validate() and _supports_upload(provider))
        except Exception:
            availability[name] = False
    return availability
@@ -104,8 +130,11 @@ def list_file_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str, bo

__all__ = [
    "SearchResult",
    "Provider",
    "SearchProvider",
    "FileProvider",
    "get_provider",
    "list_providers",
    "get_search_provider",
    "list_search_providers",
    "get_file_provider",

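A minimal usage sketch of the unified registry introduced above (illustrative only; the empty config and the "libgen" lookup are assumptions, and the search() call signature is not shown in this diff):

from ProviderCore.registry import get_provider, get_search_provider, list_providers

config = {}  # illustrative empty config
print(list_providers(config))                      # availability map, e.g. {"libgen": True, ...}
provider = get_search_provider("libgen", config)   # None if unknown, unavailable, or not search-capable
if provider is not None:
    results = provider.search("example query")     # search() arguments assumed; adjust to the real base class
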
@@ -67,6 +67,16 @@ class Store(ABC):
        """Add or replace a named note for a file."""
        raise NotImplementedError

    def selector(self, selected_items: List[Any], *, ctx: Any, stage_is_last: bool = True, **_kwargs: Any) -> bool:
        """Optional hook for handling `@N` selection semantics.

        Return True if the selection was handled and default behavior should be skipped.
        """
        _ = selected_items
        _ = ctx
        _ = stage_is_last
        return False

    @abstractmethod
    def delete_note(self, file_identifier: str, name: str, **kwargs: Any) -> bool:
        """Delete a named note for a file."""

@@ -1618,6 +1618,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
    pipe_obj = models.PipeObject(
        hash=hash_val,
        store=store_val,
        provider=str(value.get("provider") or value.get("prov") or extra.get("provider") or "").strip() or None,
        tag=tag_val,
        title=title_val,
        url=url_val,
@@ -1660,6 +1661,7 @@ def coerce_to_pipe_object(value: Any, default_path: Optional[str] = None) -> mod
    pipe_obj = models.PipeObject(
        hash=hash_val,
        store=store_val,
        provider=None,
        path=str(path_val) if path_val and path_val != "unknown" else None,
        title=title_val,
        tag=[],

@@ -15,6 +15,7 @@ from SYS.logger import log, debug
from SYS.utils_constant import ALL_SUPPORTED_EXTENSIONS
from Store import Store
from . import _shared as sh
from result_table import ResultTable

Cmdlet = sh.Cmdlet
CmdletArg = sh.CmdletArg
@@ -49,6 +50,13 @@ class Add_File(Cmdlet):
            SharedArgs.STORE,
            SharedArgs.HASH,
            CmdletArg(name="provider", type="string", required=False, description="File hosting provider (e.g., 0x0)", alias="prov"),
            CmdletArg(
                name="room",
                type="string",
                required=False,
                description="Matrix room_id (when -provider matrix). If omitted, a room picker table is shown.",
                alias="room_id",
            ),
            CmdletArg(name="delete", type="flag", required=False, description="Delete file after successful upload", alias="del"),
        ],
        detail=[
@@ -70,6 +78,7 @@ class Add_File(Cmdlet):
        path_arg = parsed.get("path")
        location = parsed.get("store")
        provider_name = parsed.get("provider")
        provider_room = parsed.get("room")
        delete_after = parsed.get("delete", False)

        stage_ctx = ctx.get_stage_context()
@@ -250,7 +259,31 @@ class Add_File(Cmdlet):
                continue

            if provider_name:
                code = self._handle_provider_upload(media_path, provider_name, pipe_obj, config, delete_after_item)
                # Matrix provider can prompt for a room selection if one is not configured.
                if str(provider_name).strip().lower() == "matrix":
                    room_id = None
                    if provider_room:
                        room_id = str(provider_room).strip()
                    if not room_id:
                        try:
                            matrix_conf = config.get("provider", {}).get("matrix", {}) if isinstance(config, dict) else {}
                            room_id = str(matrix_conf.get("room_id") or "").strip() or None
                        except Exception:
                            room_id = None

                    if not room_id:
                        pending = [
                            {
                                "path": str(media_path),
                                "pipe_obj": pipe_obj,
                                "delete_after": bool(delete_after_item),
                            }
                        ]
                        return self._matrix_prompt_room_selection(pending, config, list(args))

                    code = self._handle_matrix_upload(media_path, pipe_obj, config, delete_after_item, room_id=room_id)
                else:
                    code = self._handle_provider_upload(media_path, provider_name, pipe_obj, config, delete_after_item)
                if code == 0:
                    successes += 1
                else:
@@ -1496,6 +1529,134 @@ class Add_File(Cmdlet):
            debug(f"[add-file] Soulseek download traceback: {e}")
            return None

    @staticmethod
    def _handle_matrix_upload(
        media_path: Path,
        pipe_obj: models.PipeObject,
        config: Dict[str, Any],
        delete_after: bool,
        *,
        room_id: str,
    ) -> int:
        """Upload to Matrix and update the PipeObject.

        Matrix needs a room_id. If you don't have one, use the interactive
        room picker path which resumes via `-matrix-send`.
        """
        from Provider.matrix import Matrix

        log(f"Uploading via matrix: {media_path.name}", file=sys.stderr)

        try:
            provider = Matrix(config)
        except Exception as exc:
            log(f"Matrix not available: {exc}", file=sys.stderr)
            return 1

        try:
            hoster_url = provider.upload_to_room(str(media_path), str(room_id))
            log(f"File uploaded: {hoster_url}", file=sys.stderr)

            # Associate URL with Hydrus if possible
            f_hash = Add_File._resolve_file_hash(None, media_path, pipe_obj, None)
            if f_hash:
                try:
                    store_name = getattr(pipe_obj, "store", None)
                    if store_name:
                        store = Store(config)
                        backend = store[str(store_name)]
                        client = getattr(backend, "_client", None)
                        if client is not None and hasattr(client, "associate_url"):
                            client.associate_url(str(f_hash), hoster_url)
                except Exception:
                    pass

        except Exception as exc:
            log(f"Upload failed: {exc}", file=sys.stderr)
            return 1

        # Update PipeObject and emit
        extra_updates: Dict[str, Any] = {
            "provider": "matrix",
            "provider_url": hoster_url,
            "room_id": str(room_id),
        }
        if isinstance(pipe_obj.extra, dict):
            existing_known = list(pipe_obj.extra.get("url") or [])
            if hoster_url and hoster_url not in existing_known:
                existing_known.append(hoster_url)
            extra_updates["url"] = existing_known

        file_path = pipe_obj.path or (str(media_path) if media_path else None) or ""
        Add_File._update_pipe_object_destination(
            pipe_obj,
            hash_value=f_hash or "unknown",
            store="matrix",
            path=file_path,
            tag=pipe_obj.tag,
            title=pipe_obj.title or (media_path.name if media_path else None),
            extra_updates=extra_updates,
        )
        Add_File._emit_pipe_object(pipe_obj)
        Add_File._cleanup_after_success(media_path, delete_source=bool(delete_after))
        return 0

    @staticmethod
    def _matrix_prompt_room_selection(
        pending_items: List[Dict[str, Any]],
        config: Dict[str, Any],
        original_args: List[str],
    ) -> int:
        """Show rooms table and pause pipeline for @N selection."""
        from Provider.matrix import Matrix

        # Stash pending uploads so @N on the matrix table can trigger Matrix.upload_to_room.
        ctx.store_value("matrix_pending_uploads", pending_items)

        try:
            provider = Matrix(config)
        except Exception as exc:
            log(f"Matrix not available: {exc}", file=sys.stderr)
            return 1

        try:
            rooms = provider.list_rooms()
        except Exception as exc:
            log(f"Failed to list Matrix rooms: {exc}", file=sys.stderr)
            return 1

        if not rooms:
            log("No joined rooms found.", file=sys.stderr)
            return 0

        table = ResultTable("Matrix Rooms")
        table.set_table("matrix")
        table.set_source_command("add-file", list(original_args or []))

        for room in rooms:
            row = table.add_row()
            name = str(room.get("name") or "").strip() if isinstance(room, dict) else ""
            rid = str(room.get("room_id") or "").strip() if isinstance(room, dict) else ""
            row.add_column("Name", name)
            row.add_column("Room", rid)

        room_items: List[Dict[str, Any]] = []
        for room in rooms:
            if not isinstance(room, dict):
                continue
            rid = str(room.get("room_id") or "").strip()
            name = str(room.get("name") or "").strip()
            room_items.append({**room, "store": "matrix", "provider": "matrix", "title": name or rid or "Matrix Room"})

        # Overlay table: user selects @N on this Matrix rooms table to upload.
        ctx.set_last_result_table_overlay(table, room_items)
        ctx.set_current_stage_table(table)

        print()
        print(table.format_plain())
        print("\nSelect room(s) with @N (e.g. @1 or @1-3) to upload the selected item(s)")
        return 0

    @staticmethod
    def _handle_provider_upload(
        media_path: Path,

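A condensed sketch of the room resolution order implemented in the add-file hunk above (a standalone helper written for illustration; the `parsed` and `config` dicts and the helper name are assumptions, the precedence is taken from the diff): explicit -room argument first, then provider.matrix.room_id from config, otherwise the interactive room picker.

def resolve_matrix_room(parsed: dict, config: dict) -> str | None:
    # 1) explicit -room / -room_id argument wins
    room_id = str(parsed.get("room") or "").strip()
    if room_id:
        return room_id
    # 2) fall back to config: provider.matrix.room_id
    matrix_conf = config.get("provider", {}).get("matrix", {}) if isinstance(config, dict) else {}
    room_id = str(matrix_conf.get("room_id") or "").strip()
    # 3) empty -> caller shows the room picker table and waits for @N
    return room_id or None
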
@@ -12,6 +12,7 @@ from __future__ import annotations
import sys
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence
from urllib.parse import urlparse

from SYS.download import DownloadError, _download_direct_file
from SYS.logger import log, debug
@@ -102,7 +103,7 @@ class Download_File(Cmdlet):
            get_search_provider = None
            SearchResult = None

        def _emit_local_file(downloaded_path: Path, source: Optional[str], title_hint: Optional[str], tags_hint: Optional[List[str]], media_kind_hint: Optional[str], full_metadata: Optional[Dict[str, Any]]) -> None:
        def _emit_local_file(downloaded_path: Path, source: Optional[str], title_hint: Optional[str], tags_hint: Optional[List[str]], media_kind_hint: Optional[str], full_metadata: Optional[Dict[str, Any]], provider_hint: Optional[str] = None) -> None:
            title_val = (title_hint or downloaded_path.stem or "Unknown").strip() or downloaded_path.stem
            hash_value = self._compute_file_hash(downloaded_path)
            tag: List[str] = []
@@ -121,6 +122,8 @@ class Download_File(Cmdlet):
                "media_kind": media_kind_hint or "file",
                "tag": tag,
            }
            if provider_hint:
                payload["provider"] = str(provider_hint)
            if full_metadata:
                payload["full_metadata"] = full_metadata
            if source and str(source).startswith("http"):
@@ -140,6 +143,79 @@ class Download_File(Cmdlet):
            try:
                debug(f"Processing URL: {url}")

                # Telegram message URLs are not direct files; route through the provider.
                try:
                    parsed = urlparse(str(url))
                    host = (parsed.hostname or "").lower().strip()
                except Exception:
                    host = ""

                is_telegram = host in {"t.me", "telegram.me"} or host.endswith(".t.me")
                if is_telegram and SearchResult:
                    try:
                        from ProviderCore.registry import get_provider as _get_provider
                    except Exception:
                        _get_provider = None

                    if _get_provider is None:
                        raise DownloadError("Telegram provider registry not available")

                    provider = _get_provider("telegram", config)
                    if provider is None:
                        raise DownloadError("Telegram provider not configured or not available (check telethon/app_id/api_hash)")

                    sr = SearchResult(table="telegram", title=str(url), path=str(url), full_metadata={})
                    downloaded_path = None
                    telegram_info: Optional[Dict[str, Any]] = None
                    if hasattr(provider, "download_url"):
                        try:
                            downloaded_path, telegram_info = provider.download_url(str(url), final_output_dir)  # type: ignore[attr-defined]
                        except Exception as exc:
                            raise DownloadError(str(exc))
                    else:
                        downloaded_path = provider.download(sr, final_output_dir)

                    if not downloaded_path:
                        raise DownloadError("Telegram download returned no file")

                    channel = ""
                    post = None
                    if isinstance(telegram_info, dict):
                        try:
                            chat_info = telegram_info.get("chat") if isinstance(telegram_info.get("chat"), dict) else {}
                            msg_info = telegram_info.get("message") if isinstance(telegram_info.get("message"), dict) else {}
                            channel = str(chat_info.get("title") or chat_info.get("username") or "").strip()
                            post = msg_info.get("id")
                        except Exception:
                            channel = ""
                            post = None

                    title_hint = None
                    tags_hint: List[str] = []
                    if channel:
                        tags_hint.append(f"channel:{channel}")
                    if post is not None:
                        tags_hint.append(f"post:{post}")
                    if channel and post is not None:
                        title_hint = f"{channel} {post}"
                    elif post is not None:
                        title_hint = f"post:{post}"
                    else:
                        title_hint = downloaded_path.stem

                    _emit_local_file(
                        downloaded_path=downloaded_path,
                        source=str(url),
                        title_hint=title_hint,
                        tags_hint=tags_hint,
                        media_kind_hint="file",
                        full_metadata=telegram_info,
                        provider_hint="telegram",
                    )
                    downloaded_count += 1
                    debug("✓ Downloaded via Telegram provider and emitted")
                    continue

                result_obj = _download_direct_file(url, final_output_dir, quiet=quiet_mode)
                file_path = None
                if hasattr(result_obj, "path"):

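A minimal, standalone sketch of the t.me host check used in the download-file hunk above (the example URLs are illustrative; the detection logic mirrors the diff):

from urllib.parse import urlparse

def is_telegram_url(url: str) -> bool:
    try:
        host = (urlparse(str(url)).hostname or "").lower().strip()
    except Exception:
        return False
    # Matches t.me, telegram.me, and t.me subdomains
    return host in {"t.me", "telegram.me"} or host.endswith(".t.me")

assert is_telegram_url("https://t.me/some_channel/123")
assert not is_telegram_url("https://example.com/file.zip")
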
@@ -37,6 +37,7 @@ class PipeObject:
    """
    hash: str
    store: str
    provider: Optional[str] = None
    tag: List[str] = field(default_factory=list)
    title: Optional[str] = None
    url: Optional[str] = None
@@ -90,6 +91,7 @@ class PipeObject:
        # Prepare display values
        hash_display = str(self.hash or "N/A")
        store_display = str(self.store or "N/A")
        provider_display = str(self.provider or "N/A")
        title_display = str(self.title or "N/A")
        tag_display = ", ".join(self.tag[:3]) if self.tag else "[]"
        if len(self.tag) > 3:
@@ -134,6 +136,7 @@ class PipeObject:
        rows = [
            ("Hash", hash_display),
            ("Store", store_display),
            ("Provider", provider_display),
            ("Title", title_display),
            ("Tag", tag_display),
            ("URL", str(url_display)),
@@ -226,6 +229,9 @@ class PipeObject:
            "hash": self.hash,
            "store": self.store,
        }

        if self.provider:
            data["provider"] = self.provider

        if self.tag:
            data["tag"] = self.tag

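As a rough usage sketch of the new optional provider field (a stand-in dataclass written for illustration, since the real PipeObject has more fields and methods than this diff shows; only the fields and the serialize-when-set behavior are taken from the hunks above):

from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class PipeObjectSketch:
    hash: str
    store: str
    provider: Optional[str] = None
    tag: List[str] = field(default_factory=list)
    title: Optional[str] = None
    url: Optional[str] = None

    def to_dict(self) -> dict:
        data = {"hash": self.hash, "store": self.store}
        # provider is only serialized when set, mirroring the diff above
        if self.provider:
            data["provider"] = self.provider
        if self.tag:
            data["tag"] = self.tag
        return data

obj = PipeObjectSketch(hash="abc123", store="matrix", provider="telegram", title="example")
print(obj.to_dict())  # {'hash': 'abc123', 'store': 'matrix', 'provider': 'telegram'}
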
@@ -8,6 +8,7 @@ yt-dlp[default]>=2023.11.0
requests>=2.31.0
httpx>=0.25.0
ffmpeg-python>=0.2.0
telethon>=1.36.0

# Document and data handling
pypdf>=3.0.0