@@ -542,6 +542,53 @@ class HydrusNetwork:
        }

        return self._post("/add_tags/add_tags", data=body)

    def mutate_tags_by_key(
        self,
        hash: Union[str, Iterable[str]],
        service_key: str,
        *,
        add_tags: Optional[Iterable[str]] = None,
        remove_tags: Optional[Iterable[str]] = None,
    ) -> dict[str, Any]:
        """Add or remove tags with a single /add_tags/add_tags call.

        Hydrus Client API: POST /add_tags/add_tags
        Use `service_keys_to_actions_to_tags` so the client can apply additions
        and removals in a single request (action '0' = add, '1' = remove).
        """
        hash_list = self._ensure_hashes(hash)

        def _clean(tags: Optional[Iterable[str]]) -> list[str]:
            if not tags:
                return []
            clean_list: list[str] = []
            for tag in tags:
                if not isinstance(tag, str):
                    continue
                text = tag.strip()
                if not text:
                    continue
                clean_list.append(text)
            return clean_list

        actions: dict[str, list[str]] = {}
        adds = _clean(add_tags)
        removes = _clean(remove_tags)
        if adds:
            actions["0"] = adds
        if removes:
            actions["1"] = removes
        if not actions:
            return {}
        body = {
            "hashes": hash_list,
            "service_keys_to_actions_to_tags": {
                str(service_key): actions
            },
        }
        return self._post("/add_tags/add_tags", data=body)
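For reference, a minimal sketch of the body this method ends up posting (hash
and service key values here are illustrative; the action codes are the ones the
docstring names, '0' = add, '1' = remove):

    body = {
        "hashes": ["ad6d3599a6c489a575eb19c026face97a9cd6579e74728b0ce94a601d232f3c3"],
        "service_keys_to_actions_to_tags": {
            "6c6f63616c2074616773": {  # tag service key (hypothetical value)
                "0": ["creator:somebody", "series:example"],  # add
                "1": ["to-sort"],                             # remove
            }
        },
    }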

    def associate_url(self,
                      file_hashes: Union[str, Iterable[str]],

@@ -7,13 +7,15 @@ API docs: https://docs.alldebrid.com/#general-informations
from __future__ import annotations

import json
-import sys
-
-from SYS.logger import log, debug
-import time
+import logging
+import sys
+import time

from typing import Any, Dict, Optional, Set, List, Sequence, Tuple
from urllib.parse import urlparse

+from SYS.logger import log, debug
+from SYS.rich_display import show_provider_config_panel
from .HTTP import HTTPClient

logger = logging.getLogger(__name__)

@@ -1035,6 +1037,7 @@ def unlock_link_cmdlet(result: Any, args: Sequence[str], config: Dict[str, Any])
    api_key = _get_alldebrid_api_key_from_config(config)

    if not api_key:
+       show_provider_config_panel("alldebrid", ["api_key"])
        log(
            "AllDebrid API key not configured (provider.alldebrid.api_key)",
            file=sys.stderr

CLI.py
@@ -17,6 +17,7 @@ import threading
import time
import uuid
from copy import deepcopy
+from datetime import datetime
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, TextIO, Tuple, cast

@@ -57,7 +58,6 @@ from SYS.logger import debug, set_debug
from SYS.worker_manager import WorkerManager

from SYS.cmdlet_catalog import (
    ensure_registry_loaded,
    get_cmdlet_arg_choices,
    get_cmdlet_arg_flags,
    get_cmdlet_metadata,

@@ -871,7 +871,7 @@ class CmdletCompleter(Completer):
    ) -> Set[str]:
        """Return logical argument names already used in this cmdlet stage.

-       Example: if the user has typed `download-media -url ...`, then `url`
+       Example: if the user has typed `download-file -url ...`, then `url`
        is considered used and should not be suggested again (even as `--url`).
        """
        arg_flags = CmdletIntrospection.cmdlet_args(cmd_name, config)

@@ -970,8 +970,9 @@ class CmdletCompleter(Completer):
            )
            if choices:
                for choice in choices:
                    if choice.lower().startswith(current_token):
                        yield Completion(choice, start_position=-len(current_token))
+               # Example: if the user has typed `download-file -url ...`, then `url`
+               # is considered used and should not be suggested again (even as `--url`).
                return

        arg_names = CmdletIntrospection.cmdlet_args(cmd_name, config)

@@ -1347,8 +1348,6 @@ class CmdletExecutor:
        from SYS import pipeline as ctx
        from cmdlet import REGISTRY

        ensure_registry_loaded()

        # REPL guard: stage-local selection tables should not leak across independent
        # commands. @ selection can always re-seed from the last result table.
        try:

@@ -1732,11 +1731,9 @@ class CmdletExecutor:
        selectable_commands = {
            "search-file",
            "download-data",
-           "download-media",
            "download-file",
            "search_file",
            "download_data",
-           "download_media",
            "download_file",
            ".config",
            ".worker",

@@ -1924,14 +1921,14 @@ class PipelineExecutor:
        return stages

    @staticmethod
-   def _validate_download_media_relationship_order(stages: List[List[str]]) -> bool:
-       """Guard against running add-relationship on unstored download-media results.
+   def _validate_download_file_relationship_order(stages: List[List[str]]) -> bool:
+       """Guard against running add-relationship on unstored download-file results.

        Intended UX:
-           download-media ... | add-file -store <store> | add-relationship
+           download-file ... | add-file -store <store> | add-relationship

        Rationale:
-           download-media outputs items that may not yet have a stable store+hash.
+           download-file outputs items that may not yet have a stable store+hash.
            add-relationship is designed to operate in store/hash mode.
        """

@@ -1944,14 +1941,14 @@ class PipelineExecutor:
                continue
            names.append(_norm(stage[0]))

-       dl_idxs = [i for i, n in enumerate(names) if n == "download-media"]
+       dl_idxs = [i for i, n in enumerate(names) if n == "download-file"]
        rel_idxs = [i for i, n in enumerate(names) if n == "add-relationship"]
        add_file_idxs = [i for i, n in enumerate(names) if n == "add-file"]

        if not dl_idxs or not rel_idxs:
            return True

-       # If download-media is upstream of add-relationship, require an add-file in between.
+       # If download-file is upstream of add-relationship, require an add-file in between.
        for rel_i in rel_idxs:
            dl_before = [d for d in dl_idxs if d < rel_i]
            if not dl_before:

@@ -1959,9 +1956,9 @@ class PipelineExecutor:
            dl_i = max(dl_before)
            if not any(dl_i < a < rel_i for a in add_file_idxs):
                print(
-                   "Pipeline order error: when using download-media with add-relationship, "
+                   "Pipeline order error: when using download-file with add-relationship, "
                    "add-relationship must come after add-file (so items are stored and have store+hash).\n"
-                   "Example: download-media <...> | add-file -store <store> | add-relationship\n"
+                   "Example: download-file <...> | add-file -store <store> | add-relationship\n"
                )
                return False
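A quick sketch of how the index check behaves (hypothetical stage lists; _norm
is assumed to lower-case and canonicalize command names):

    stages = [["download-file", "-url", "..."], ["add-file", "-store", "local"], ["add-relationship"]]
    # names         -> ["download-file", "add-file", "add-relationship"]
    # dl_idxs = [0], add_file_idxs = [1], rel_idxs = [2]
    # For rel_i = 2: dl_i = 0 and any(0 < 1 < 2) is True, so the pipeline passes.
    # Remove the add-file stage and add_file_idxs becomes [], the any() fails,
    # and the error above is printed.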

@@ -2238,6 +2235,37 @@ class PipelineExecutor:

        return False

    @staticmethod
    def _summarize_stage_text(stage_tokens: Sequence[str], limit: int = 140) -> str:
        combined = " ".join(str(tok) for tok in stage_tokens if tok is not None).strip()
        if not combined:
            return ""
        normalized = re.sub(r"\s+", " ", combined)
        if len(normalized) <= limit:
            return normalized
        return normalized[:limit - 3].rstrip() + "..."

    @staticmethod
    def _log_pipeline_event(
        worker_manager: Any,
        worker_id: Optional[str],
        message: str,
    ) -> None:
        if not worker_manager or not worker_id or not message:
            return
        try:
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        except Exception:
            timestamp = ""
        if timestamp:
            text = f"{timestamp} - PIPELINE - {message}"
        else:
            text = f"PIPELINE - {message}"
        try:
            worker_manager.append_stdout(worker_id, text + "\n", channel="log")
        except Exception:
            pass
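With these helpers in place, a run leaves worker-log lines shaped like this
(timestamps follow the strftime format above; the messages are the ones the
executor emits further down in this diff):

    2025-01-01 12:00:00 - PIPELINE - Pipeline start: download-file -url ... | add-file -store local
    2025-01-01 12:00:01 - PIPELINE - Stage 1/2 start: download-file -url ...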

    @staticmethod
    def _maybe_open_url_selection(
        current_table: Any,

@@ -2571,11 +2599,11 @@ class PipelineExecutor:

        if not stages:
            if table_type == "youtube":
-               print("Auto-running YouTube selection via download-media")
-               stages.append(["download-media"])
+               print("Auto-running YouTube selection via download-file")
+               stages.append(["download-file"])
            elif table_type == "bandcamp":
-               print("Auto-running Bandcamp selection via download-media")
-               stages.append(["download-media"])
+               print("Auto-running Bandcamp selection via download-file")
+               stages.append(["download-file"])
            elif table_type == "internetarchive":
                print("Auto-loading Internet Archive item via download-file")
                stages.append(["download-file"])

@@ -2594,32 +2622,24 @@ class PipelineExecutor:
        first_cmd = stages[0][0] if stages and stages[0] else None
        if table_type == "soulseek" and first_cmd not in (
            "download-file",
-           "download-media",
-           "download_media",
            ".pipe",
        ):
            debug("Auto-inserting download-file after Soulseek selection")
            stages.insert(0, ["download-file"])
        if table_type == "youtube" and first_cmd not in (
-           "download-media",
-           "download_media",
            "download-file",
            ".pipe",
        ):
-           debug("Auto-inserting download-media after YouTube selection")
-           stages.insert(0, ["download-media"])
+           debug("Auto-inserting download-file after YouTube selection")
+           stages.insert(0, ["download-file"])
        if table_type == "bandcamp" and first_cmd not in (
-           "download-media",
-           "download_media",
            "download-file",
            ".pipe",
        ):
-           print("Auto-inserting download-media after Bandcamp selection")
-           stages.insert(0, ["download-media"])
+           print("Auto-inserting download-file after Bandcamp selection")
+           stages.insert(0, ["download-file"])
        if table_type == "internetarchive" and first_cmd not in (
            "download-file",
-           "download-media",
-           "download_media",
            ".pipe",
        ):
            debug(

@@ -2628,16 +2648,12 @@ class PipelineExecutor:
            stages.insert(0, ["download-file"])
        if table_type == "podcastindex.episodes" and first_cmd not in (
            "download-file",
-           "download-media",
-           "download_media",
            ".pipe",
        ):
            print("Auto-inserting download-file after PodcastIndex episode selection")
            stages.insert(0, ["download-file"])
        if table_type == "libgen" and first_cmd not in (
            "download-file",
-           "download-media",
-           "download_media",
            ".pipe",
        ):
            print("Auto-inserting download-file after Libgen selection")

@@ -2814,6 +2830,12 @@ class PipelineExecutor:
            pipeline_text=pipeline_text,
            config=config
        )
+       if pipeline_session and worker_manager:
+           self._log_pipeline_event(
+               worker_manager,
+               pipeline_session.worker_id,
+               f"Pipeline start: {pipeline_text or '(empty pipeline)'}",
+           )
        raw_stage_texts = self._get_raw_stage_texts(ctx)
        self._maybe_enable_background_notifier(
            worker_manager,

@@ -2843,8 +2865,8 @@ class PipelineExecutor:
        if initial_piped is not None:
            piped_result = initial_piped

-       # REPL guard: prevent add-relationship before add-file for download-media pipelines.
-       if not self._validate_download_media_relationship_order(stages):
+       # REPL guard: prevent add-relationship before add-file for download-file pipelines.
+       if not self._validate_download_file_relationship_order(stages):
            pipeline_status = "failed"
            pipeline_error = "Invalid pipeline order"
            return

@@ -3144,11 +3166,11 @@ class PipelineExecutor:
        if filter_spec is None:
            if stage_index + 1 >= len(stages):
                if table_type == "youtube":
-                   print("Auto-running YouTube selection via download-media")
-                   stages.append(["download-media", *stage_args])
+                   print("Auto-running YouTube selection via download-file")
+                   stages.append(["download-file", *stage_args])
                elif table_type == "bandcamp":
-                   print("Auto-running Bandcamp selection via download-media")
-                   stages.append(["download-media"])
+                   print("Auto-running Bandcamp selection via download-file")
+                   stages.append(["download-file"])
                elif table_type == "internetarchive":
                    print("Auto-loading Internet Archive item via download-file")
                    stages.append(["download-file"])

@@ -3161,56 +3183,53 @@ class PipelineExecutor:
            else:
                if table_type == "soulseek" and next_cmd not in (
                    "download-file",
-                   "download-media",
-                   "download_media",
                    ".pipe",
                ):
                    debug("Auto-inserting download-file after Soulseek selection")
                    stages.insert(stage_index + 1, ["download-file"])
                if table_type == "youtube" and next_cmd not in (
-                   "download-media",
-                   "download_media",
                    "download-file",
                    ".pipe",
                ):
-                   debug("Auto-inserting download-media after YouTube selection")
-                   stages.insert(stage_index + 1, ["download-media"])
+                   debug("Auto-inserting download-file after YouTube selection")
+                   stages.insert(stage_index + 1, ["download-file"])
                if table_type == "bandcamp" and next_cmd not in (
-                   "download-media",
-                   "download_media",
                    "download-file",
                    ".pipe",
                ):
-                   print("Auto-inserting download-media after Bandcamp selection")
-                   stages.insert(stage_index + 1, ["download-media"])
+                   print("Auto-inserting download-file after Bandcamp selection")
+                   stages.insert(stage_index + 1, ["download-file"])
                if table_type == "internetarchive" and next_cmd not in (
                    "download-file",
-                   "download-media",
-                   "download_media",
                    ".pipe",
                ):
                    debug("Auto-inserting download-file after Internet Archive selection")
                    stages.insert(stage_index + 1, ["download-file"])
                if table_type == "podcastindex.episodes" and next_cmd not in (
                    "download-file",
-                   "download-media",
-                   "download_media",
                    ".pipe",
                ):
                    print("Auto-inserting download-file after PodcastIndex episode selection")
                    stages.insert(stage_index + 1, ["download-file"])
                if table_type == "libgen" and next_cmd not in (
                    "download-file",
-                   "download-media",
-                   "download_media",
                    ".pipe",
                ):
                    print("Auto-inserting download-file after Libgen selection")
                    stages.insert(stage_index + 1, ["download-file"])
                continue

        ensure_registry_loaded()
        cmd_fn = REGISTRY.get(cmd_name)
        if not cmd_fn:
            try:
                mod = import_cmd_module(cmd_name)
                data = getattr(mod, "CMDLET", None) if mod else None
                if data and hasattr(data, "exec") and callable(getattr(data, "exec")):
                    run_fn = getattr(data, "exec")
                    REGISTRY[cmd_name] = run_fn
                    cmd_fn = run_fn
            except Exception:
                cmd_fn = None
        if not cmd_fn:
            print(f"Unknown command: {cmd_name}\n")
            pipeline_status = "failed"

@@ -3226,6 +3245,14 @@ class PipelineExecutor:
            )

        stage_worker_id = stage_session.worker_id if stage_session else None
+       stage_summary = self._summarize_stage_text(stage_tokens)
+       if pipeline_session and worker_manager:
+           summary_text = stage_summary or cmd_name
+           self._log_pipeline_event(
+               worker_manager,
+               pipeline_session.worker_id,
+               f"Stage {stage_index + 1}/{len(stages)} start: {summary_text}",
+           )

        # Estimate how many per-item tasks this pipe will run.
        pipe_idx = pipe_index_by_stage.get(stage_index)

@@ -3433,7 +3460,7 @@ class PipelineExecutor:
        else:
            piped_result = None

-       # Some cmdlets (notably download-media format selection) populate a selectable
+       # Some cmdlets (notably download-file format selection) populate a selectable
        # current-stage table without emitting pipeline items. In these cases, render
        # the table and pause the pipeline so the user can pick @N.
        stage_table = (

@@ -3458,19 +3485,16 @@ class PipelineExecutor:
        except Exception:
            stage_table_source = ""
        if ((not stage_is_last) and (not emits) and cmd_name in {
-           "download-media",
-           "download_media",
+           "download-file",
            "download-data",
            "download_data",
        } and stage_table is not None
            and (stage_table_type in {
                "ytdlp.formatlist",
-               "download-media",
-               "download_media",
+               "download-file",
                "bandcamp",
                "youtube",
-           } or stage_table_source in {"download-media",
-                                       "download_media"}
+           } or stage_table_source in {"download-file"}
                or stage_table_type in {"internetarchive.formats"}
                or stage_table_source in {"download-file"})):
            try:

@@ -3671,6 +3695,18 @@ class PipelineExecutor:
            pipeline_error = f"{stage_label} error: {exc}"
            return
        finally:
+           if pipeline_session and worker_manager:
+               status_label = (
+                   "completed" if stage_status == "completed" else "failed"
+               )
+               msg = f"{stage_label} {status_label}"
+               if stage_error and stage_status != "completed":
+                   msg += f": {stage_error}"
+               self._log_pipeline_event(
+                   worker_manager,
+                   pipeline_session.worker_id,
+                   msg,
+               )
            if progress_ui is not None and pipe_idx is not None:
                try:
                    progress_ui.finish_pipe(

@@ -3820,6 +3856,17 @@ class PipelineExecutor:
            ctx.set_current_stage_table(None)
        except Exception:
            pass
+       if pipeline_session and worker_manager:
+           final_msg = f"Pipeline {pipeline_status}"
+           if pipeline_error:
+               final_msg += f": {pipeline_error}"
+           else:
+               final_msg += " (ok)"
+           self._log_pipeline_event(
+               worker_manager,
+               pipeline_session.worker_id,
+               final_msg,
+           )
        if pipeline_session:
            pipeline_session.close(
                status=pipeline_status,

@@ -1987,7 +1987,7 @@ mp.register_script_message('medios-download-pick-store', function(json)
    local url = tostring(_pending_download.url)
    local fmt = tostring(_pending_download.format)

-   local pipeline_cmd = 'download-media -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt)
+   local pipeline_cmd = 'download-file -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt)
        .. ' | add-file -store ' .. quote_pipeline_arg(store)

    if not _run_pipeline_detached(pipeline_cmd) then

@@ -2015,7 +2015,7 @@ mp.register_script_message('medios-download-pick-path', function()
    local url = tostring(_pending_download.url)
    local fmt = tostring(_pending_download.format)

-   local pipeline_cmd = 'download-media -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt)
+   local pipeline_cmd = 'download-file -url ' .. quote_pipeline_arg(url) .. ' -format ' .. quote_pipeline_arg(fmt)
        .. ' | add-file -path ' .. quote_pipeline_arg(folder)

    if not _run_pipeline_detached(pipeline_cmd) then

@@ -343,7 +343,7 @@ class MPV:
        def _q(s: str) -> str:
            return '"' + s.replace("\\", "\\\\").replace('"', '\\"') + '"'

-       pipeline = f"download-media -url {_q(url)} -format {_q(fmt)}"
+       pipeline = f"download-file -url {_q(url)} -format {_q(fmt)}"
        if store:
            pipeline += f" | add-file -store {_q(store)}"
        else:

@@ -1,11 +1,17 @@
from __future__ import annotations

-from pathlib import Path
+import hashlib
+import sys
-from typing import Any, Dict, Iterable, List, Optional
+import time
+from pathlib import Path
+from typing import Any, Dict, Iterable, List, Optional, Callable, Tuple
+from urllib.parse import urlparse

from API.HTTP import HTTPClient
from API.alldebrid import AllDebridClient, parse_magnet_or_hash, is_magnet_link, is_torrent_file
from ProviderCore.base import Provider, SearchResult
from ProviderCore.download import sanitize_filename
from SYS.download import _download_direct_file
from SYS.logger import log


@@ -53,7 +59,356 @@ def _get_debrid_api_key(config: Dict[str, Any]) -> Optional[str]:
    return None


def _consume_bencoded_value(data: bytes, pos: int) -> int:
    if pos >= len(data):
        raise ValueError("Unexpected end of bencode")
    token = data[pos:pos + 1]
    if token == b"i":
        end = data.find(b"e", pos + 1)
        if end == -1:
            raise ValueError("Unterminated integer")
        return end + 1
    if token == b"l" or token == b"d":
        cursor = pos + 1
        while cursor < len(data):
            if data[cursor:cursor + 1] == b"e":
                return cursor + 1
            cursor = _consume_bencoded_value(data, cursor)
        raise ValueError("Unterminated list/dict")
    if token and b"0" <= token <= b"9":
        colon = data.find(b":", pos)
        if colon == -1:
            raise ValueError("Invalid string length")
        length = int(data[pos:colon])
        return colon + 1 + length
    raise ValueError("Unknown bencode token")


def _info_hash_from_torrent_bytes(data: bytes) -> Optional[str]:
    needle = b"4:info"
    idx = data.find(needle)
    if idx == -1:
        return None

    start = idx + len(needle)
    try:
        end = _consume_bencoded_value(data, start)
    except ValueError:
        return None

    info_bytes = data[start:end]
    try:
        return hashlib.sha1(info_bytes).hexdigest()
    except Exception:
        return None
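A tiny self-check of the info-hash walk, using a hand-built minimal torrent
(a real client would never emit an empty pieces string, but it exercises the
integer, string, and dict branches of the bencode scanner):

    import hashlib

    raw = b"d4:infod4:name3:foo12:piece lengthi16384e6:pieces0:ee"
    info = b"d4:name3:foo12:piece lengthi16384e6:pieces0:e"
    assert _info_hash_from_torrent_bytes(raw) == hashlib.sha1(info).hexdigest()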

def _fetch_torrent_bytes(target: str) -> Optional[bytes]:
    path_obj = Path(str(target))
    try:
        if path_obj.exists() and path_obj.is_file():
            return path_obj.read_bytes()
    except Exception:
        pass

    try:
        parsed = urlparse(target)
    except Exception:
        parsed = None

    if parsed is None or not parsed.scheme or parsed.scheme.lower() not in {"http", "https"}:
        return None

    if not target.lower().endswith(".torrent"):
        return None

    try:
        with HTTPClient(timeout=30.0) as client:
            response = client.get(target)
            return response.content
    except Exception as exc:
        log(f"Failed to download .torrent from {target}: {exc}", file=sys.stderr)
        return None


def resolve_magnet_spec(target: str) -> Optional[str]:
    """Resolve a magnet/hash/torrent URL into a magnet/hash string."""
    candidate = str(target or "").strip()
    if not candidate:
        return None

    parsed = parse_magnet_or_hash(candidate)
    if parsed:
        return parsed

    if is_torrent_file(candidate):
        torrent_bytes = _fetch_torrent_bytes(candidate)
        if not torrent_bytes:
            return None
        hash_value = _info_hash_from_torrent_bytes(torrent_bytes)
        if hash_value:
            return hash_value
    return None


def _dispatch_alldebrid_magnet_search(
    magnet_id: int,
    config: Dict[str, Any],
) -> None:
    try:
        from cmdlet.search_file import CMDLET as _SEARCH_FILE_CMDLET

        exec_fn = getattr(_SEARCH_FILE_CMDLET, "exec", None)
        if callable(exec_fn):
            exec_fn(
                None,
                ["-provider", "alldebrid", f"ID={magnet_id}"],
                config,
            )
    except Exception:
        pass
    log(f"[alldebrid] Sent magnet {magnet_id} to AllDebrid for download", file=sys.stderr)


def prepare_magnet(
    magnet_spec: str,
    config: Dict[str, Any],
) -> tuple[Optional[AllDebridClient], Optional[int]]:
    api_key = _get_debrid_api_key(config or {})
    if not api_key:
        try:
            from ProviderCore.registry import show_provider_config_panel

            show_provider_config_panel("alldebrid", ["api_key"])
        except Exception:
            pass
        log("AllDebrid API key not configured (provider.alldebrid.api_key)", file=sys.stderr)
        return None, None

    try:
        client = AllDebridClient(api_key)
    except Exception as exc:
        log(f"Failed to initialize AllDebrid client: {exc}", file=sys.stderr)
        return None, None

    try:
        magnet_info = client.magnet_add(magnet_spec)
        magnet_id = int(magnet_info.get("id", 0))
        if magnet_id <= 0:
            log(f"AllDebrid magnet submission failed: {magnet_info}", file=sys.stderr)
            return None, None
    except Exception as exc:
        log(f"Failed to submit magnet to AllDebrid: {exc}", file=sys.stderr)
        return None, None

    _dispatch_alldebrid_magnet_search(magnet_id, config)
    return client, magnet_id


def _flatten_files_with_relpath(items: Any) -> Iterable[Dict[str, Any]]:
    for node in AllDebrid._flatten_files(items):
        enriched = dict(node)
        rel = node.get("_relpath") or node.get("relpath")
        if not rel:
            name = node.get("n") or node.get("name")
            rel = str(name or "").strip()
        enriched["relpath"] = rel
        yield enriched


def download_magnet(
    magnet_spec: str,
    original_url: str,
    final_output_dir: Path,
    config: Dict[str, Any],
    progress: Any,
    quiet_mode: bool,
    path_from_result: Callable[[Any], Path],
    on_emit: Callable[[Path, str, str, Dict[str, Any]], None],
) -> tuple[int, Optional[int]]:
    client, magnet_id = prepare_magnet(magnet_spec, config)
    if client is None or magnet_id is None:
        return 0, None

    wait_timeout = 300
    try:
        streaming_config = config.get("streaming", {}) if isinstance(config, dict) else {}
        wait_timeout = int(streaming_config.get("wait_timeout", 300))
    except Exception:
        wait_timeout = 300

    elapsed = 0
    while elapsed < wait_timeout:
        try:
            status = client.magnet_status(magnet_id)
        except Exception as exc:
            log(f"Failed to read magnet status {magnet_id}: {exc}", file=sys.stderr)
            return 0, magnet_id
        ready = bool(status.get("ready")) or status.get("statusCode") == 4
        if ready:
            break
        time.sleep(5)
        elapsed += 5
    else:
        log(f"AllDebrid magnet {magnet_id} timed out after {wait_timeout}s", file=sys.stderr)
        return 0, magnet_id

    try:
        files_result = client.magnet_links([magnet_id])
    except Exception as exc:
        log(f"Failed to list AllDebrid magnet files: {exc}", file=sys.stderr)
        return 0, magnet_id

    magnet_files = files_result.get(str(magnet_id), {}) if isinstance(files_result, dict) else {}
    file_nodes = magnet_files.get("files") if isinstance(magnet_files, dict) else []
    if not file_nodes:
        log(f"AllDebrid magnet {magnet_id} produced no files", file=sys.stderr)
        return 0, magnet_id

    downloaded = 0
    for node in _flatten_files_with_relpath(file_nodes):
        file_url = str(node.get("link") or "").strip()
        file_name = str(node.get("name") or "").strip()
        relpath = str(node.get("relpath") or file_name).strip()
        if not file_url or not relpath:
            continue

        target_path = final_output_dir
        rel_path_obj = Path(relpath)
        output_dir = target_path
        if rel_path_obj.parent:
            output_dir = target_path / rel_path_obj.parent
        try:
            output_dir.mkdir(parents=True, exist_ok=True)
        except Exception:
            output_dir = target_path

        try:
            result_obj = _download_direct_file(
                file_url,
                output_dir,
                quiet=quiet_mode,
                suggested_filename=rel_path_obj.name,
                pipeline_progress=progress,
            )
        except Exception as exc:
            log(f"Failed to download AllDebrid file {file_url}: {exc}", file=sys.stderr)
            continue

        downloaded_path = path_from_result(result_obj)
        metadata = {
            "magnet_id": magnet_id,
            "relpath": relpath,
            "name": file_name,
        }
        on_emit(downloaded_path, file_url or original_url, relpath, metadata)
        downloaded += 1

    return downloaded, magnet_id


def expand_folder_item(
    item: Any,
    get_search_provider: Optional[Callable[[str, Dict[str, Any]], Any]],
    config: Dict[str, Any],
) -> Tuple[List[Any], Optional[str]]:
    table = getattr(item, "table", None) if not isinstance(item, dict) else item.get("table")
    media_kind = getattr(item, "media_kind", None) if not isinstance(item, dict) else item.get("media_kind")
    full_metadata = getattr(item, "full_metadata", None) if not isinstance(item, dict) else item.get("full_metadata")
    target = None
    if isinstance(item, dict):
        target = item.get("path") or item.get("url")
    else:
        target = getattr(item, "path", None) or getattr(item, "url", None)

    if (str(table or "").lower() != "alldebrid") or (str(media_kind or "").lower() != "folder"):
        return [], None

    magnet_id = None
    if isinstance(full_metadata, dict):
        magnet_id = full_metadata.get("magnet_id")
    if magnet_id is None and isinstance(target, str) and target.lower().startswith("alldebrid:magnet:"):
        try:
            magnet_id = int(target.split(":")[-1])
        except Exception:
            magnet_id = None

    if magnet_id is None or get_search_provider is None:
        return [], None

    provider = get_search_provider("alldebrid", config) if get_search_provider else None
    if provider is None:
        return [], None

    try:
        files = provider.search("*", limit=10_000, filters={"view": "files", "magnet_id": int(magnet_id)})
    except Exception:
        files = []

    if files and len(files) == 1 and getattr(files[0], "media_kind", "") == "folder":
        detail = getattr(files[0], "detail", "")
        return [], str(detail or "unknown")

    expanded: List[Any] = []
    for sr in files:
        expanded.append(sr.to_dict() if hasattr(sr, "to_dict") else sr)
    return expanded, None
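The alldebrid:magnet:<id> fallback above is a plain string split, e.g.:

    target = "alldebrid:magnet:12345"     # hypothetical folder item path
    magnet_id = int(target.split(":")[-1])  # -> 12345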

def adjust_output_dir_for_alldebrid(
    base_output_dir: Path,
    full_metadata: Optional[Dict[str, Any]],
    item: Any,
) -> Path:
    from ProviderCore.download import sanitize_filename as _sf

    output_dir = base_output_dir
    md = full_metadata if isinstance(full_metadata, dict) else {}
    magnet_name = md.get("magnet_name") or md.get("folder")
    if not magnet_name:
        try:
            detail_val = getattr(item, "detail", None) if not isinstance(item, dict) else item.get("detail")
            magnet_name = str(detail_val or "").strip() or None
        except Exception:
            magnet_name = None

    magnet_dir_name = _sf(str(magnet_name)) if magnet_name else ""
    try:
        base_tail = str(Path(output_dir).name or "")
    except Exception:
        base_tail = ""
    base_tail_norm = _sf(base_tail).lower() if base_tail.strip() else ""
    magnet_dir_norm = magnet_dir_name.lower() if magnet_dir_name else ""

    if magnet_dir_name and (not base_tail_norm or base_tail_norm != magnet_dir_norm):
        output_dir = Path(output_dir) / magnet_dir_name

    relpath = md.get("relpath") if isinstance(md, dict) else None
    if (not relpath) and isinstance(md.get("file"), dict):
        relpath = md["file"].get("_relpath")

    if relpath:
        parts = [p for p in str(relpath).replace("\\", "/").split("/") if p and p not in {".", ".."}]
        if magnet_dir_name and parts:
            try:
                if _sf(parts[0]).lower() == magnet_dir_norm:
                    parts = parts[1:]
            except Exception:
                pass
        for part in parts[:-1]:
            output_dir = Path(output_dir) / _sf(part)

    try:
        Path(output_dir).mkdir(parents=True, exist_ok=True)
    except Exception:
        output_dir = base_output_dir

    return output_dir
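A worked example of the path shaping (names are hypothetical): with a base
directory ending in "downloads", magnet_name "My Torrent", and relpath
"My Torrent/disc1/track01.flac", the magnet folder is appended, the duplicated
leading relpath component is dropped, and only the directory parts are joined:

    downloads/My Torrent/disc1/   # track01.flac itself is written by the downloader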

class AllDebrid(Provider):
    # Magnet URIs should be routed through this provider.
    URL = ("magnet:",)
    """Search provider for AllDebrid account content.

    This provider lists and searches the files/magnets already present in the

@@ -311,7 +666,10 @@ class AllDebrid(Provider):
                ],
                full_metadata={
                    "magnet": magnet_status,
-                   "magnet_id": magnet_id
+                   "magnet_id": magnet_id,
+                   "provider": "alldebrid",
+                   "provider_view": "files",
+                   "magnet_name": magnet_name,
                },
            )
        ]

@@ -382,6 +740,8 @@ class AllDebrid(Provider):
                    "magnet_name": magnet_name,
                    "relpath": relpath,
                    "file": file_node,
+                   "provider": "alldebrid",
+                   "provider_view": "files",
                },
            )
        )

@@ -465,7 +825,10 @@ class AllDebrid(Provider):
                ],
                full_metadata={
                    "magnet": magnet,
-                   "magnet_id": magnet_id
+                   "magnet_id": magnet_id,
+                   "provider": "alldebrid",
+                   "provider_view": "folders",
+                   "magnet_name": magnet_name,
                },
            )
        )

@@ -474,3 +837,128 @@ class AllDebrid(Provider):
            break

        return results

    def selector(
        self,
        selected_items: List[Any],
        *,
        ctx: Any,
        stage_is_last: bool = True,
        **_kwargs: Any,
    ) -> bool:
        """Handle AllDebrid `@N` selection by drilling into magnet files."""
        if not stage_is_last:
            return False

        def _as_payload(item: Any) -> Dict[str, Any]:
            if isinstance(item, dict):
                return dict(item)
            try:
                if hasattr(item, "to_dict"):
                    maybe = item.to_dict()  # type: ignore[attr-defined]
                    if isinstance(maybe, dict):
                        return maybe
            except Exception:
                pass
            payload: Dict[str, Any] = {}
            try:
                payload = {
                    "title": getattr(item, "title", None),
                    "path": getattr(item, "path", None),
                    "table": getattr(item, "table", None),
                    "annotations": getattr(item, "annotations", None),
                    "media_kind": getattr(item, "media_kind", None),
                    "full_metadata": getattr(item, "full_metadata", None),
                }
            except Exception:
                payload = {}
            return payload

        chosen: List[Dict[str, Any]] = []
        for item in selected_items or []:
            payload = _as_payload(item)
            meta = payload.get("full_metadata") or payload.get("metadata") or {}
            if not isinstance(meta, dict):
                meta = {}

            ann_set: set[str] = set()
            for ann_source in (payload.get("annotations"), meta.get("annotations")):
                if isinstance(ann_source, (list, tuple, set)):
                    for ann in ann_source:
                        ann_text = str(ann or "").strip().lower()
                        if ann_text:
                            ann_set.add(ann_text)

            media_kind = str(payload.get("media_kind") or meta.get("media_kind") or "").strip().lower()
            is_folder = (media_kind == "folder") or ("folder" in ann_set)
            magnet_id = meta.get("magnet_id")
            if magnet_id is None or (not is_folder):
                continue

            title = str(payload.get("title") or meta.get("magnet_name") or meta.get("name") or "").strip()
            if not title:
                title = f"magnet-{magnet_id}"

            chosen.append({
                "magnet_id": magnet_id,
                "title": title,
            })

        if not chosen:
            return False

        target = chosen[0]
        magnet_id = target.get("magnet_id")
        title = target.get("title") or f"magnet-{magnet_id}"

        try:
            files = self.search("*", limit=200, filters={"view": "files", "magnet_id": magnet_id})
        except Exception as exc:
            print(f"alldebrid selector failed: {exc}\n")
            return True

        try:
            from SYS.result_table import ResultTable
            from SYS.rich_display import stdout_console
        except Exception:
            return True

        table = ResultTable(f"AllDebrid Files: {title}").set_preserve_order(True)
        table.set_table("alldebrid")
        try:
            table.set_table_metadata({"provider": "alldebrid", "view": "files", "magnet_id": magnet_id})
        except Exception:
            pass
        table.set_source_command(
            "search-file",
            ["-provider", "alldebrid", "-open", str(magnet_id), "-query", "*"],
        )

        results_payload: List[Dict[str, Any]] = []
        for r in files or []:
            table.add_result(r)
            try:
                results_payload.append(r.to_dict())
            except Exception:
                results_payload.append(
                    {
                        "table": getattr(r, "table", "alldebrid"),
                        "title": getattr(r, "title", ""),
                        "path": getattr(r, "path", ""),
                        "full_metadata": getattr(r, "full_metadata", None),
                    }
                )

        try:
            ctx.set_last_result_table(table, results_payload)
            ctx.set_current_stage_table(table)
        except Exception:
            pass

        try:
            stdout_console().print()
            stdout_console().print(table)
        except Exception:
            pass

        return True

@@ -13,6 +13,156 @@ from ProviderCore.base import Provider, SearchResult
from ProviderCore.download import sanitize_filename
from SYS.logger import log

# Helper for download-file: render selectable formats for a details URL.
def maybe_show_formats_table(
    *,
    raw_urls: Any,
    piped_items: Any,
    parsed: Dict[str, Any],
    config: Dict[str, Any],
    quiet_mode: bool,
    get_field: Any,
) -> Optional[int]:
    """If input is a single Internet Archive details URL, render a formats table.

    Returns an exit code when handled; otherwise None.
    """
    if quiet_mode:
        return None

    try:
        total_inputs = int(len(raw_urls or []) + len(piped_items or []))
    except Exception:
        total_inputs = 0

    if total_inputs != 1:
        return None

    item = piped_items[0] if piped_items else None
    target = ""
    if item is not None:
        try:
            target = str(get_field(item, "path") or get_field(item, "url") or "").strip()
        except Exception:
            target = ""
    if not target and raw_urls:
        target = str(raw_urls[0]).strip()
    if not target:
        return None

    identifier = ""
    try:
        md = get_field(item, "full_metadata") if item is not None else None
        if isinstance(md, dict):
            identifier = str(md.get("identifier") or "").strip()
    except Exception:
        identifier = ""
    if not identifier:
        try:
            identifier = str(extract_identifier(target) or "").strip()
        except Exception:
            identifier = ""
    if not identifier:
        return None

    # Only show picker for item pages (details); direct download URLs should download immediately.
    try:
        if not is_details_url(target):
            return None
    except Exception:
        return None

    try:
        files = list_download_files(identifier)
    except Exception as exc:
        log(f"download-file: Internet Archive lookup failed: {exc}", file=sys.stderr)
        return 1

    if not files:
        log("download-file: Internet Archive item has no downloadable files", file=sys.stderr)
        return 1

    title = ""
    try:
        title = str(get_field(item, "title") or "").strip() if item is not None else ""
    except Exception:
        title = ""
    table_title = (
        f"Internet Archive: {title}".strip().rstrip(":")
        if title else f"Internet Archive: {identifier}"
    )

    try:
        from SYS.result_table import ResultTable
        from SYS import pipeline as pipeline_context
    except Exception as exc:
        log(f"download-file: ResultTable unavailable: {exc}", file=sys.stderr)
        return 1

    base_args: List[str] = []
    out_arg = parsed.get("path") or parsed.get("output")
    if out_arg:
        base_args.extend(["-path", str(out_arg)])

    table = ResultTable(table_title).set_preserve_order(True)
    table.set_table("internetarchive.formats")
    table.set_source_command("download-file", base_args)

    rows: List[Dict[str, Any]] = []
    for f in files:
        name = str(f.get("name") or "").strip()
        if not name:
            continue
        fmt = str(f.get("format") or "").strip()
        src = str(f.get("source") or "").strip()
        direct_url = str(f.get("direct_url") or "").strip()
        if not direct_url:
            continue

        size_val: Any = f.get("size")
        try:
            size_val = int(size_val) if size_val not in (None, "") else ""
        except Exception:
            pass

        row_item: Dict[str, Any] = {
            "table": "internetarchive",
            "title": fmt or name,
            "path": direct_url,
            "url": direct_url,
            "columns": [
                ("Format", fmt),
                ("Name", name),
                ("Size", size_val),
                ("Source", src),
            ],
            "_selection_args": [direct_url],
            "full_metadata": {
                "identifier": identifier,
                "name": name,
                "format": fmt,
                "source": src,
                "size": f.get("size"),
            },
        }
        rows.append(row_item)
        table.add_result(row_item)

    if not rows:
        log("download-file: no downloadable files found for this item", file=sys.stderr)
        return 1

    try:
        pipeline_context.set_last_result_table(table, rows, subject=item)
        pipeline_context.set_current_stage_table(table)
    except Exception:
        pass

    log("Internet Archive item detected: select a file with @N to download", file=sys.stderr)
    return 0
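For orientation, the selection wiring leans on the ResultRow contract that
appears later in this commit: _selection_args holds the per-row arguments
replayed against the table's source command, so picking @2 re-runs roughly
(argument order illustrative):

    download-file [-path <out>] <direct_url of row 2>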

def _ia() -> Any:
    try:

@@ -322,6 +472,7 @@ class InternetArchive(Provider):
        collection="..."   # optional (upload)
        mediatype="..."    # optional (upload)
    """
+   URL = ("archive.org",)

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        super().__init__(config)

@@ -665,6 +665,7 @@ class Libgen(Provider):
        "libgen.rs",
        "libgen.st",
    )
+   URL = URL_DOMAINS
    """Search provider for Library Genesis books."""

    def search(

@@ -15,6 +15,7 @@ class LOC(Provider):
    """

    URL_DOMAINS = ["www.loc.gov"]
+   URL = URL_DOMAINS

    def validate(self) -> bool:
        return True

@@ -229,6 +229,30 @@ def _archive_id_from_url(url: str) -> str:
            "advancedsearch.php"}:
        return first


def edition_id_from_url(u: str) -> str:
    """Extract an OpenLibrary edition id (OL...M) from a book URL."""
    try:
        p = urlparse(str(u))
        parts = [x for x in (p.path or "").split("/") if x]
    except Exception:
        parts = []
    if len(parts) >= 2 and str(parts[0]).lower() == "books":
        return str(parts[1]).strip()
    return ""


def title_hint_from_url_slug(u: str) -> str:
    """Derive a human-friendly title hint from the URL slug."""
    try:
        p = urlparse(str(u))
        parts = [x for x in (p.path or "").split("/") if x]
        slug = parts[-1] if parts else ""
    except Exception:
        slug = ""
    slug = (slug or "").strip().replace("_", " ")
    return slug or "OpenLibrary"

    return ""

@@ -415,6 +439,7 @@ class OpenLibrary(Provider):
        "openlibrary.org",
        "archive.org",
    )
+   URL = URL_DOMAINS
    """Search provider for OpenLibrary books + Archive.org direct/borrow download."""

    def __init__(self, config: Optional[Dict[str, Any]] = None):

@@ -1419,6 +1444,64 @@ class OpenLibrary(Provider):
            log("[openlibrary] Direct download failed", file=sys.stderr)
            return None

    # --- Convenience helpers for URL-driven downloads (used by download-file) ---

    def search_result_from_url(self, url: str) -> Optional[SearchResult]:
        """Build a minimal SearchResult from a bare OpenLibrary URL."""
        edition_id = edition_id_from_url(url)
        title_hint = title_hint_from_url_slug(url)
        return SearchResult(
            table="openlibrary",
            title=title_hint,
            path=str(url),
            media_kind="book",
            full_metadata={"openlibrary_id": edition_id} if edition_id else {},
        )

    def download_url(
        self,
        url: str,
        output_dir: Path,
        progress_callback: Optional[Callable[[str, int, Optional[int], str], None]] = None,
    ) -> Optional[Dict[str, Any]]:
        """Download a book directly from an OpenLibrary URL.

        Returns a dict with the downloaded path and SearchResult when successful.
        """
        sr = self.search_result_from_url(url)
        if sr is None:
            return None

        downloaded = self.download(sr, output_dir, progress_callback)
        if not downloaded:
            return None

        return {
            "path": Path(downloaded),
            "search_result": sr,
        }

        try:
            if progress_callback is not None:
                progress_callback("step", 0, None, "direct download")
        except Exception:
            pass
        out_path = unique_path(output_dir / f"{safe_title}.pdf")
        ok = download_file(
            pdf_url,
            out_path,
            session=self._session,
            progress_callback=(
                (
                    lambda downloaded, total, label:
                        progress_callback("bytes", downloaded, total, label)
                ) if progress_callback is not None else None
            ),
        )
        if ok:
            return out_path
        log("[openlibrary] Direct download failed", file=sys.stderr)
        return None

        # 2) Borrow flow (credentials required).
        try:
            email, password = self._credential_archive(self.config or {})

@@ -145,7 +145,9 @@ class Telegram(Provider):
        [provider=telegram]
        app_id=
        api_hash=
        bot_token=
    """
+   URL = ("t.me", "telegram.me")

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        super().__init__(config)

@@ -3,7 +3,7 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional, Sequence, Tuple


@dataclass

@@ -53,6 +53,8 @@ class Provider(ABC):
    - validate()
    """

+   URL: Sequence[str] = ()

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        self.config = config or {}
        self.name = self.__class__.__name__.lower()

@@ -107,6 +109,30 @@ class Provider(ABC):
        _ = stage_is_last
        return False

    @classmethod
    def url_patterns(cls) -> Tuple[str, ...]:
        """Return normalized URL patterns that this provider handles."""
        patterns: List[str] = []
        maybe_urls = getattr(cls, "URL", None)
        if isinstance(maybe_urls, (list, tuple)):
            for entry in maybe_urls:
                try:
                    candidate = str(entry or "").strip().lower()
                except Exception:
                    continue
                if candidate:
                    patterns.append(candidate)
        maybe_domains = getattr(cls, "URL_DOMAINS", None)
        if isinstance(maybe_domains, (list, tuple)):
            for entry in maybe_domains:
                try:
                    candidate = str(entry or "").strip().lower()
                except Exception:
                    continue
                if candidate and candidate not in patterns:
                    patterns.append(candidate)
        return tuple(patterns)
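A minimal sketch of the normalization (the class name is illustrative, not from
this codebase):

    class ExampleProvider(Provider):
        URL = ("Magnet:", "")                     # mixed case and empties tolerated
        URL_DOMAINS = ("archive.org", "magnet:")  # overlap is de-duplicated

    # ExampleProvider.url_patterns() -> ("magnet:", "archive.org")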

class SearchProvider(Provider):
    """Compatibility alias for older code.

@@ -68,6 +68,13 @@ def _supports_upload(provider: Provider) -> bool:
    return provider.__class__.upload is not Provider.upload


+def _provider_url_patterns(provider_class: Type[Provider]) -> Sequence[str]:
+   try:
+       return list(provider_class.url_patterns())
+   except Exception:
+       return []


def get_provider(name: str,
                 config: Optional[Dict[str, Any]] = None) -> Optional[Provider]:

@@ -166,26 +173,26 @@ def list_file_providers(config: Optional[Dict[str, Any]] = None) -> Dict[str, bool]:
def match_provider_name_for_url(url: str) -> Optional[str]:
    """Return a registered provider name that claims the URL's domain.

-   Providers can declare domains via a class attribute `URL_DOMAINS` (sequence of strings).
+   Providers can declare domains via class attribute `URL` (preferred) or `URL_DOMAINS`.
    This matcher is intentionally cheap (no provider instantiation, no network).
    """

    raw_url = str(url or "").strip()
    raw_url_lower = raw_url.lower()
    try:
-       parsed = urlparse(str(url))
+       parsed = urlparse(raw_url)
        host = (parsed.hostname or "").strip().lower()
        path = (parsed.path or "").strip()
    except Exception:
        host = ""
        path = ""

    if not host:
        return None

    # Prefer Internet Archive for archive.org links unless the URL clearly refers
    # to a borrow/loan flow (handled by OpenLibrary provider).
    #
    # This keeps direct downloads and item pages routed to `internetarchive`, while
    # preserving OpenLibrary's scripted borrow pipeline for loan/reader URLs.
    if host:
        if host == "openlibrary.org" or host.endswith(".openlibrary.org"):
            return "openlibrary" if "openlibrary" in _PROVIDERS else None

@@ -200,13 +207,19 @@ def match_provider_name_for_url(url: str) -> Optional[str]:
        return "internetarchive" if "internetarchive" in _PROVIDERS else None

    for name, provider_class in _PROVIDERS.items():
-       domains = getattr(provider_class, "URL_DOMAINS", None)
-       if not isinstance(domains, (list, tuple)):
+       domains = _provider_url_patterns(provider_class)
        if not domains:
            continue
+       for d in domains:
+           dom = str(d or "").strip().lower()
+           if not dom:
+               continue
+           if raw_url_lower.startswith(dom):
+               return name
        for d in domains:
            dom = str(d or "").strip().lower()
            if not dom or not host:
                continue
            if host == dom or host.endswith("." + dom):
                return name
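For example, assuming the Libgen provider shown earlier registers under the
name "libgen" with "libgen.rs" among its patterns:

    # host("https://libgen.rs/search.php?req=...") == "libgen.rs"
    #   -> host == dom, so "libgen" is returned
    # host("https://mirror.libgen.rs/...") == "mirror.libgen.rs"
    #   -> host.endswith(".libgen.rs"), so "libgen" is returned as well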

@@ -1,6 +1,7 @@
from __future__ import annotations

from importlib import import_module
from types import ModuleType
from typing import Any, Dict, List, Optional

try:

@@ -21,22 +22,36 @@ def _should_hide_db_args(config: Optional[Dict[str, Any]]) -> bool:
    return False


-try:
-   from cmdlet import REGISTRY
-except Exception:
-   REGISTRY = {}  # type: ignore
+_cmdlet_pkg: ModuleType | None = None


+def _get_cmdlet_package() -> Optional[ModuleType]:
+   global _cmdlet_pkg
+   if _cmdlet_pkg is not None:
+       return _cmdlet_pkg
    try:
-       from cmdnat import register_native_commands as _register_native_commands
+       _cmdlet_pkg = import_module("cmdlet")
    except Exception:
-       _register_native_commands = None
+       _cmdlet_pkg = None
+   return _cmdlet_pkg


+def _get_registry() -> Dict[str, Any]:
+   pkg = _get_cmdlet_package()
+   if pkg is None:
+       return {}
+   return getattr(pkg, "REGISTRY", {}) or {}


def ensure_registry_loaded() -> None:
    """Ensure native commands are registered into REGISTRY (idempotent)."""
-   if _register_native_commands and REGISTRY is not None:
+   pkg = _get_cmdlet_package()
+   if pkg is None:
+       return
+   ensure_fn = getattr(pkg, "ensure_cmdlet_modules_loaded", None)
+   if callable(ensure_fn):
        try:
-           _register_native_commands(REGISTRY)
+           ensure_fn()
        except Exception:
            pass

@@ -105,7 +120,8 @@ def get_cmdlet_metadata(

    if data is None:
        try:
-           reg_fn = (REGISTRY or {}).get(cmd_name.replace("_", "-").lower())
+           registry = _get_registry()
+           reg_fn = registry.get(cmd_name.replace("_", "-").lower())
            if reg_fn:
                owner_mod = getattr(reg_fn, "__module__", "")
                if owner_mod:

@@ -150,7 +166,8 @@ def list_cmdlet_metadata(config: Optional[Dict[str, Any]] = None) -> Dict[str, Dict[str, Any]]:
    """Collect metadata for all registered cmdlet keyed by canonical name."""
    ensure_registry_loaded()
    entries: Dict[str, Dict[str, Any]] = {}
-   for reg_name in (REGISTRY or {}).keys():
+   registry = _get_registry()
+   for reg_name in registry.keys():
        meta = get_cmdlet_metadata(reg_name, config=config)
        canonical = str(reg_name).replace("_", "-").lower()

@@ -103,7 +103,7 @@ class PipeObject:
            return

        # Prefer a stable, human-friendly title:
-       #   "1 - download-media", "2 - download-media", ...
+       #   "1 - download-file", "2 - download-file", ...
        # The index is preserved when possible via `pipe_index` in the PipeObject's extra.
        idx = None
        try:

@@ -875,7 +875,7 @@ class PipelineLiveProgress:

        # IMPORTANT: use the shared stderr Console instance so that any
        # `stderr_console().print(...)` calls from inside cmdlets (e.g. preflight
-       # tables/prompts in download-media) cooperate with Rich Live rendering.
+       # tables/prompts in download-file) cooperate with Rich Live rendering.
        # If we create a separate Console(file=sys.stderr), output will fight for
        # terminal cursor control and appear "blocked"/truncated.
        from SYS.rich_display import stderr_console

@@ -361,6 +361,8 @@ class ResultRow:
    """Arguments to use for this row when selected via @N syntax (e.g., ['-item', '3'])"""
    source_index: Optional[int] = None
    """Original insertion order index (used to map sorted views back to source items)."""
+   payload: Optional[Any] = None
+   """Original object that contributed to this row."""

    def add_column(self, name: str, value: Any) -> None:
        """Add a column to this row."""

@@ -498,6 +500,9 @@ class ResultTable:
        self.table: Optional[str] = None
        """Table type (e.g., 'youtube', 'soulseek') for context-aware selection logic."""

+       self.table_metadata: Dict[str, Any] = {}
+       """Optional provider/table metadata (e.g., provider name, view)."""

        self.value_case: str = "lower"
        """Display-only value casing: 'lower' (default), 'upper', or 'preserve'."""

@@ -525,6 +530,18 @@ class ResultTable:
        self.table = table
        return self

+   def set_table_metadata(self, metadata: Optional[Dict[str, Any]]) -> "ResultTable":
+       """Attach provider/table metadata for downstream selection logic."""
+       self.table_metadata = dict(metadata or {})
+       return self

+   def get_table_metadata(self) -> Dict[str, Any]:
+       """Return attached provider/table metadata (copy to avoid mutation)."""
+       try:
+           return dict(self.table_metadata)
+       except Exception:
+           return {}
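Usage sketch, mirroring the AllDebrid selector earlier in this commit (values
are illustrative):

    table = ResultTable("AllDebrid Files: Example").set_preserve_order(True)
    table.set_table("alldebrid")
    table.set_table_metadata({"provider": "alldebrid", "view": "files", "magnet_id": 42})
    meta = table.get_table_metadata()  # downstream code gets a safe copy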
|
||||
|
||||
def set_no_choice(self, no_choice: bool = True) -> "ResultTable":
|
||||
"""Mark the table as non-interactive (no row numbers, no selection parsing)."""
|
||||
self.no_choice = bool(no_choice)
|
||||
@@ -612,6 +629,9 @@ class ResultTable:
|
||||
new_table.input_options = dict(self.input_options) if self.input_options else {}
|
||||
new_table.no_choice = self.no_choice
|
||||
new_table.table = self.table
|
||||
new_table.table_metadata = (
|
||||
dict(self.table_metadata) if getattr(self, "table_metadata", None) else {}
|
||||
)
|
||||
new_table.header_lines = list(self.header_lines) if self.header_lines else []
|
||||
return new_table
|
||||
|
||||
@@ -712,6 +732,7 @@ class ResultTable:
Self for chaining
"""
row = self.add_row()
row.payload = result

# Handle TagItem from get_tag.py (tag display with index)
if hasattr(result, "__class__") and result.__class__.__name__ == "TagItem":
@@ -738,6 +759,21 @@ class ResultTable:

return self

def get_row_payload(self, row_index: int) -> Optional[Any]:
"""Return the original payload for the row at ``row_index`` if available."""
if 0 <= row_index < len(self.rows):
return getattr(self.rows[row_index], "payload", None)
return None

def get_payloads(self) -> List[Any]:
"""Return the payloads for every row, preserving table order."""
payloads: List[Any] = []
for row in self.rows:
payload = getattr(row, "payload", None)
if payload is not None:
payloads.append(payload)
return payloads

def _add_search_result(self, row: ResultRow, result: Any) -> None:
"""Extract and add SearchResult fields to row."""
# If provider supplied explicit columns, render those and skip legacy defaults

@@ -11,9 +11,11 @@ from __future__ import annotations

import contextlib
import sys
from typing import Any, Iterator, TextIO
from typing import Any, Iterator, Sequence, TextIO

from rich.console import Console
from rich.panel import Panel
from rich.text import Text

# Configure Rich pretty-printing to avoid truncating long strings (hashes/paths).
# This is version-safe: older Rich versions may not support the max_* arguments.
@@ -70,3 +72,33 @@ def capture_rich_output(*, stdout: TextIO, stderr: TextIO) -> Iterator[None]:
finally:
_STDOUT_CONSOLE = previous_stdout
_STDERR_CONSOLE = previous_stderr


def show_provider_config_panel(
provider_name: str,
keys: Sequence[str] | None = None,
*,
config_hint: str = "config.conf"
) -> None:
"""Show a Rich panel explaining how to configure a provider."""

normalized = str(provider_name or "").strip() or "provider"
pre = Text("Add this to your config", style="bold")
footer = Text(
f"Place this block in {config_hint} or config.d/*.conf",
style="dim"
)
body = Text()
body.append(f"[provider={normalized}]\n", style="bold cyan")
for key in keys or []:
body.append(f'{key}=""\n', style="yellow")

stderr_console().print(pre)
stderr_console().print(
Panel(
body,
title=f"{normalized} configuration",
expand=False
)
)
stderr_console().print(footer)

@@ -1389,19 +1389,51 @@ class HydrusNetwork(Store):
if not tags_to_add and not tags_to_remove:
return True

service_key: Optional[str] = None
try:
from API import HydrusNetwork as hydrus_wrapper

service_key = hydrus_wrapper.get_tag_service_key(
client, service_name
)
except Exception:
service_key = None

mutate_success = False
if service_key:
try:
client.mutate_tags_by_key(
file_hash,
service_key,
add_tags=tags_to_add,
remove_tags=tags_to_remove,
)
mutate_success = True
except Exception as exc:
debug(
f"{self._log_prefix()} add_tag: mutate_tags_by_key failed: {exc}"
)

did_any = False
if not mutate_success:
if tags_to_remove:
try:
client.delete_tag(file_hash, tags_to_remove, service_name)
did_any = True
except Exception as exc:
debug(f"{self._log_prefix()} add_tag: delete_tag failed: {exc}")
debug(
f"{self._log_prefix()} add_tag: delete_tag failed: {exc}"
)
if tags_to_add:
try:
client.add_tag(file_hash, tags_to_add, service_name)
did_any = True
except Exception as exc:
debug(f"{self._log_prefix()} add_tag: add_tag failed: {exc}")
debug(
f"{self._log_prefix()} add_tag: add_tag failed: {exc}"
)
else:
did_any = bool(tags_to_add or tags_to_remove)

return did_any
except Exception as exc:

265
TUI.py
@@ -6,7 +6,7 @@ import json
import re
import sys
from pathlib import Path
from typing import Any, List, Optional, Sequence, Tuple
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple

from textual import on, work
from textual.app import App, ComposeResult
@@ -28,6 +28,7 @@ from textual.widgets import (
)
from textual.widgets.option_list import Option


BASE_DIR = Path(__file__).resolve().parent
REPO_ROOT = BASE_DIR
TUI_DIR = REPO_ROOT / "TUI"
@@ -37,7 +38,7 @@ for path in (REPO_ROOT, TUI_DIR):
sys.path.insert(0, str_path)

from TUI.pipeline_runner import PipelineRunResult # type: ignore # noqa: E402
from SYS.result_table import ResultTable # type: ignore # noqa: E402
from SYS.result_table import ResultTable, extract_hash_value, extract_store_value # type: ignore # noqa: E402

from SYS.config import load_config # type: ignore # noqa: E402
from Store.registry import Store as StoreRegistry # type: ignore # noqa: E402
@@ -68,14 +69,23 @@ def _extract_tag_names(emitted: Sequence[Any]) -> List[str]:
try:
if hasattr(obj, "tag_name"):
val = getattr(obj, "tag_name")
if val:
tags.append(str(val))
if val and isinstance(val, str):
tags.append(val)
continue
except Exception:
pass

if isinstance(obj, dict):
for k in ("tag_name", "tag", "name", "value"):
# Prefer explicit tag lists
tag_list = obj.get("tag")
if isinstance(tag_list, (list, tuple)):
for t in tag_list:
if isinstance(t, str) and t.strip():
tags.append(t.strip())
if tag_list:
continue
# Fall back to individual tag_name/value/name strings
for k in ("tag_name", "value", "name"):
v = obj.get(k)
if isinstance(v, str) and v.strip():
tags.append(v.strip())
@@ -84,6 +94,30 @@ def _extract_tag_names(emitted: Sequence[Any]) -> List[str]:
return _dedup_preserve_order(tags)


def _extract_tag_names_from_table(table: Any) -> List[str]:
if not table:
return []
sources: List[Any] = []
get_payloads = getattr(table, "get_payloads", None)
if callable(get_payloads):
try:
payloads = get_payloads()
if payloads:
sources.extend(payloads)
except Exception:
pass
rows = getattr(table, "rows", []) or []
for row in rows:
for col in getattr(row, "columns", []) or []:
if str(getattr(col, "name", "") or "").strip().lower() == "tag":
val = getattr(col, "value", None)
if val:
sources.append({"tag_name": val})
if not sources:
return []
return _extract_tag_names(sources)


class TextPopup(ModalScreen[None]):

def __init__(self, *, title: str, text: str) -> None:
@@ -139,11 +173,15 @@ class TagEditorPopup(ModalScreen[None]):
@work(thread=True)
def _load_tags_background(self) -> None:
app = self.app # PipelineHubApp
tags = self._fetch_tags_from_store()
if not tags:
try:
runner: PipelineRunner = getattr(app, "executor")
cmd = f"@1 | get-tag -emit"
cmd = "@1 | get-tag"
res = runner.run_pipeline(cmd, seeds=self._seeds, isolate=True)
tags = _extract_tag_names(res.emitted)
tags = _extract_tag_names_from_table(getattr(res, "result_table", None))
if not tags:
tags = _extract_tag_names(getattr(res, "emitted", []))
except Exception as exc:
tags = []
try:
@@ -164,6 +202,42 @@ class TagEditorPopup(ModalScreen[None]):
self._editor.text = "\n".join(tags)
self._set_status(f"Loaded {len(tags)} tag(s)")

def _fetch_tags_from_store(self) -> Optional[List[str]]:
if not self._store or not self._hash:
return None
try:
cfg = load_config() or {}
except Exception:
cfg = {}
store_key = str(self._store or "").strip()
hash_value = str(self._hash or "").strip().lower()
if not store_key or not hash_value:
return None
try:
registry = StoreRegistry(config=cfg, suppress_debug=True)
except Exception:
return []
match = None
normalized = store_key.lower()
for name in registry.list_backends():
if str(name or "").strip().lower() == normalized:
match = name
break
if match is None:
return None
try:
backend = registry[match]
except KeyError:
return None
try:
tags, _src = backend.get_tag(hash_value, config=cfg)
if not tags:
return []
filtered = [str(t).strip() for t in tags if str(t).strip()]
return _dedup_preserve_order(filtered)
except Exception:
return None

def _parse_editor_tags(self) -> List[str]:
raw = ""
try:
@@ -207,6 +281,33 @@ class TagEditorPopup(ModalScreen[None]):
desired: List[str]
) -> None:
app = self.app # PipelineHubApp
def _log_message(msg: str) -> None:
if not msg:
return
try:
app.call_from_thread(app._append_log_line, msg)
except Exception:
pass

def _log_pipeline_command(stage: str, cmd: str) -> None:
if not cmd:
return
_log_message(f"tags-save: {stage}: {cmd}")

def _log_pipeline_result(stage: str, result: PipelineRunResult | None) -> None:
if result is None:
return
status = "success" if getattr(result, "success", False) else "failed"
_log_message(f"tags-save: {stage} result: {status}")
error = str(getattr(result, "error", "") or "").strip()
if error:
_log_message(f"tags-save: {stage} error: {error}")
for attr in ("stdout", "stderr"):
raw = str(getattr(result, attr, "") or "").strip()
if not raw:
continue
for line in raw.splitlines():
_log_message(f"tags-save: {stage} {attr}: {line}")
try:
runner: PipelineRunner = getattr(app, "executor")
store_tok = json.dumps(self._store)
@@ -216,8 +317,10 @@ class TagEditorPopup(ModalScreen[None]):

if to_del:
del_args = " ".join(json.dumps(t) for t in to_del)
del_cmd = f"@1 | delete-tag -store {store_tok}{query_chunk} {del_args}"
del_cmd = f"delete-tag -store {store_tok}{query_chunk} {del_args}"
_log_pipeline_command("delete-tag", del_cmd)
del_res = runner.run_pipeline(del_cmd, seeds=self._seeds, isolate=True)
_log_pipeline_result("delete-tag", del_res)
if not getattr(del_res, "success", False):
failures.append(
str(
@@ -231,8 +334,10 @@ class TagEditorPopup(ModalScreen[None]):

if to_add:
add_args = " ".join(json.dumps(t) for t in to_add)
add_cmd = f"@1 | add-tag -store {store_tok}{query_chunk} {add_args}"
add_cmd = f"add-tag -store {store_tok}{query_chunk} {add_args}"
_log_pipeline_command("add-tag", add_cmd)
add_res = runner.run_pipeline(add_cmd, seeds=self._seeds, isolate=True)
_log_pipeline_result("add-tag", add_res)
if not getattr(add_res, "success", False):
failures.append(
str(
@@ -252,14 +357,38 @@ class TagEditorPopup(ModalScreen[None]):
self._set_status(f"Error: {msg}")
return

self._original_tags = list(desired)
reloaded = self._fetch_tags_from_store()
refreshed = reloaded is not None
tags_to_show = list(reloaded or []) if refreshed else list(desired)
self._original_tags = list(tags_to_show)
try:
app.call_from_thread(
self._set_status,
f"Saved (+{len(to_add)}, -{len(to_del)})"
app.call_from_thread(self._apply_loaded_tags, tags_to_show)
except Exception:
self._apply_loaded_tags(tags_to_show)

def _refresh_overlay() -> None:
try:
app.refresh_tag_overlay(
self._store,
self._hash,
tags_to_show,
self._seeds,
)
except Exception:
self._set_status(f"Saved (+{len(to_add)}, -{len(to_del)})")
pass

try:
app.call_from_thread(_refresh_overlay)
except Exception:
_refresh_overlay()

status_msg = f"Saved (+{len(to_add)}, -{len(to_del)})"
if refreshed:
status_msg += f"; loaded {len(tags_to_show)} tag(s)"
try:
app.call_from_thread(self._set_status, status_msg)
except Exception:
self._set_status(status_msg)
except Exception as exc:
try:
app.call_from_thread(
@@ -547,7 +676,7 @@ class PipelineHubApp(App):
"""Apply store/path/tags UI fields to the pipeline text.

Rules (simple + non-destructive):
- If output path is set and the first stage is download-media and has no -path/--path, append -path.
- If output path is set and the first stage is download-file and has no -path/--path, append -path.
- If a store is selected and pipeline has no add-file stage, append add-file -store <store>.
"""
base = str(pipeline_text or "").strip()
@@ -578,11 +707,11 @@ class PipelineHubApp(App):
except Exception:
first_stage_cmd = ""

# Apply -path to download-media first stage (only if missing)
# Apply -path to download-file first stage (only if missing)
if output_path:
first = stages[0]
low = first.lower()
if low.startswith("download-media"
if low.startswith("download-file"
) and " -path" not in low and " --path" not in low:
stages[0] = f"{first} -path {json.dumps(output_path)}"

@@ -594,9 +723,7 @@ class PipelineHubApp(App):
should_auto_add_file = bool(
selected_store and ("add-file" not in low_joined) and (
first_stage_cmd
in {"download-media",
"download-file",
"download-torrent"}
in {"download-file"}
)
)

@@ -714,6 +841,42 @@ class PipelineHubApp(App):
key=str(idx - 1)
)

def refresh_tag_overlay(self,
store_name: str,
file_hash: str,
tags: List[str],
subject: Any) -> None:
"""Update the shared get-tag overlay after manual tag edits."""
if not store_name or not file_hash:
return
try:
from cmdlet.get_tag import _emit_tags_as_table
except Exception:
return

try:
cfg = load_config() or {}
except Exception:
cfg = {}

payload_subject = subject if subject is not None else None
if not isinstance(payload_subject, dict):
payload_subject = {
"store": store_name,
"hash": file_hash,
}

try:
_emit_tags_as_table(
list(tags),
file_hash=file_hash,
store=store_name,
config=cfg,
subject=payload_subject,
)
except Exception:
pass

def _load_cmdlet_names(self) -> None:
try:
ensure_registry_loaded()
@@ -825,6 +988,10 @@ class PipelineHubApp(App):
index = 0

item: Any = None
row_payload: Any = None
row = None
column_store: Optional[str] = None
column_hash: Optional[str] = None

# Prefer mapping displayed table row -> source item.
if self.current_result_table and 0 <= index < len(
@@ -832,30 +999,66 @@ class PipelineHubApp(App):
"rows",
[]) or []):
row = self.current_result_table.rows[index]
row_payload = getattr(row, "payload", None)
src_idx = getattr(row, "source_index", None)
if isinstance(src_idx, int) and 0 <= src_idx < len(self.result_items):
item = self.result_items[src_idx]
for col in getattr(row, "columns", []) or []:
name = str(getattr(col, "name", "") or "").strip().lower()
value = str(getattr(col, "value", "") or "").strip()
if not column_store and name in {"store", "storage", "source", "table"}:
column_store = value
if not column_hash and name in {"hash", "hash_hex", "file_hash", "sha256"}:
column_hash = value

if item is None and 0 <= index < len(self.result_items):
item = self.result_items[index]

store_name = None
file_hash = None
if isinstance(item, dict):
store_name = item.get("store")
file_hash = item.get("hash")
else:
store_name = getattr(item, "store", None)
file_hash = getattr(item, "hash", None)
def _pick_from_candidates(
candidates: List[Any], extractor: Callable[[Any], str]
) -> str:
for candidate in candidates:
if candidate is None:
continue
try:
value = extractor(candidate)
except Exception:
value = ""
if value and str(value).strip():
return str(value).strip()
return ""

store_text = str(store_name).strip() if store_name is not None else ""
hash_text = str(file_hash).strip() if file_hash is not None else ""
candidate_sources: List[Any] = []
if row_payload is not None:
candidate_sources.append(row_payload)
if item is not None:
candidate_sources.append(item)

store_name = _pick_from_candidates(candidate_sources, extract_store_value)
file_hash = _pick_from_candidates(candidate_sources, extract_hash_value)

if not store_name and column_store:
store_name = column_store
if not file_hash and column_hash:
file_hash = column_hash

store_text = str(store_name).strip() if store_name else ""
hash_text = str(file_hash).strip() if file_hash else ""

if not store_text:
# Fallback to UI store selection when item doesn't carry it.
store_text = self._get_selected_store() or ""

return item, (store_text or None), (hash_text or None)
final_item = row_payload if row_payload is not None else item
if final_item is None and (store_text or hash_text):
fallback: Dict[str, str] = {}
if store_text:
fallback["store"] = store_text
if hash_text:
fallback["hash"] = hash_text
final_item = fallback

return final_item, (store_text or None), (hash_text or None)

def _open_tags_popup(self) -> None:
if self._pipeline_running:

@@ -30,15 +30,15 @@ PIPELINE_PRESETS: List[PipelinePreset] = [
PipelinePreset(
label="Download → Merge → Local",
description=
"Use download-media with playlist auto-selection, merge the pieces, tag, then import into local storage.",
"Use download-file with playlist auto-selection, merge the pieces, tag, then import into local storage.",
pipeline=
'download-media "<url>" | merge-file | add-tags -store local | add-file -storage local',
'download-file "<url>" | merge-file | add-tags -store local | add-file -storage local',
),
PipelinePreset(
label="Download → Hydrus",
description="Fetch media, auto-tag, and push directly into Hydrus.",
pipeline=
'download-media "<url>" | merge-file | add-tags -store hydrus | add-file -storage hydrus',
'download-file "<url>" | merge-file | add-tags -store hydrus | add-file -storage hydrus',
),
PipelinePreset(
label="Search Local Library",

@@ -405,7 +405,7 @@ class DownloadModal(ModalScreen):
download_succeeded = False
download_stderr_text = "" # Store for merge stage
if download_enabled:
download_cmdlet_name = "download-media" if self.is_playlist else "download-file"
download_cmdlet_name = "download-file"
download_cmdlet = get_cmdlet(download_cmdlet_name)
if download_cmdlet:
logger.info(f"📥 Executing {download_cmdlet_name} stage")
@@ -416,9 +416,9 @@ class DownloadModal(ModalScreen):
if worker:
worker.log_step(f"Starting {download_cmdlet_name} stage...")

# Build arguments for download-media (yt-dlp) playlists; download-file takes no yt-dlp args.
# Build yt-dlp playlist arguments for download-file streaming (if applicable).
cmdlet_args = []
if download_cmdlet_name == "download-media" and self.is_playlist:
if self.is_playlist:
# Always use yt-dlp's native --playlist-items for playlists
if playlist_selection:
# User provided specific selection

@@ -1,6 +1,8 @@
from __future__ import annotations

from typing import Any, Callable, Dict, Iterable, Sequence
import os
import sys
from typing import Any, Callable, Dict, Iterable, Iterator, Sequence
from importlib import import_module as _import_module

# A cmdlet is a callable taking (result, args, config) -> int
@@ -47,51 +49,71 @@ def get(cmd_name: str) -> Cmdlet | None:
return REGISTRY.get(_normalize_cmd_name(cmd_name))


# Dynamically import all cmdlet modules in this directory (ignore files starting with _ and __init__.py)
# cmdlet self-register when instantiated via their __init__ method
import os
_MODULES_LOADED = False

def _iter_cmdlet_module_names() -> Iterator[str]:
cmdlet_dir = os.path.dirname(__file__)
for filename in os.listdir(cmdlet_dir):
try:
entries = os.listdir(cmdlet_dir)
except Exception:
return iter(())

def _generator() -> Iterator[str]:
for filename in entries:
if not (filename.endswith(".py") and not filename.startswith("_")
and filename != "__init__.py"):
continue

mod_name = filename[:-3]

# Enforce Powershell-style two-word cmdlet naming (e.g., add_file, get_file)
# Skip native/utility scripts that are not cmdlet (e.g., adjective, worker, matrix, pipe)
if "_" not in mod_name:
continue
yield mod_name

return _generator()


def _load_cmdlet_module(mod_name: str) -> None:
try:
_import_module(f".{mod_name}", __name__)
except Exception as e:
import sys
except Exception as exc:
print(f"Error importing cmdlet '{mod_name}': {exc}", file=sys.stderr)

print(f"Error importing cmdlet '{mod_name}': {e}", file=sys.stderr)

def _load_root_modules() -> None:
for root in ("select_cmdlet",):
try:
_import_module(root)
except Exception:
continue

# Import and register native commands that are not considered cmdlet
try:
from cmdnat import register_native_commands as _register_native_commands

_register_native_commands(REGISTRY)
except Exception:
# Native commands are optional; ignore if unavailable
pass

# Import root-level modules that also register cmdlet
for _root_mod in ("select_cmdlet",
):
try:
_import_module(_root_mod)
except Exception:
# Allow missing optional modules
continue

# Also import helper modules that register cmdlet
def _load_helper_modules() -> None:
try:
import API.alldebrid as _alldebrid
except Exception:
pass


def _register_native_commands() -> None:
try:
from cmdnat import register_native_commands
except Exception:
return
try:
register_native_commands(REGISTRY)
except Exception:
pass


def ensure_cmdlet_modules_loaded() -> None:
global _MODULES_LOADED

if _MODULES_LOADED:
return

for mod_name in _iter_cmdlet_module_names():
_load_cmdlet_module(mod_name)

_load_root_modules()
_load_helper_modules()
_register_native_commands()
_MODULES_LOADED = True

@@ -519,8 +519,11 @@ class Add_File(Cmdlet):
# - If the sample URL only has one available format, force it for the batch.
# - If the sample URL appears audio-only (no video codecs), prefer audio mode.
try:
from cmdlet.download_media import is_url_supported_by_ytdlp, list_formats
from tool.ytdlp import YtDlpTool
from tool.ytdlp import (
YtDlpTool,
is_url_supported_by_ytdlp,
list_formats,
)

sample_url = unique_urls[0] if unique_urls else None
if sample_url and is_url_supported_by_ytdlp(str(sample_url)):
@@ -677,6 +680,59 @@ class Add_File(Cmdlet):
# Update pipe_obj with resolved path
pipe_obj.path = str(media_path_or_url)

table = None
full_metadata = None
if isinstance(pipe_obj.extra, dict):
table = pipe_obj.extra.get("table")
full_metadata = pipe_obj.extra.get("full_metadata")

provider_table = str(
table or getattr(pipe_obj, "provider", "")
).strip().lower()
if (provider_table == "alldebrid"
and isinstance(media_path_or_url, str)
and media_path_or_url.lower().startswith(
("http://", "https://"))
and (provider_name or location)):
url_str = str(media_path_or_url)
if url_str in skip_url_downloads:
log(
f"Skipping download (already stored): {url_str}",
file=sys.stderr,
)
successes += 1
continue

temp_dir_candidate = Path(
tempfile.mkdtemp(prefix="medios_alldebrid_")
)
downloaded_path: Optional[Path] = None
try:
from ProviderCore.registry import get_search_provider

provider = get_search_provider("alldebrid", config)
if provider is not None:
downloaded = provider.download(
pipe_obj,
temp_dir_candidate,
)
if downloaded:
downloaded_path = Path(downloaded)
except Exception as exc:
log(
f"[add-file] AllDebrid download failed: {exc}",
file=sys.stderr,
)
if downloaded_path and downloaded_path.exists():
media_path_or_url = downloaded_path
pipe_obj.path = str(downloaded_path)
pipe_obj.is_temp = True
delete_after_item = True
temp_dir_to_cleanup = temp_dir_candidate
processed_url_items.add(url_str)
else:
shutil.rmtree(temp_dir_candidate, ignore_errors=True)

# URL targets: prefer provider-aware download for OpenLibrary selections.
if isinstance(media_path_or_url,
str) and media_path_or_url.lower().startswith(
@@ -684,12 +740,6 @@ class Add_File(Cmdlet):
"https://",
"magnet:",
"torrent:")):
table = None
full_metadata = None
if isinstance(pipe_obj.extra, dict):
table = pipe_obj.extra.get("table")
full_metadata = pipe_obj.extra.get("full_metadata")

is_openlibrary = (str(table or "").lower() == "openlibrary") or (
"openlibrary.org/books/" in media_path_or_url.lower()
)
@@ -1079,7 +1129,7 @@ class Add_File(Cmdlet):
continue

# No destination specified: keep legacy behavior (download-media only).
code = self._delegate_to_download_media(
code = self._delegate_to_download_file(
item,
url_str,
location,
@@ -2052,7 +2102,7 @@ class Add_File(Cmdlet):
pass
return None

def _delegate_to_download_media(
def _delegate_to_download_file(
self,
result: Any,
url_str: str,
@@ -2062,13 +2112,13 @@ class Add_File(Cmdlet):
config: Dict[str,
Any],
) -> int:
"""Delegate URL handling to download-media cmdlet."""
"""Delegate URL handling to download-file cmdlet (yt-dlp path)."""
log(
f"Target is a URL, delegating to download-media: {url_str}",
f"Target is a URL, delegating to download-file: {url_str}",
file=sys.stderr
)
# Reuse the globally-registered cmdlet instance to avoid duplicative registration
from cmdlet.download_media import CMDLET as dl_cmdlet
from cmdlet.download_file import CMDLET as dl_cmdlet

dl_args = list(args) if args else []

@@ -2087,11 +2137,11 @@ class Add_File(Cmdlet):
if selection_args:
dl_args.extend(selection_args)

# download-media doesn't support -storage flag
# download-file doesn't support -storage flag
# It downloads to the configured directory, then add-file will handle storage
# Note: Provider uploads (0x0) are not supported via this path

# Call download-media with the URL in args
# Call download-file with the URL in args
return dl_cmdlet.run(None, dl_args, config)

@staticmethod
@@ -2832,17 +2882,16 @@ class Add_File(Cmdlet):
return []

try:
from cmdlet.download_media import (
CMDLET as dl_cmdlet,
from SYS.models import DownloadOptions
from tool.ytdlp import (
YtDlpTool,
_best_subtitle_sidecar,
_download_with_timeout,
_format_chapters_note,
_read_text_file,
is_url_supported_by_ytdlp,
list_formats,
_format_chapters_note,
_best_subtitle_sidecar,
_read_text_file,
)
from SYS.models import DownloadOptions
from tool.ytdlp import YtDlpTool
except Exception:
return []

@@ -540,9 +540,11 @@ class Add_Tag(Cmdlet):
)
return 1

hash_override = normalize_hash(query_hash) if query_hash else None

# If add-tag is in the middle of a pipeline (has downstream stages), default to
# including temp files. This enables common flows like:
# @N | download-media | add-tag ... | add-file ...
# @N | download-file | add-tag ... | add-file ...
store_override = parsed.get("store")
stage_ctx = ctx.get_stage_context()
has_downstream = bool(
@@ -562,6 +564,10 @@ class Add_Tag(Cmdlet):
if not include_temp:
results = filter_results_by_temp(results, include_temp=False)

# When no pipeline payload is present but -query/-store pinpoints a hash, tag it directly.
if not results and hash_override and store_override:
results = [{"hash": hash_override, "store": store_override}]

if not results:
log(
"No valid files to tag (all results were temporary; use --all to include temporary files)",
@@ -628,7 +634,6 @@ class Add_Tag(Cmdlet):
return 1

# Get other flags
hash_override = normalize_hash(query_hash) if query_hash else None
duplicate_arg = parsed.get("duplicate")

# tag ARE provided - apply them to each store-backed result

File diff suppressed because it is too large
Load Diff
@@ -1,280 +0,0 @@
"""Download torrent/magnet links via AllDebrid in a dedicated cmdlet.

Features:
- Accepts magnet links and .torrent files/url
- Uses AllDebrid API for background downloads
- Progress tracking and worker management
- Self-registering class-based cmdlet
"""

from __future__ import annotations
import sys
import uuid
import threading
from pathlib import Path
from typing import Any, Dict, Optional, Sequence

from SYS.logger import log
from . import _shared as sh


class Download_Torrent(sh.Cmdlet):
"""Class-based download-torrent cmdlet with self-registration."""

def __init__(self) -> None:
super().__init__(
name="download-torrent",
summary="Download torrent/magnet links via AllDebrid",
usage="download-torrent <magnet|.torrent> [options]",
alias=["torrent",
"magnet"],
arg=[
sh.CmdletArg(
name="magnet",
type="string",
required=False,
description="Magnet link or .torrent file/URL",
variadic=True,
),
sh.CmdletArg(
name="output",
type="string",
description="Output directory for downloaded files",
),
sh.CmdletArg(
name="wait",
type="float",
description="Wait time (seconds) for magnet processing timeout",
),
sh.CmdletArg(
name="background",
type="flag",
alias="bg",
description="Start download in background",
),
],
detail=["Download torrents/magnets via AllDebrid API."],
exec=self.run,
)
self.register()

def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
parsed = sh.parse_cmdlet_args(args, self)
magnet_args = parsed.get("magnet", [])
output_dir = Path(parsed.get("output") or Path.home() / "Downloads")
wait_timeout = int(float(parsed.get("wait", 600)))
background_mode = parsed.get("background", False)
api_key = None
try:
from Provider.alldebrid import _get_debrid_api_key # type: ignore

api_key = _get_debrid_api_key(config)
except Exception:
api_key = None
if not api_key:
log(
"AllDebrid API key not configured (check config.conf [provider=alldebrid] api_key=...)",
file=sys.stderr,
)
return 1
for magnet_url in magnet_args:
if background_mode:
self._start_background_worker(
magnet_url,
output_dir,
config,
api_key,
wait_timeout
)
log(f"⧗ Torrent download queued in background: {magnet_url}")
else:
# Foreground mode: submit quickly, then continue processing in background
# so we return control to the REPL immediately.
worker_id = str(uuid.uuid4())
magnet_id = self._submit_magnet(worker_id, magnet_url, api_key)
if magnet_id <= 0:
continue
self._start_background_magnet_worker(
worker_id,
magnet_id,
output_dir,
api_key,
wait_timeout
)
log(f"⧗ Torrent processing started (ID: {magnet_id})")
return 0

@staticmethod
def _submit_magnet(worker_id: str, magnet_url: str, api_key: str) -> int:
"""Submit a magnet and return its AllDebrid magnet ID.

This is intentionally fast so the caller can return to the REPL.
"""
try:
from API.alldebrid import AllDebridClient

client = AllDebridClient(api_key)
log(f"[Worker {worker_id}] Submitting magnet to AllDebrid...")
magnet_info = client.magnet_add(magnet_url)
magnet_id = int(magnet_info.get("id", 0))
if magnet_id <= 0:
log(f"[Worker {worker_id}] Magnet add failed", file=sys.stderr)
return 0
log(f"[Worker {worker_id}] ✓ Magnet added (ID: {magnet_id})")
return magnet_id
except Exception as e:
log(f"[Worker {worker_id}] Magnet submit failed: {e}", file=sys.stderr)
return 0

def _start_background_magnet_worker(
self,
worker_id: str,
magnet_id: int,
output_dir: Path,
api_key: str,
wait_timeout: int
) -> None:
thread = threading.Thread(
target=self._download_magnet_worker,
args=(worker_id,
magnet_id,
output_dir,
api_key,
wait_timeout),
daemon=True,
name=f"TorrentWorker_{worker_id}",
)
thread.start()

@staticmethod
def _download_magnet_worker(
worker_id: str,
magnet_id: int,
output_dir: Path,
api_key: str,
wait_timeout: int = 600,
) -> None:
"""Poll AllDebrid magnet status until ready, then download the files."""
try:
from API.alldebrid import AllDebridClient

client = AllDebridClient(api_key)

# Poll for ready status (simplified)
import time

elapsed = 0
while elapsed < wait_timeout:
status = client.magnet_status(magnet_id)
if status.get("ready"):
break
time.sleep(5)
elapsed += 5
if elapsed >= wait_timeout:
log(f"[Worker {worker_id}] Timeout waiting for magnet", file=sys.stderr)
return

files_result = client.magnet_links([magnet_id])
magnet_files = files_result.get(str(magnet_id),
{})
files_array = magnet_files.get("files", [])
if not files_array:
log(f"[Worker {worker_id}] No files found", file=sys.stderr)
return
for file_info in files_array:
file_url = file_info.get("link")
file_name = file_info.get("name")
if file_url and file_name:
Download_Torrent._download_file(file_url, output_dir / file_name)
log(f"[Worker {worker_id}] ✓ Downloaded {file_name}")
except Exception as e:
log(f"[Worker {worker_id}] Torrent download failed: {e}", file=sys.stderr)

@staticmethod
def _download_torrent_worker(
worker_id: str,
magnet_url: str,
output_dir: Path,
config: Dict[str,
Any],
api_key: str,
wait_timeout: int = 600,
worker_manager: Optional[Any] = None,
) -> None:
try:
from API.alldebrid import AllDebridClient

client = AllDebridClient(api_key)
log(f"[Worker {worker_id}] Submitting magnet to AllDebrid...")
magnet_info = client.magnet_add(magnet_url)
magnet_id = int(magnet_info.get("id", 0))
if magnet_id <= 0:
log(f"[Worker {worker_id}] Magnet add failed", file=sys.stderr)
return
log(f"[Worker {worker_id}] ✓ Magnet added (ID: {magnet_id})")
# Poll for ready status (simplified)
import time

elapsed = 0
while elapsed < wait_timeout:
status = client.magnet_status(magnet_id)
if status.get("ready"):
break
time.sleep(5)
elapsed += 5
if elapsed >= wait_timeout:
log(f"[Worker {worker_id}] Timeout waiting for magnet", file=sys.stderr)
return
files_result = client.magnet_links([magnet_id])
magnet_files = files_result.get(str(magnet_id),
{})
files_array = magnet_files.get("files", [])
if not files_array:
log(f"[Worker {worker_id}] No files found", file=sys.stderr)
return
for file_info in files_array:
file_url = file_info.get("link")
file_name = file_info.get("name")
if file_url:
Download_Torrent._download_file(file_url, output_dir / file_name)
log(f"[Worker {worker_id}] ✓ Downloaded {file_name}")
except Exception as e:
log(f"[Worker {worker_id}] Torrent download failed: {e}", file=sys.stderr)

@staticmethod
def _download_file(url: str, dest: Path) -> None:
try:
import requests

resp = requests.get(url, stream=True)
with open(dest, "wb") as f:
for chunk in resp.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
except Exception as e:
log(f"File download failed: {e}", file=sys.stderr)

def _start_background_worker(
self,
magnet_url,
output_dir,
config,
api_key,
wait_timeout
):
worker_id = f"torrent_{uuid.uuid4().hex[:6]}"
thread = threading.Thread(
target=self._download_torrent_worker,
args=(worker_id,
magnet_url,
output_dir,
config,
api_key,
wait_timeout),
daemon=True,
name=f"TorrentWorker_{worker_id}",
)
thread.start()


CMDLET = Download_Torrent()

@@ -155,8 +155,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
mode_hint: Optional[str] = None
forced_format: Optional[str] = None
try:
from cmdlet.download_media import list_formats
from tool.ytdlp import YtDlpTool
from tool.ytdlp import YtDlpTool, list_formats

sample_url = urls_to_download[0]
cookiefile = None

@@ -209,6 +209,18 @@ class search_file(Cmdlet):

provider_text = str(provider_name or "").strip()
provider_lower = provider_text.lower()
id_match = re.search(r"\bid\s*[=:]\s*(\d+)", query, flags=re.IGNORECASE)
parsed_open_id = open_id
if id_match and parsed_open_id is None:
try:
parsed_open_id = int(id_match.group(1))
except Exception:
parsed_open_id = None
query = re.sub(r"\bid\s*[=:]\s*\d+", "", query, flags=re.IGNORECASE).strip()
if not query:
query = "*"

effective_open_id = parsed_open_id if parsed_open_id is not None else open_id
if provider_lower == "youtube":
provider_label = "Youtube"
elif provider_lower == "openlibrary":
@@ -218,22 +230,32 @@ class search_file(Cmdlet):
else:
provider_label = provider_text[:1].upper() + provider_text[1:] if provider_text else "Provider"

if provider_lower == "alldebrid" and open_id is not None:
table_title = f"{provider_label} Files: {open_id}".strip().rstrip(":")
if provider_lower == "alldebrid" and effective_open_id is not None:
table_title = f"{provider_label} Files: {effective_open_id}".strip().rstrip(":")
else:
table_title = f"{provider_label}: {query}".strip().rstrip(":")

preserve_order = provider_lower in {"youtube", "openlibrary", "loc"}
table = ResultTable(table_title).set_preserve_order(preserve_order)
table.set_table(provider_name)
table_meta: Dict[str, Any] = {"provider": provider_name}
if provider_lower == "alldebrid":
table_meta["view"] = "files" if effective_open_id is not None else "folders"
if effective_open_id is not None:
table_meta["magnet_id"] = effective_open_id
try:
table.set_table_metadata(table_meta)
except Exception:
pass
table.set_source_command("search-file", list(args_list))

debug(f"[search-file] Calling {provider_name}.search()")
if provider_lower == "alldebrid":
if open_id is not None:
results = provider.search(query, limit=limit, filters={"view": "files", "magnet_id": open_id})
else:
results = provider.search(query, limit=limit, filters={"view": "folders"})
filters = {"view": "folders"}
search_open_id = parsed_open_id if parsed_open_id is not None else open_id
if search_open_id is not None:
filters = {"view": "files", "magnet_id": search_open_id}
results = provider.search(query, limit=limit, filters=filters)
else:
results = provider.search(query, limit=limit)
debug(f"[search-file] {provider_name} -> {len(results or [])} result(s)")
@@ -260,17 +282,6 @@ class search_file(Cmdlet):
row_index = len(table.rows)
table.add_result(search_result)

try:
if provider_lower == "alldebrid" and getattr(search_result, "media_kind", "") == "folder":
magnet_id = None
meta = getattr(search_result, "full_metadata", None)
if isinstance(meta, dict):
magnet_id = meta.get("magnet_id")
if magnet_id is not None:
table.set_row_selection_args(row_index, ["-open", str(magnet_id), "-query", "*"])
except Exception:
pass

results_list.append(item_dict)
ctx.emit(item_dict)

@@ -2,6 +2,7 @@

from __future__ import annotations

import re
import sys
from dataclasses import dataclass
from datetime import datetime, timezone
@@ -199,27 +200,40 @@ def _render_worker_list(db, status_filter: str | None, limit: int) -> int:
date_str = _extract_date(started)
start_time = _format_event_timestamp(started)
end_time = _format_event_timestamp(ended)
worker_id = str(worker.get("worker_id") or worker.get("id") or "unknown")
status = str(worker.get("status") or "unknown")
result_state = str(worker.get("result") or "")
status_label = status
if result_state and result_state.lower() not in {"", status.lower()}:
status_label = f"{status_label} ({result_state})"
pipe_display = _summarize_pipe(worker.get("pipe"))
error_message = _normalize_text(worker.get("error_message"))
description = _normalize_text(worker.get("description"))

columns = [
("ID", worker_id[:8]),
("Status", status_label),
("Pipe", pipe_display),
("Date", date_str),
("Start", start_time),
("End", end_time),
]
if error_message:
columns.append(("Error", error_message[:140]))
if description and description != error_message:
columns.append(("Details", description[:200]))

item = {
"columns": [
("Status",
worker.get("status",
"")),
("Pipe",
_summarize_pipe(worker.get("pipe"))),
("Date",
date_str),
("Start Time",
start_time),
("End Time",
end_time),
],
"__worker_metadata":
worker,
"_selection_args": ["-id",
worker.get("worker_id")],
"columns": columns,
"__worker_metadata": worker,
"_selection_args": ["-id", worker.get("worker_id")],
}
ctx.emit(item)
log(
f"Worker {worker_id[:8]} status={status_label} pipe={pipe_display} "
f"error={error_message or 'none'}",
file=sys.stderr,
)
return 0

@@ -311,11 +325,25 @@ def _emit_worker_detail(worker: Dict[str, Any], events: List[Dict[str, Any]]) ->
# Events are already always derived from stdout for now.


def _summarize_pipe(pipe_value: Any, limit: int = 60) -> str:
text = str(pipe_value or "").strip()
def _summarize_pipe(pipe_value: Any, limit: int = 200) -> str:
text = _normalize_text(pipe_value)
if not text:
return "(none)"
return text if len(text) <= limit else text[:limit - 3] + "..."

stage_count = text.count("|") + 1 if text else 0
display = text
if len(display) > limit:
trimmed = display[:max(limit - 3, 0)].rstrip()
if not trimmed:
trimmed = display[:limit]
display = f"{trimmed}..."
if stage_count > 1:
suffix = f" ({stage_count} stages)"
if not display.endswith("..."):
display = f"{display}{suffix}"
else:
display = f"{display}{suffix}"
return display


def _format_event_timestamp(raw_timestamp: Any) -> str:
@@ -378,3 +406,24 @@ def _extract_date(raw_timestamp: Any) -> str:
except Exception:
pass
return date_part


def _normalize_text(value: Any) -> str:
text = str(value or "").strip()
if not text:
return ""
# collapse whitespace to keep table columns aligned
normalized = re.sub(r"\s+", " ", text)
return normalized


def _truncate_text(value: str, limit: int) -> str:
if limit <= 0:
return ""
if len(value) <= limit:
return value
cutoff = max(limit - 3, 0)
trimmed = value[:cutoff].rstrip()
if not trimmed:
return value[:limit]
return f"{trimmed}..."

@@ -121,7 +121,7 @@
<g transform="translate(9, 41)" clip-path="url(#terminal-3450042185-clip-terminal)">

<g class="terminal-3450042185-matrix">
[SVG terminal screenshot (32 KiB before and after): the rendered panel title changes from download-media -url "https://altrusiangrace.bandcamp.com/album/zetetic-astronomy-earth-not-a-globe-full-audiobook" to download-file -url "..."; the surrounding <text> rows (box borders and the TITLE / DURATION / UPLOADER table headers) are unchanged.]
226
docs/result_table.md
Normal file
@@ -0,0 +1,226 @@
# ResultTable system — Overview & usage

This document explains the `ResultTable` system used across the CLI and TUI: how tables are built, how providers integrate with them, and how `@N` selection/expansion and provider selectors work.

## TL;DR ✅
- `ResultTable` is the unified object used to render tabular results and drive selection (`@N`) behavior.
- Providers should return `SearchResult` objects (or dicts) and can either supply `selection_args` per row or implement a `selector()` method to handle `@N` selections.
- Table metadata (`set_table_metadata`) helps providers attach context (e.g., `provider_view`, `magnet_id`) that selectors can use.

---

## Key concepts

- **ResultTable** (`SYS/result_table.py`)
  - Renders rows as a rich table and stores metadata used for selection expansion.
  - Important APIs: `add_result()`, `set_table()`, `set_source_command()`, `set_row_selection_args()`, `set_table_metadata()`, and `select_interactive()`.

- **ResultRow**
  - Holds columns plus `selection_args` (used for `@N` expansion) and `payload` (original object).

- **Provider selector**
  - If a provider implements `selector(selected_items, ctx=..., stage_is_last=True)`, it is run first when `@N` is used; if the selector returns `True` it has handled the selection (e.g., drilling into a folder and publishing a new ResultTable). A minimal sketch follows this list.

- **Pipeline / CLI expansion**
  - When the user types `@N`, CLI tries provider selectors first. If none handle it, CLI re-runs `source_command + source_args + row_selection_args` (for single-selection) or pipes items downstream for multi-selection.

- **Table metadata**
  - `ResultTable.set_table_metadata(dict)` allows attaching provider-specific context (for example: `{"provider":"alldebrid","view":"files","magnet_id":123}`) for selectors and other code to use.
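
A minimal `selector()` sketch. This is hedged: the hook signature matches the description above, but the drill-in body, the `full_metadata` shape, and the `magnet_id` key are illustrative, modeled on the AllDebrid example later in this document:

```py
class MyProvider:
    def selector(self, selected_items, *, ctx=None, stage_is_last=True) -> bool:
        """Return True when handled; False lets the CLI expand @N normally."""
        item = (selected_items or [None])[0]
        meta = item.get("full_metadata") if isinstance(item, dict) else getattr(item, "full_metadata", None)
        folder_id = (meta or {}).get("magnet_id")  # provider-specific drill-in key (illustrative)
        if folder_id is None:
            return False
        results = self.search("*", filters={"view": "files", "magnet_id": folder_id})
        # ...build a new ResultTable from `results` and publish it via ctx
        # (see the provider pattern in the next section)...
        return True
```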
---

## How to build a table (provider pattern)

Typical provider flow (pseudocode):

```py
from SYS.result_table import ResultTable

table = ResultTable("Provider: X result").set_preserve_order(True)
table.set_table("provider_name")
table.set_table_metadata({"provider":"provider_name","view":"folders"})
table.set_source_command("search-file", ["-provider","provider_name","query"])

for r in results:
    table.add_result(r)  # r can be a SearchResult, dict, or PipeObject

ctx.set_last_result_table(table, payloads)
ctx.set_current_stage_table(table)
```

Notes:
- To drive a direct `@N` re-run, call `table.set_row_selection_args(row_index, ["-open", "<id>"])`; see the sketch below for per-row wiring.
- For more advanced or interactive behavior (e.g., drill-into, fetch more rows), implement `provider.selector()` and return `True` when handled.
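
A hedged sketch of the per-row wiring from the first note, assuming each result carries a `full_metadata` dict with an id under `magnet_id` (that key mirrors the AllDebrid example below; substitute your provider's own):

```py
for idx, r in enumerate(results):
    table.add_result(r)
    meta = r.get("full_metadata") if isinstance(r, dict) else getattr(r, "full_metadata", None)
    item_id = (meta or {}).get("magnet_id")
    if item_id is not None:
        # A bare @N on this row now re-runs the source command with "-open <id>" appended.
        table.set_row_selection_args(idx, ["-open", str(item_id)])
```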
---

## Selection (@N) flow (brief)

1. User enters `@N` in the CLI.
2. CLI chooses the appropriate table (overlay > last table > history) and gathers the selected payload(s).
3. `PipelineExecutor._maybe_run_class_selector()` runs provider `selector()` hooks for the provider inferred from table or payloads. If any selector returns `True`, expansion stops.
4. Otherwise, for single selections, CLI grabs `row.selection_args` and expands `source_command + source_args + row_selection_args`, inserting it as the expanded stage (sketched below). For multi-selections, items are piped downstream.
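
The expansion in step 4 amounts to concatenating the stored command parts. A minimal sketch; the `source_command`/`source_args` attribute names are assumptions for illustration (only `set_source_command()` and `selection_args` are documented here):

```py
import shlex

def expand_selection(table, row_index: int) -> str:
    """Compose the stage the CLI re-runs for a single @N selection."""
    row_args = list(table.rows[row_index].selection_args or [])
    parts = [table.source_command, *table.source_args, *row_args]
    return " ".join(shlex.quote(str(p)) for p in parts)

# e.g. search-file -provider alldebrid '*' -open 123 -query '*'
```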
---

## Columns & display

- Providers can pass a `columns` list (`[(name, value), ...]`) in the result dict/SearchResult to control which columns are shown and their order (example below).
- Otherwise, `ResultTable` uses a priority list (title/store/size/ext) and sensible defaults.
- The table rendering functions (`to_rich`, `format_json`, `format_compact`) are available for different UIs.
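
An illustrative dict result using explicit `columns` (the URL and values are placeholders):

```py
item = {
    "title": "Episode 01.mkv",
    "path": "https://example.invalid/episode-01.mkv",
    "columns": [("File", "Episode 01.mkv"), ("Size", "117.7 MB"), ("Source", "demo")],
}
table.add_result(item)  # renders File / Size / Source in this exact order
```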
---
|
||||
|
||||
## Provider-specific examples
|
||||
|
||||
### AllDebrid (debrid file hosting)
|
||||
|
||||
AllDebrid exposes a list of magnets (folder rows) and the files inside each magnet. The provider returns `folder` SearchResults for magnets and `file` SearchResults for individual files. The provider includes a `selector()` that drills into a magnet by calling `search(..., filters={"view":"files","magnet_id":...})` and builds a new `ResultTable` of files.
|
||||
|
||||
Example commands:
|
||||
|
||||
```
# List magnets in your account
search-file -provider alldebrid "*"

# Open magnet id 123 and list its files
search-file -provider alldebrid -open 123 "*"

# Or expand via @ selection (the selector handles drilling):
search-file -provider alldebrid "*"
@3   # the selector opens the magnet referenced by row #3 and shows the file table
```

Illustrative folder (magnet) SearchResult:

```py
SearchResult(
    table="alldebrid",
    title="My Magnet Title",
    path="alldebrid:magnet:123",
    detail="OK",
    annotations=["folder", "ready"],
    media_kind="folder",
    columns=[("Folder", "My Magnet Title"), ("ID", "123"), ("Status", "ready"), ("Ready", "yes")],
    full_metadata={
        "magnet": {...},
        "magnet_id": 123,
        "provider": "alldebrid",
        "provider_view": "folders",
        "magnet_name": "My Magnet Title",
    },
)
```

Illustrative file SearchResult (after drilling):

```py
SearchResult(
    table="alldebrid",
    title="Episode 01.mkv",
    path="https://.../unlocked_direct_url",
    detail="My Magnet Title",
    annotations=["file"],
    media_kind="file",
    size_bytes=123456789,
    columns=[("File", "Episode 01.mkv"), ("Folder", "My Magnet Title"), ("ID", "123")],
    full_metadata={
        "magnet": {...},
        "magnet_id": 123,
        "magnet_name": "My Magnet Title",
        "relpath": "Season 1/E01.mkv",
        "provider": "alldebrid",
        "provider_view": "files",
        "file": {...},
    },
)
```

Selection & download flows

- Drill-in (selector): `@3` on a magnet row runs the provider's `selector()` to build and show a new file table. The selector uses `search(..., filters={"view": "files", "magnet_id": ...})` to fetch file rows.

- `download-file` integration: with a file row (an http(s) path), `@2 | download-file` downloads the file. The `download-file` cmdlet expands AllDebrid magnet folders and calls the provider layer to fetch file bytes as appropriate.

- `add-file` convenience: piping a file row into `add-file -path <dest>` triggers add-file's provider-aware logic. If the piped item has `table == 'alldebrid'` and an http(s) `path`, `add-file` calls `provider.download()` into a temporary directory, ingests the downloaded file, and cleans up the temp directory when done. Example:

```
# Expand magnet and add first file to local directory
search-file -provider alldebrid "*"
@3                       # view files
@1 | add-file -path C:\mydir
```
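
Under the hood, that add-file flow amounts to something like this hedged sketch (`provider.download()` uses the signature from the checklist below; `ingest_file` is a hypothetical stand-in for add-file's real ingest step):

```py
import shutil
import tempfile
from pathlib import Path

def add_remote_file(provider, result, dest: Path) -> None:
    """Download a provider-hosted row into a temp dir, ingest it, clean up."""
    tmp = Path(tempfile.mkdtemp(prefix="addfile_"))
    try:
        downloaded = provider.download(result, output_dir=tmp)  # Optional[Path]
        if downloaded is None:
            return  # provider could not fetch the file bytes
        ingest_file(downloaded, dest)  # hypothetical ingest step
    finally:
        shutil.rmtree(tmp, ignore_errors=True)  # temp dir always removed
```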

Notes & troubleshooting

- Configure an AllDebrid API key (see `Provider/alldebrid._get_debrid_api_key()`).
- If a magnet isn't ready, the selector and `download-file` log the magnet status and avoid attempting file downloads.

---

### Bandcamp (artist → discography drill-in)

Bandcamp search supports `artist:` queries. Bandcamp's provider implements a `selector()` that detects `artist` results and scrapes the artist's page using Playwright to build a discography `ResultTable`.

Example usage:

```
# Search for an artist
search-file -provider bandcamp "artist:radiohead"

# Select an artist row to expand into releases
@1
```

Bandcamp SearchResult (artist / album rows):

```py
SearchResult(
    table="bandcamp",
    title="Album Title",
    path="https://bandcamp.com/album_url",
    detail="By: Artist",
    annotations=["album"],
    media_kind="audio",
    columns=[("Title", "Album Title"), ("Location", "Artist"), ("Type", "album"), ("Url", "https://...")],
    full_metadata={"artist": "Artist", "type": "album", "url": "https://..."},
)
```

Notes:

- Playwright is required for Bandcamp scraping. The selector logs an informative message if Playwright is missing (a guard sketch follows these notes).
- Provider selectors are ideal when you need to replace one table with another (artist → discography).

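The missing-Playwright case can be handled with a small guard like this sketch (the import path is Playwright's real sync API; the `log` helper follows this codebase's `SYS.logger` convention):

```py
from SYS.logger import log

def _require_playwright() -> bool:
    """Return True if Playwright is importable; log a hint otherwise."""
    try:
        from playwright.sync_api import sync_playwright  # noqa: F401
        return True
    except ImportError:
        log("Bandcamp drill-in requires Playwright: pip install playwright && playwright install")
        return False
```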
---

## Provider author checklist (short)

- Implement `search(query, limit, filters)` and return `SearchResult` objects or dicts; include useful `full_metadata` (IDs, view names) for selection/drilling.
- If you support fetching downloadable file bytes, implement `download(result, output_dir) -> Optional[Path]`.
- For drill-in or interactive transforms, implement `selector(selected_items, ctx=..., stage_is_last=True)`, call `ctx.set_last_result_table(...)` / `ctx.set_current_stage_table(...)`, and return `True` when handled (see the skeleton after this list).
- Add tests (unit/integration) that exercise search → select → download flows.

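Putting the checklist together, a hedged skeleton (class and module names are illustrative; only the three method signatures come from this guide):

```py
from pathlib import Path
from typing import Any, Optional

class ExampleProvider:
    """Illustrative provider skeleton, not a shipped Medios-Macina provider."""

    def search(self, query: str, limit: int = 50, filters: Optional[dict] = None) -> list:
        # Return SearchResult objects or dicts; stash IDs and view names
        # in full_metadata so selectors can drill in later.
        return []

    def download(self, result: Any, output_dir: Path) -> Optional[Path]:
        # Fetch the file bytes for `result` into output_dir; None on failure.
        return None

    def selector(self, selected_items, ctx=None, stage_is_last=True) -> bool:
        # Optional drill-in hook: build a new ResultTable via ctx and
        # return True when the selection has been handled.
        return False
```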

---

## Debugging tips

- Use `ctx.set_last_result_table(table, payloads)` to immediately show a table while developing a selector.
- Add `log(...)` messages in provider code to capture failure points.
- Check the `full_metadata` attached to SearchResults to pass extra context (IDs, view names, provider names).

---

## Quick reference

- ResultTable location: `SYS/result_table.py`
- Pipeline helpers: `SYS/pipeline.py` (`set_last_result_table`, `set_current_stage_table`, `get_current_stage_table_row_selection_args`)
- CLI expansion: `CLI.py` (handles `@N`, provider selectors, and insertion of expanded stages)
- Provider selector pattern: implement `.selector(selected_items, ctx=..., stage_is_last=True)` in the provider class.

---

Possible follow-ups for this doc:

- Provider-specific examples (AllDebrid, Bandcamp) ✅ (included above)
- A short checklist for PR reviewers when adding new providers

---

Created by GitHub Copilot (Raptor mini - Preview): a brief guide to the ResultTable system. Feedback welcome!

@@ -1,7 +1,5 @@
# Medeia-Macina

This repository now keeps its detailed guide inside `scripts/README.md` to keep the root folder lean. See that file for installation, usage, and contributor information.

# Medios-Macina

Medios-Macina is a CLI media manager and toolkit focused on downloading, tagging, and media storage (audio, video, images, and text) from a variety of providers and sources. It is designed around a compact, pipeable command language ("cmdlets") so complex workflows can be composed simply and repeatably.

## Features

@@ -22,6 +20,7 @@ GIT CLONE https://code.glowers.club/goyimnose/Medios-Macina

2. rename config.conf.remove to config.conf, [config tutorial](https://code.glowers.club/goyimnose/Medios-Macina/wiki/Config.conf)

### MINIMAL EXAMPLE CONFIG - CHANGE VALUES
```Minimal config
temp="C:\\Users\\Admin\\Downloads"
```