2025-12-16 01:45:01 -08:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
2026-01-01 20:37:27 -08:00
|
|
|
import hashlib
|
2026-01-04 02:23:50 -08:00
|
|
|
import json
|
2025-12-16 01:45:01 -08:00
|
|
|
import sys
|
2026-01-01 20:37:27 -08:00
|
|
|
import time
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from typing import Any, Dict, Iterable, List, Optional, Callable, Tuple
|
|
|
|
|
from urllib.parse import urlparse
|
2025-12-16 01:45:01 -08:00
|
|
|
|
2026-01-05 07:51:19 -08:00
|
|
|
from API.HTTP import HTTPClient, _download_direct_file
|
2026-01-04 02:23:50 -08:00
|
|
|
from API.alldebrid import AllDebridClient, parse_magnet_or_hash, is_torrent_file
|
2025-12-19 02:29:42 -08:00
|
|
|
from ProviderCore.base import Provider, SearchResult
|
2026-01-06 01:38:59 -08:00
|
|
|
from SYS.utils import sanitize_filename
|
2026-01-05 07:51:19 -08:00
|
|
|
from SYS.logger import log, debug
|
2026-01-04 02:23:50 -08:00
|
|
|
from SYS.models import DownloadError
|
|
|
|
|
|
|
|
|
|
_HOSTS_CACHE_TTL_SECONDS = 24 * 60 * 60
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _repo_root() -> Path:
|
|
|
|
|
try:
|
|
|
|
|
return Path(__file__).resolve().parents[1]
|
|
|
|
|
except Exception:
|
|
|
|
|
return Path(".")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _hosts_cache_path() -> Path:
    """Location of the on-disk AllDebrid hosts cache.

    Kept local to the repo so portable installs work, and so the registry's
    URL routing can read it without instantiating providers.  The file is
    expected to hold the JSON payload shape from AllDebrid:
        {"status":"success","data":{"hosts":[...],"streams":[...],"redirectors":[...]}}
    """
    return _repo_root().joinpath("API", "data", "alldebrid.json")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _load_cached_domains(category: str) -> List[str]:
    """Load a cached domain list from API/data/alldebrid.json.

    category: "hosts" | "streams" | "redirectors"

    Returns a de-duplicated, order-preserving list of lowercase hostnames.
    Any read/parse problem yields an empty list (the cache is best-effort).
    """
    wanted = str(category or "").strip().lower()
    if wanted not in {"hosts", "streams", "redirectors"}:
        return []

    cache_file = _hosts_cache_path()
    try:
        if not (cache_file.exists() and cache_file.is_file()):
            return []
        payload = json.loads(cache_file.read_text(encoding="utf-8"))
    except Exception:
        return []

    if not isinstance(payload, dict):
        return []

    # Newer caches nest the lists under "data"; older shapes stored them at top level.
    data = payload.get("data")
    if not isinstance(data, dict):
        data = payload
    if not isinstance(data, dict):
        return []

    raw_list = data.get(wanted)
    if not isinstance(raw_list, list):
        return []

    domains: List[str] = []
    seen: set[str] = set()
    for entry in raw_list:
        try:
            dom = str(entry or "").strip().lower()
        except Exception:
            continue
        if not dom:
            continue
        if dom.startswith(("http://", "https://")):
            # Accidentally stored as a URL; reduce it to the bare hostname.
            try:
                dom = str(urlparse(dom).hostname or "").strip().lower()
            except Exception:
                continue
        if dom.startswith("www."):
            dom = dom[4:]
        if dom and dom not in seen:
            seen.add(dom)
            domains.append(dom)
    return domains
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _load_cached_hoster_domains() -> List[str]:
    """Domains used for URL routing (download-file).

    Only the "hosts" list is used on purpose: the "streams" list is extremely
    broad and would steal URLs from other providers.
    """
    return _load_cached_domains("hosts")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _save_cached_hosts_payload(payload: Dict[str, Any]) -> None:
    """Persist the raw AllDebrid hosts payload to disk.

    Best-effort: directory-creation or write failures are silently ignored
    so a broken cache never disturbs callers.
    """
    cache_file = _hosts_cache_path()
    try:
        cache_file.parent.mkdir(parents=True, exist_ok=True)
    except Exception:
        return
    try:
        text = json.dumps(payload, ensure_ascii=False, indent=2)
        cache_file.write_text(text, encoding="utf-8")
    except Exception:
        return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _cache_is_fresh() -> bool:
    """True when the hosts cache file exists and is younger than the TTL."""
    cache_file = _hosts_cache_path()
    try:
        if not (cache_file.exists() and cache_file.is_file()):
            return False
        age = time.time() - float(cache_file.stat().st_mtime)
        return age < _HOSTS_CACHE_TTL_SECONDS
    except Exception:
        return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _fetch_hosts_payload_v4_hosts() -> Optional[Dict[str, Any]]:
    """Fetch the public AllDebrid hosts payload.

    This intentionally does NOT require an API key.
    Endpoint: https://api.alldebrid.com/v4/hosts

    Returns the decoded JSON dict, or None on any network/decode failure
    (the failure is logged to stderr).
    """
    url = "https://api.alldebrid.com/v4/hosts"
    try:
        with HTTPClient(timeout=20.0) as client:
            resp = client.get(url)
            resp.raise_for_status()
            payload = resp.json()
    except Exception as exc:
        log(f"[alldebrid] Failed to fetch hosts list: {exc}", file=sys.stderr)
        return None
    return payload if isinstance(payload, dict) else None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def refresh_alldebrid_hoster_cache(*, force: bool = False) -> None:
    """Refresh the on-disk cache of host domains (best-effort).

    Skips the network fetch entirely while the cache is still fresh,
    unless ``force`` is True.  A failed fetch leaves the old cache alone.
    """
    if not force and _cache_is_fresh():
        return
    payload = _fetch_hosts_payload_v4_hosts()
    if isinstance(payload, dict) and payload:
        _save_cached_hosts_payload(payload)
|
2025-12-16 01:45:01 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_debrid_api_key(config: Dict[str, Any]) -> Optional[str]:
|
|
|
|
|
"""Read AllDebrid API key from config.
|
|
|
|
|
|
|
|
|
|
Preferred formats:
|
|
|
|
|
- config.conf provider block:
|
|
|
|
|
[provider=alldebrid]
|
|
|
|
|
api_key=...
|
|
|
|
|
-> config["provider"]["alldebrid"]["api_key"]
|
|
|
|
|
|
|
|
|
|
- store-style debrid block:
|
|
|
|
|
config["store"]["debrid"]["all-debrid"]["api_key"]
|
|
|
|
|
|
|
|
|
|
Falls back to some legacy keys if present.
|
|
|
|
|
"""
|
|
|
|
|
# 1) provider block: [provider=alldebrid]
|
|
|
|
|
provider = config.get("provider")
|
|
|
|
|
if isinstance(provider, dict):
|
|
|
|
|
entry = provider.get("alldebrid")
|
|
|
|
|
if isinstance(entry, dict):
|
|
|
|
|
for k in ("api_key", "apikey", "API_KEY", "APIKEY"):
|
|
|
|
|
val = entry.get(k)
|
|
|
|
|
if isinstance(val, str) and val.strip():
|
|
|
|
|
return val.strip()
|
|
|
|
|
if isinstance(entry, str) and entry.strip():
|
|
|
|
|
return entry.strip()
|
|
|
|
|
|
|
|
|
|
# 2) store.debrid block (canonical for debrid store configuration)
|
|
|
|
|
try:
|
2025-12-29 18:42:02 -08:00
|
|
|
from SYS.config import get_debrid_api_key
|
2025-12-16 01:45:01 -08:00
|
|
|
|
|
|
|
|
key = get_debrid_api_key(config, service="All-debrid")
|
|
|
|
|
return key.strip() if key else None
|
|
|
|
|
except Exception:
|
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
# Legacy fallback (kept permissive so older configs still work)
|
|
|
|
|
for legacy_key in ("alldebrid_api_key", "AllDebrid", "all_debrid_api_key"):
|
|
|
|
|
val = config.get(legacy_key)
|
|
|
|
|
if isinstance(val, str) and val.strip():
|
|
|
|
|
return val.strip()
|
|
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2026-01-01 20:37:27 -08:00
|
|
|
def _consume_bencoded_value(data: bytes, pos: int) -> int:
|
|
|
|
|
if pos >= len(data):
|
|
|
|
|
raise ValueError("Unexpected end of bencode")
|
|
|
|
|
token = data[pos:pos + 1]
|
|
|
|
|
if token == b"i":
|
|
|
|
|
end = data.find(b"e", pos + 1)
|
|
|
|
|
if end == -1:
|
|
|
|
|
raise ValueError("Unterminated integer")
|
|
|
|
|
return end + 1
|
|
|
|
|
if token == b"l" or token == b"d":
|
|
|
|
|
cursor = pos + 1
|
|
|
|
|
while cursor < len(data):
|
|
|
|
|
if data[cursor:cursor + 1] == b"e":
|
|
|
|
|
return cursor + 1
|
|
|
|
|
cursor = _consume_bencoded_value(data, cursor)
|
|
|
|
|
raise ValueError("Unterminated list/dict")
|
|
|
|
|
if token and b"0" <= token <= b"9":
|
|
|
|
|
colon = data.find(b":", pos)
|
|
|
|
|
if colon == -1:
|
|
|
|
|
raise ValueError("Invalid string length")
|
|
|
|
|
length = int(data[pos:colon])
|
|
|
|
|
return colon + 1 + length
|
|
|
|
|
raise ValueError("Unknown bencode token")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _info_hash_from_torrent_bytes(data: bytes) -> Optional[str]:
    """Compute the BitTorrent info-hash (SHA-1 hex) from raw .torrent bytes.

    Locates the bencoded "info" dictionary via its key marker and hashes the
    dictionary's exact byte span.  Returns None when the marker is absent or
    the bencode is malformed.

    NOTE(review): this scans for the first ``4:info`` byte sequence; a value
    elsewhere containing those bytes could theoretically match first — the
    subsequent bencode walk would then fail and return None.
    """
    marker = b"4:info"
    marker_at = data.find(marker)
    if marker_at == -1:
        return None

    value_start = marker_at + len(marker)
    try:
        value_end = _consume_bencoded_value(data, value_start)
    except ValueError:
        return None

    try:
        return hashlib.sha1(data[value_start:value_end]).hexdigest()
    except Exception:
        return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _fetch_torrent_bytes(target: str) -> Optional[bytes]:
    """Return raw .torrent bytes for a local path or an http(s) URL.

    A readable local file wins.  Otherwise only http(s) URLs whose path ends
    in ".torrent" are fetched.  Returns None for anything else or on any
    failure (network failures are logged to stderr).
    """
    local = Path(str(target))
    try:
        if local.exists() and local.is_file():
            return local.read_bytes()
    except Exception:
        pass

    try:
        parsed = urlparse(target)
    except Exception:
        parsed = None

    if parsed is None or not parsed.scheme or parsed.scheme.lower() not in {"http", "https"}:
        return None
    if not target.lower().endswith(".torrent"):
        return None

    try:
        with HTTPClient(timeout=30.0) as client:
            return client.get(target).content
    except Exception as exc:
        log(f"Failed to download .torrent from {target}: {exc}", file=sys.stderr)
        return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def resolve_magnet_spec(target: str) -> Optional[str]:
    """Resolve a magnet/hash/torrent URL into a magnet/hash string.

    Tries, in order: direct magnet-URI/bare-hash parsing, then — for .torrent
    targets — fetching the file and deriving its info-hash.  Returns None
    when nothing resolves.
    """
    candidate = str(target or "").strip()
    if not candidate:
        return None

    # Direct magnet URI or bare info-hash.
    parsed = parse_magnet_or_hash(candidate)
    if parsed:
        return parsed

    # .torrent file (local or remote): derive the info-hash from its bytes.
    if is_torrent_file(candidate):
        raw = _fetch_torrent_bytes(candidate)
        if raw:
            info_hash = _info_hash_from_torrent_bytes(raw)
            if info_hash:
                return info_hash
        return None

    return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _dispatch_alldebrid_magnet_search(
    magnet_id: int,
    config: Dict[str, Any],
) -> None:
    """Kick off a search-file pass for a freshly submitted magnet (best-effort).

    Failures are swallowed: the magnet was already handed to AllDebrid, so the
    follow-up search is purely informational.
    """
    try:
        from cmdlet.search_file import CMDLET as _SEARCH_FILE_CMDLET

        runner = getattr(_SEARCH_FILE_CMDLET, "exec", None)
        if callable(runner):
            runner(
                None,
                ["-provider", "alldebrid", f"ID={magnet_id}"],
                config,
            )
    except Exception:
        pass
    # Logged unconditionally: the magnet submission itself already succeeded.
    debug(f"[alldebrid] Sent magnet {magnet_id} to AllDebrid for download")
|
2026-01-01 20:37:27 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def prepare_magnet(
    magnet_spec: str,
    config: Dict[str, Any],
) -> tuple[Optional[AllDebridClient], Optional[int]]:
    """Submit a magnet/hash to AllDebrid and return ``(client, magnet_id)``.

    Returns ``(None, None)`` when the API key is missing, the client cannot
    be created, or the submission fails; errors are logged rather than
    raised.  On success a best-effort search-file pass is dispatched for the
    newly created magnet.
    """
    api_key = _get_debrid_api_key(config or {})
    if not api_key:
        # Surface the provider-config hint panel first (best-effort UI),
        # then log the failure and bail.
        try:
            from SYS.rich_display import show_provider_config_panel

            show_provider_config_panel("alldebrid", ["api_key"])
        except Exception:
            pass
        log("AllDebrid API key not configured (provider.alldebrid.api_key)", file=sys.stderr)
        return None, None

    try:
        client = AllDebridClient(api_key)
    except Exception as exc:
        log(f"Failed to initialize AllDebrid client: {exc}", file=sys.stderr)
        return None, None

    try:
        magnet_info = client.magnet_add(magnet_spec)
        # A missing/zero id means AllDebrid rejected the magnet.
        magnet_id_val = magnet_info.get("id") or 0
        magnet_id = int(magnet_id_val)
        if magnet_id <= 0:
            log(f"AllDebrid magnet submission failed: {magnet_info}", file=sys.stderr)
            return None, None
    except Exception as exc:
        log(f"Failed to submit magnet to AllDebrid: {exc}", file=sys.stderr)
        return None, None

    _dispatch_alldebrid_magnet_search(magnet_id, config)
    return client, magnet_id
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _flatten_files_with_relpath(items: Any) -> Iterable[Dict[str, Any]]:
    """Yield flattened file nodes, each guaranteed to carry a "relpath" key.

    Falls back to the node's own name ("n" or "name") when the flattener did
    not attach a relative path.
    """
    for file_node in AllDebrid._flatten_files(items):
        relpath = file_node.get("_relpath") or file_node.get("relpath")
        if not relpath:
            relpath = str(file_node.get("n") or file_node.get("name") or "").strip()
        out_node = dict(file_node)
        out_node["relpath"] = relpath
        yield out_node
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def download_magnet(
    magnet_spec: str,
    original_url: str,
    final_output_dir: Path,
    config: Dict[str, Any],
    progress: Any,
    quiet_mode: bool,
    path_from_result: Callable[[Any], Path],
    on_emit: Callable[[Path, str, str, Dict[str, Any]], None],
) -> tuple[int, Optional[int]]:
    """Submit a magnet to AllDebrid, wait until ready, download every file.

    Each downloaded file is reported via ``on_emit(path, url, relpath,
    metadata)``.  Returns ``(files_downloaded, magnet_id)``; the magnet id is
    None only when submission itself failed.  All failures are logged, never
    raised.
    """
    client, magnet_id = prepare_magnet(magnet_spec, config)
    if client is None or magnet_id is None:
        return 0, None

    # How long to poll before giving up (config: streaming.wait_timeout, seconds).
    wait_timeout = 300
    try:
        streaming_config = config.get("streaming", {}) if isinstance(config, dict) else {}
        wait_timeout = int(streaming_config.get("wait_timeout", 300))
    except Exception:
        wait_timeout = 300

    # Poll status every 5s until AllDebrid marks the magnet ready (statusCode 4).
    elapsed = 0
    while elapsed < wait_timeout:
        try:
            status = client.magnet_status(magnet_id)
        except Exception as exc:
            log(f"Failed to read magnet status {magnet_id}: {exc}", file=sys.stderr)
            return 0, magnet_id
        ready = bool(status.get("ready")) or status.get("statusCode") == 4
        if ready:
            break
        time.sleep(5)
        elapsed += 5
    else:
        # while/else: loop exhausted without break -> timed out.
        log(f"AllDebrid magnet {magnet_id} timed out after {wait_timeout}s", file=sys.stderr)
        return 0, magnet_id

    try:
        files_result = client.magnet_links([magnet_id])
    except Exception as exc:
        log(f"Failed to list AllDebrid magnet files: {exc}", file=sys.stderr)
        return 0, magnet_id

    # magnet_links keys its response by stringified magnet id.
    magnet_files = files_result.get(str(magnet_id), {}) if isinstance(files_result, dict) else {}
    file_nodes = magnet_files.get("files") if isinstance(magnet_files, dict) else []
    if not file_nodes:
        log(f"AllDebrid magnet {magnet_id} produced no files", file=sys.stderr)
        return 0, magnet_id

    downloaded = 0
    for node in _flatten_files_with_relpath(file_nodes):
        # NOTE(review): reads the long-form "link"/"name" keys only; assumes
        # the flattener yields those rather than the short "l"/"n" keys —
        # verify against AllDebrid._flatten_files output.
        file_url = str(node.get("link") or "").strip()
        file_name = str(node.get("name") or "").strip()
        relpath = str(node.get("relpath") or file_name).strip()
        if not file_url or not relpath:
            continue

        # Mirror the torrent's folder structure beneath final_output_dir.
        target_path = final_output_dir
        rel_path_obj = Path(relpath)
        output_dir = target_path
        # NOTE(review): Path.parent is always truthy (a bare filename yields
        # Path(".")), so this branch always runs; joining "." is a harmless
        # no-op because pathlib drops "." components.
        if rel_path_obj.parent:
            output_dir = target_path / rel_path_obj.parent
            try:
                output_dir.mkdir(parents=True, exist_ok=True)
            except Exception:
                # Fall back to the flat output dir on mkdir failure.
                output_dir = target_path

        try:
            result_obj = _download_direct_file(
                file_url,
                output_dir,
                quiet=quiet_mode,
                suggested_filename=rel_path_obj.name,
                pipeline_progress=progress,
            )
        except Exception as exc:
            log(f"Failed to download AllDebrid file {file_url}: {exc}", file=sys.stderr)
            continue

        downloaded_path = path_from_result(result_obj)
        metadata = {
            "magnet_id": magnet_id,
            "relpath": relpath,
            "name": file_name,
        }
        on_emit(downloaded_path, file_url or original_url, relpath, metadata)
        downloaded += 1

    return downloaded, magnet_id
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def expand_folder_item(
    item: Any,
    get_search_provider: Optional[Callable[[str, Dict[str, Any]], Any]],
    config: Dict[str, Any],
) -> Tuple[List[Any], Optional[str]]:
    """Expand an AllDebrid "folder" result into its individual file results.

    Returns ``(expanded_items, pending_detail)``:
      - ``([], None)`` when the item is not an AllDebrid folder, the magnet
        id cannot be determined, or no provider is available;
      - ``([], detail)`` when the magnet is still pending (the provider
        returned a single folder result whose detail carries the status);
      - ``(files, None)`` on success, each file as a dict when possible.
    """
    # Items may be dicts or objects; read fields accordingly.
    table = getattr(item, "table", None) if not isinstance(item, dict) else item.get("table")
    media_kind = getattr(item, "media_kind", None) if not isinstance(item, dict) else item.get("media_kind")
    full_metadata = getattr(item, "full_metadata", None) if not isinstance(item, dict) else item.get("full_metadata")
    target = None
    if isinstance(item, dict):
        target = item.get("path") or item.get("url")
    else:
        target = getattr(item, "path", None) or getattr(item, "url", None)

    if (str(table or "").lower() != "alldebrid") or (str(media_kind or "").lower() != "folder"):
        return [], None

    # Magnet id: metadata first, then an "alldebrid:magnet:<id>" style path.
    magnet_id = None
    if isinstance(full_metadata, dict):
        magnet_id = full_metadata.get("magnet_id")
    if magnet_id is None and isinstance(target, str) and target.lower().startswith("alldebrid:magnet:"):
        try:
            magnet_id = int(target.split(":")[-1])
        except Exception:
            magnet_id = None

    if magnet_id is None or get_search_provider is None:
        return [], None

    provider = get_search_provider("alldebrid", config) if get_search_provider else None
    if provider is None:
        return [], None

    try:
        files = provider.search("*", limit=10_000, filters={"view": "files", "magnet_id": int(magnet_id)})
    except Exception:
        files = []

    # A single folder result means the magnet is not ready yet;
    # its detail field carries the pending status string.
    if files and len(files) == 1 and getattr(files[0], "media_kind", "") == "folder":
        detail = getattr(files[0], "detail", "")
        return [], str(detail or "unknown")

    expanded: List[Any] = []
    for sr in files:
        expanded.append(sr.to_dict() if hasattr(sr, "to_dict") else sr)
    return expanded, None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def adjust_output_dir_for_alldebrid(
    base_output_dir: Path,
    full_metadata: Optional[Dict[str, Any]],
    item: Any,
) -> Path:
    """Pick the final output directory for an AllDebrid file.

    Layers up to two levels beneath ``base_output_dir``:
      1. a directory named after the magnet (skipped when the base dir
         already ends with that sanitized name), and
      2. the file's intermediate relpath directories, with a duplicated
         leading magnet-name component stripped.

    The resulting directory is created; on creation failure the original
    ``base_output_dir`` is returned unchanged.
    """
    from SYS.utils import sanitize_filename as _sf

    output_dir = base_output_dir
    md = full_metadata if isinstance(full_metadata, dict) else {}
    # Magnet name: metadata keys first, then the item's "detail" field.
    magnet_name = md.get("magnet_name") or md.get("folder")
    if not magnet_name:
        try:
            detail_val = getattr(item, "detail", None) if not isinstance(item, dict) else item.get("detail")
            magnet_name = str(detail_val or "").strip() or None
        except Exception:
            magnet_name = None

    magnet_dir_name = _sf(str(magnet_name)) if magnet_name else ""
    try:
        base_tail = str(Path(output_dir).name or "")
    except Exception:
        base_tail = ""
    # Compare sanitized, lowercased names to decide whether the base dir
    # already ends with the magnet directory.
    base_tail_norm = _sf(base_tail).lower() if base_tail.strip() else ""
    magnet_dir_norm = magnet_dir_name.lower() if magnet_dir_name else ""

    # Avoid "<name>/<name>" double nesting when the base dir already matches.
    if magnet_dir_name and (not base_tail_norm or base_tail_norm != magnet_dir_norm):
        output_dir = Path(output_dir) / magnet_dir_name

    relpath = md.get("relpath") if isinstance(md, dict) else None
    if (not relpath) and isinstance(md.get("file"), dict):
        relpath = md["file"].get("_relpath")

    if relpath:
        # Normalize separators and drop "."/".." components for safety.
        parts = [p for p in str(relpath).replace("\\", "/").split("/") if p and p not in {".", ".."}]
        # Strip a leading component that duplicates the magnet directory.
        if magnet_dir_name and parts:
            try:
                if _sf(parts[0]).lower() == magnet_dir_norm:
                    parts = parts[1:]
            except Exception:
                pass
        # Only intermediate directories; the final component is the filename.
        for part in parts[:-1]:
            output_dir = Path(output_dir) / _sf(part)

    try:
        Path(output_dir).mkdir(parents=True, exist_ok=True)
    except Exception:
        output_dir = base_output_dir

    return output_dir
|
|
|
|
|
|
|
|
|
|
|
2025-12-19 02:29:42 -08:00
|
|
|
class AllDebrid(Provider):
|
2026-01-01 20:37:27 -08:00
|
|
|
# Magnet URIs should be routed through this provider.
|
2026-01-07 05:09:59 -08:00
|
|
|
TABLE_AUTO_STAGES = {"alldebrid": ["download-file"]}
|
2026-01-01 20:37:27 -08:00
|
|
|
URL = ("magnet:",)
|
2026-01-04 02:23:50 -08:00
|
|
|
URL_DOMAINS = ()
|
|
|
|
|
|
2026-01-07 05:09:59 -08:00
|
|
|
    @staticmethod
    def _resolve_magnet_spec_from_result(result: Any) -> Optional[str]:
        """Derive a magnet/hash spec from a SearchResult-like object.

        Non-folder results resolve directly from their path/url.  Folder
        (magnet) results are resolved from metadata candidates in priority
        order — magnet link fields first, then hash fields — falling back to
        the target itself.  Returns None when the result is not from the
        "alldebrid" table or nothing resolves.
        """
        table = getattr(result, "table", None)
        media_kind = getattr(result, "media_kind", None)
        tags = getattr(result, "tag", None)
        full_metadata = getattr(result, "full_metadata", None)
        target = getattr(result, "path", None) or getattr(result, "url", None)

        if not table or str(table).strip().lower() != "alldebrid":
            return None

        # Folder detection: media_kind == "folder" or a "folder" tag.
        kind_val = str(media_kind or "").strip().lower()
        is_folder = kind_val == "folder"
        if not is_folder and isinstance(tags, (list, set)):
            for tag in tags:
                if str(tag or "").strip().lower() == "folder":
                    is_folder = True
                    break
        if not is_folder:
            # Plain file result: the target itself is the only candidate.
            return resolve_magnet_spec(str(target or "")) if isinstance(target, str) else None

        metadata = full_metadata if isinstance(full_metadata, dict) else {}
        candidates: List[str] = []

        def _maybe_add(value: Any) -> None:
            # Collect non-empty string candidates, preserving priority order.
            if isinstance(value, str):
                cleaned = value.strip()
                if cleaned:
                    candidates.append(cleaned)

        # The "magnet" metadata entry may be a dict of fields or a bare string.
        magnet_block = metadata.get("magnet")
        if isinstance(magnet_block, dict):
            for inner in ("magnet", "magnet_link", "link", "url"):
                _maybe_add(magnet_block.get(inner))
            for inner in ("hash", "info_hash", "torrenthash", "magnethash"):
                _maybe_add(magnet_block.get(inner))
        else:
            _maybe_add(magnet_block)

        for extra in ("magnet_link", "magnet_url", "magnet_spec"):
            _maybe_add(metadata.get(extra))
        _maybe_add(metadata.get("hash"))
        _maybe_add(metadata.get("info_hash"))

        for candidate in candidates:
            spec = resolve_magnet_spec(candidate)
            if spec:
                return spec
        # Last resort: try the target path/url itself.
        return resolve_magnet_spec(str(target)) if isinstance(target, str) else None
|
|
|
|
|
|
|
|
|
|
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]:
|
|
|
|
|
spec = resolve_magnet_spec(url)
|
|
|
|
|
if not spec:
|
|
|
|
|
return False, None
|
|
|
|
|
|
|
|
|
|
cfg = self.config if isinstance(self.config, dict) else {}
|
|
|
|
|
try:
|
|
|
|
|
prepare_magnet(spec, cfg)
|
|
|
|
|
return True, None
|
|
|
|
|
except Exception:
|
|
|
|
|
return False, None
|
|
|
|
|
|
2026-01-04 02:23:50 -08:00
|
|
|
    @classmethod
    def url_patterns(cls) -> Tuple[str, ...]:
        """Return the URL patterns this provider can claim.

        Combines the static patterns (the class's magnet prefix) with host
        domains loaded from the on-disk AllDebrid cache, so supported hosters
        route through this provider.  Cache problems degrade silently to the
        static patterns only.
        """
        # Combine static patterns with cached host domains.
        patterns = list(super().url_patterns())
        try:
            cached = _load_cached_hoster_domains()
            for d in cached:
                dom = str(d or "").strip().lower()
                if dom and dom not in patterns:
                    patterns.append(dom)
            # Diagnostic: emitted on every call so routing issues are visible.
            log(
                f"[alldebrid] url_patterns loaded {len(cached)} cached host domains; total patterns={len(patterns)}",
                file=sys.stderr,
            )
        except Exception:
            pass
        return tuple(patterns)
|
|
|
|
|
|
2025-12-16 01:45:01 -08:00
|
|
|
"""Search provider for AllDebrid account content.
|
|
|
|
|
|
|
|
|
|
This provider lists and searches the files/magnets already present in the
|
|
|
|
|
user's AllDebrid account.
|
|
|
|
|
|
|
|
|
|
Query behavior:
|
|
|
|
|
- "*" / "all" / "list": list recent files from ready magnets
|
|
|
|
|
- otherwise: substring match on file name OR magnet name, or exact magnet id
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
def validate(self) -> bool:
|
|
|
|
|
# Consider "available" when configured; actual API connectivity can vary.
|
2026-01-04 02:23:50 -08:00
|
|
|
ok = bool(_get_debrid_api_key(self.config or {}))
|
|
|
|
|
if ok:
|
|
|
|
|
# Best-effort: refresh cached host domains so future URL routing can
|
|
|
|
|
# route supported hosters through this provider.
|
|
|
|
|
try:
|
|
|
|
|
refresh_alldebrid_hoster_cache(force=False)
|
|
|
|
|
except Exception:
|
|
|
|
|
pass
|
|
|
|
|
return ok
|
2025-12-16 01:45:01 -08:00
|
|
|
|
2025-12-18 22:50:21 -08:00
|
|
|
    def download(self, result: SearchResult, output_dir: Path) -> Optional[Path]:
        """Download an AllDebrid SearchResult into output_dir.

        AllDebrid magnet file listings often provide links that require an API
        "unlock" step to produce a true direct-download URL. Without unlocking,
        callers may download a small HTML/redirect page instead of file bytes.

        This is used by the download-file cmdlet when a provider item is piped.

        Flow: validate key/target -> unlock the link (polling up to 45s) ->
        stream the unlocked URL directly (skipping the HTML guard), or fall
        back to the guarded generic downloader when unlock did not change the
        URL.  Returns the downloaded path or None; this method never raises.
        """
        try:
            api_key = _get_debrid_api_key(self.config or {})
            if not api_key:
                log("[alldebrid] download skipped: missing api_key", file=sys.stderr)
                return None

            target = str(getattr(result, "path", "") or "").strip()
            if not target.startswith(("http://", "https://")):
                log(f"[alldebrid] download skipped: target not http(s): {target}", file=sys.stderr)
                return None

            try:
                from API.alldebrid import AllDebridClient

                client = AllDebridClient(api_key)
            except Exception as exc:
                log(f"[alldebrid] Failed to init client: {exc}", file=sys.stderr)
                return None

            log(f"[alldebrid] download routing target={target}", file=sys.stderr)

            # Prefer provider title as the output filename; later we may override if unlocked URL has a better basename.
            suggested = sanitize_filename(str(getattr(result, "title", "") or "").strip())
            suggested_name = suggested if suggested else None

            # Quiet mode when download-file is mid-pipeline.
            quiet = bool(self.config.get("_quiet_background_output")) if isinstance(self.config, dict) else False

            def _html_guard(path: Path) -> bool:
                # Heuristic: a small non-.html file whose first bytes look like
                # an HTML document is a failed download (redirect/error page).
                try:
                    if path.exists():
                        size = path.stat().st_size
                        if size > 0 and size <= 250_000 and path.suffix.lower() not in (".html", ".htm"):
                            head = path.read_bytes()[:512]
                            try:
                                text = head.decode("utf-8", errors="ignore").lower()
                            except Exception:
                                text = ""
                            if "<html" in text or "<!doctype html" in text:
                                return True
                except Exception:
                    return False
                return False

            def _download_unlocked(unlocked_url: str, *, allow_html: bool = False) -> Optional[Path]:
                # If this is an unlocked debrid link (allow_html=True), stream it directly and skip
                # the generic HTML guard to avoid falling back to the public hoster.
                if allow_html:
                    try:
                        from API.HTTP import HTTPClient

                        # Pick a filename: result title, then URL basename,
                        # then a generic default with a .bin suffix.
                        fname = suggested_name or sanitize_filename(Path(urlparse(unlocked_url).path).name)
                        if not fname:
                            fname = "download"
                        if not Path(fname).suffix:
                            fname = f"{fname}.bin"
                        dest = Path(output_dir) / fname
                        dest.parent.mkdir(parents=True, exist_ok=True)
                        # NOTE: this local "client" shadows the outer AllDebrid
                        # client only within this nested scope.
                        with HTTPClient(timeout=30.0) as client:
                            with client._request_stream("GET", unlocked_url, follow_redirects=True) as resp:
                                resp.raise_for_status()
                                with dest.open("wb") as fh:
                                    for chunk in resp.iter_bytes():
                                        if not chunk:
                                            continue
                                        fh.write(chunk)
                        return dest if dest.exists() else None
                    except Exception as exc2:
                        log(f"[alldebrid] raw stream (unlocked) failed: {exc2}", file=sys.stderr)
                        return None

                # Otherwise, use standard downloader with guardrails.
                pipe_progress = None
                try:
                    if isinstance(self.config, dict):
                        pipe_progress = self.config.get("_pipeline_progress")
                except Exception:
                    pipe_progress = None

                try:
                    dl_res = _download_direct_file(
                        unlocked_url,
                        Path(output_dir),
                        quiet=quiet,
                        suggested_filename=suggested_name,
                        pipeline_progress=pipe_progress,
                    )
                    downloaded_path = getattr(dl_res, "path", None)
                    if downloaded_path is None:
                        return None
                    downloaded_path = Path(str(downloaded_path))
                except DownloadError as exc:
                    log(
                        f"[alldebrid] _download_direct_file rejected URL ({exc}); no further fallback", file=sys.stderr
                    )
                    return None

                # Reject downloads that are actually HTML error/redirect pages.
                try:
                    if _html_guard(downloaded_path):
                        log(
                            "[alldebrid] Download returned HTML page (not file bytes). Try again or check AllDebrid link status.",
                            file=sys.stderr,
                        )
                        return None
                except Exception:
                    pass

                return downloaded_path if downloaded_path.exists() else None

            # Try to unlock the hoster link into a true direct-download URL.
            unlocked_url = target
            try:
                unlocked = client.resolve_unlock_link(target, poll=True, max_wait_seconds=45, poll_interval_seconds=5)
                if isinstance(unlocked, str) and unlocked.strip().startswith(("http://", "https://")):
                    unlocked_url = unlocked.strip()
                    log(f"[alldebrid] unlock -> {unlocked_url}", file=sys.stderr)
            except Exception as exc:
                log(f"[alldebrid] Failed to unlock link: {exc}", file=sys.stderr)

            if unlocked_url != target:
                # Prefer filename from unlocked URL path.
                try:
                    unlocked_name = sanitize_filename(Path(urlparse(unlocked_url).path).name)
                    if unlocked_name:
                        suggested_name = unlocked_name
                except Exception:
                    pass

            # When using an unlocked URL different from the original hoster, stream it directly and do NOT fall back to the public URL.
            allow_html = unlocked_url != target
            log(
                f"[alldebrid] downloading from {unlocked_url} (allow_html={allow_html})",
                file=sys.stderr,
            )
            downloaded = _download_unlocked(unlocked_url, allow_html=allow_html)
            if downloaded:
                log(f"[alldebrid] downloaded -> {downloaded}", file=sys.stderr)
                return downloaded

            # If unlock failed entirely and we never changed URL, allow a single attempt on the original target.
            if unlocked_url == target:
                downloaded = _download_unlocked(target, allow_html=False)
                if downloaded:
                    log(f"[alldebrid] downloaded (original target) -> {downloaded}", file=sys.stderr)
                    return downloaded

            return None
        except Exception:
            # Deliberate catch-all: download() is a best-effort provider hook.
            return None
|
|
|
|
|
|
2026-01-07 05:09:59 -08:00
|
|
|
def download_items(
    self,
    result: SearchResult,
    output_dir: Path,
    *,
    emit: Callable[[Path, str, str, Dict[str, Any]], None],
    progress: Any,
    quiet_mode: bool,
    path_from_result: Callable[[Any], Path],
    config: Optional[Dict[str, Any]] = None,
) -> int:
    """Download all files for *result*'s magnet into *output_dir*.

    Resolves a magnet spec from the search result, then delegates the
    actual transfer to ``download_magnet``, forwarding each completed
    file through *emit*.

    Returns:
        Number of files downloaded; ``0`` when no magnet spec could be
        resolved from *result*.
    """
    spec = self._resolve_magnet_spec_from_result(result)
    if not spec:
        return 0

    # Prefer an explicitly supplied config dict; otherwise fall back to
    # the provider's own configuration.
    effective_cfg = config if isinstance(config, dict) else (self.config or {})

    def _forward(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
        # Thin pass-through keeping a stable callback shape for download_magnet.
        emit(path, file_url, relpath, metadata)

    count, _ = download_magnet(
        spec,
        str(getattr(result, "path", "") or ""),
        output_dir,
        effective_cfg,
        progress,
        quiet_mode,
        path_from_result,
        _forward,
    )
    return count
|
|
|
|
|
|
2025-12-16 01:45:01 -08:00
|
|
|
@staticmethod
def _flatten_files(items: Any,
                   *,
                   _prefix: Optional[List[str]] = None) -> Iterable[Dict[str, Any]]:
    """Flatten AllDebrid magnet file tree into file dicts, preserving relative paths.

    The API commonly returns:
    - file:   ``{n: name, s: size, l: link}``
    - folder: ``{n: name, e: [sub_items]}``

    Each yielded file node is a shallow copy of the original dict with a
    best-effort relative path attached under ``_relpath`` using POSIX
    separators (e.g. ``"Season 1/E01.mkv"``).

    Some call sites in this repo also expect ``{name, size, link}`` keys,
    so both key styles are accepted.
    """
    parts = list(_prefix or [])

    # Normalise the input: a bare dict is treated as a single-node list;
    # anything else that is not a list yields nothing.
    if not items:
        return
    nodes = [items] if isinstance(items, dict) else items
    if not isinstance(nodes, list):
        return

    for entry in nodes:
        if not isinstance(entry, dict):
            continue

        nested = entry.get("e") or entry.get("children")
        if isinstance(nested, list):
            # Folder node: extend the path prefix (when it has a usable
            # name) and recurse into its children.
            label = entry.get("n") or entry.get("name")
            if isinstance(label, str) and label.strip():
                deeper = parts + [label.strip()]
            else:
                deeper = parts
            yield from AllDebrid._flatten_files(nested, _prefix=deeper)
            continue

        # Leaf node: only yield entries that carry both a non-empty name
        # and a non-empty link.
        fname = entry.get("n") or entry.get("name")
        flink = entry.get("l") or entry.get("link")
        if not (isinstance(fname, str) and fname.strip()):
            continue
        if not (isinstance(flink, str) and flink.strip()):
            continue

        enriched = dict(entry)
        enriched["_relpath"] = "/".join(p for p in parts + [fname.strip()] if p)
        yield enriched
|
2025-12-16 01:45:01 -08:00
|
|
|
|
|
|
|
|
def search(
    self,
    query: str,
    limit: int = 50,
    filters: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> List[SearchResult]:
    """Search the AllDebrid account in one of two views.

    ``filters["view"]`` selects the stage:
    - ``"folders"`` (default): list account magnets matching *query*
      (``*`` / ``all`` / ``list`` match everything; a purely numeric
      query matches a magnet id exactly).
    - ``"files"``: list the files of one magnet, identified by
      ``filters["magnet_id"]`` (or ``kwargs["magnet_id"]``).

    Returns at most *limit* results; returns ``[]`` on empty query,
    missing API key, client init failure, or invalid magnet id.
    """
    q = (query or "").strip()
    if not q:
        return []

    api_key = _get_debrid_api_key(self.config or {})
    if not api_key:
        return []

    # Resolve the requested view; anything unset falls back to "folders".
    view = None
    if isinstance(filters, dict):
        view = str(filters.get("view") or "").strip().lower() or None
    view = view or "folders"

    try:
        from API.alldebrid import AllDebridClient

        client = AllDebridClient(api_key)
    except Exception as exc:
        log(f"[alldebrid] Failed to init client: {exc}", file=sys.stderr)
        return []

    # Wildcard queries disable substring filtering entirely.
    q_lower = q.lower()
    needle = "" if q_lower in {"*", "all", "list"} else q_lower

    # Second-stage: list files for a specific magnet id.
    if view == "files":
        magnet_id_val = None
        if isinstance(filters, dict):
            magnet_id_val = filters.get("magnet_id")
        if magnet_id_val is None:
            magnet_id_val = kwargs.get("magnet_id")

        if magnet_id_val is None:
            return []

        try:
            magnet_id = int(magnet_id_val)
        except (TypeError, ValueError):
            return []

        # Status lookup is best-effort; an empty dict means "unknown".
        magnet_status: Dict[str, Any] = {}
        try:
            magnet_status = client.magnet_status(magnet_id)
        except Exception:
            magnet_status = {}

        magnet_name = str(
            magnet_status.get("filename") or magnet_status.get("name")
            or magnet_status.get("hash") or f"magnet-{magnet_id}"
        )
        status_code = magnet_status.get("statusCode")
        status_text = str(magnet_status.get("status") or "").strip() or "unknown"
        # statusCode 4 is AllDebrid's "ready" state; "ready" flag is a fallback.
        ready = status_code == 4 or bool(magnet_status.get("ready"))

        if not ready:
            # Not downloadable yet: surface a single folder row describing
            # the magnet's current state instead of a file list.
            return [
                SearchResult(
                    table="alldebrid",
                    title=magnet_name,
                    path=f"alldebrid:magnet:{magnet_id}",
                    detail=status_text,
                    annotations=["folder", "not-ready"],
                    media_kind="folder",
                    tag={"alldebrid", "folder", str(magnet_id), "not-ready"},
                    columns=[
                        ("Folder", magnet_name),
                        ("ID", str(magnet_id)),
                        ("Status", status_text),
                        ("Ready", "no"),
                    ],
                    full_metadata={
                        "magnet": magnet_status,
                        "magnet_id": magnet_id,
                        "provider": "alldebrid",
                        "provider_view": "files",
                        "magnet_name": magnet_name,
                    },
                )
            ]

        # Fetch the file tree; failures degrade to an empty listing.
        try:
            files_result = client.magnet_links([magnet_id])
            magnet_files = (
                files_result.get(str(magnet_id), {}) if isinstance(files_result, dict) else {}
            )
            file_tree = magnet_files.get("files", []) if isinstance(magnet_files, dict) else []
        except Exception as exc:
            log(
                f"[alldebrid] Failed to list files for magnet {magnet_id}: {exc}",
                file=sys.stderr,
            )
            file_tree = []

        results: List[SearchResult] = []
        for file_node in self._flatten_files(file_tree):
            # Accept both AllDebrid short keys (n/l/s) and long keys.
            file_name = str(file_node.get("n") or file_node.get("name") or "").strip()
            file_url = str(file_node.get("l") or file_node.get("link") or "").strip()
            relpath = str(file_node.get("_relpath") or file_name or "").strip()
            file_size = file_node.get("s") or file_node.get("size")
            if not file_name or not file_url:
                continue

            if needle and needle not in file_name.lower():
                continue

            # Coerce the reported size to int when it is numeric.
            size_bytes: Optional[int] = None
            try:
                if isinstance(file_size, (int, float)):
                    size_bytes = int(file_size)
                elif isinstance(file_size, str) and file_size.isdigit():
                    size_bytes = int(file_size)
            except Exception:
                size_bytes = None

            metadata = {
                "magnet": magnet_status,
                "magnet_id": magnet_id,
                "magnet_name": magnet_name,
                "relpath": relpath,
                "file": file_node,
                "provider": "alldebrid",
                "provider_view": "files",
            }
            if file_url:
                # Selection hooks so `@N` picks can be turned into a
                # download command directly.
                metadata["_selection_args"] = ["-url", file_url]
                metadata["_selection_action"] = ["download-file", "-url", file_url]

            results.append(
                SearchResult(
                    table="alldebrid",
                    title=file_name,
                    path=file_url,
                    detail=magnet_name,
                    annotations=["file"],
                    media_kind="file",
                    size_bytes=size_bytes,
                    tag={"alldebrid", "file", str(magnet_id)},
                    columns=[
                        ("File", file_name),
                        ("Folder", magnet_name),
                        ("ID", str(magnet_id)),
                    ],
                    full_metadata=metadata,
                )
            )
            if len(results) >= max(1, limit):
                break

        return results

    # Default: folders view (magnets)
    try:
        magnets = client.magnet_list() or []
    except Exception as exc:
        log(f"[alldebrid] Failed to list account magnets: {exc}", file=sys.stderr)
        return []

    # A purely numeric query is treated as an exact magnet-id match.
    wanted_id: Optional[int] = None
    if needle.isdigit():
        try:
            wanted_id = int(needle)
        except Exception:
            wanted_id = None

    results: List[SearchResult] = []
    for magnet in magnets:
        if not isinstance(magnet, dict):
            continue

        magnet_id_val = magnet.get("id")
        if magnet_id_val is None:
            continue
        try:
            magnet_id = int(magnet_id_val)
        except (TypeError, ValueError):
            continue

        magnet_name = str(
            magnet.get("filename") or magnet.get("name") or magnet.get("hash")
            or f"magnet-{magnet_id}"
        )
        magnet_name_lower = magnet_name.lower()

        status_text = str(magnet.get("status") or "").strip() or "unknown"
        status_code = magnet.get("statusCode")
        # Same readiness rule as the files view: statusCode 4 or "ready".
        ready = status_code == 4 or bool(magnet.get("ready"))

        if wanted_id is not None:
            if magnet_id != wanted_id:
                continue
        elif needle and (needle not in magnet_name_lower):
            continue

        # Coerce the reported size to int when it is numeric.
        size_bytes: Optional[int] = None
        try:
            size_val = magnet.get("size")
            if isinstance(size_val, (int, float)):
                size_bytes = int(size_val)
            elif isinstance(size_val, str) and size_val.isdigit():
                size_bytes = int(size_val)
        except Exception:
            size_bytes = None

        results.append(
            SearchResult(
                table="alldebrid",
                title=magnet_name,
                path=f"alldebrid:magnet:{magnet_id}",
                detail=status_text,
                annotations=["folder"],
                media_kind="folder",
                size_bytes=size_bytes,
                tag={"alldebrid", "folder", str(magnet_id)}
                | ({"ready"} if ready else {"not-ready"}),
                columns=[
                    ("Folder", magnet_name),
                    ("ID", str(magnet_id)),
                    ("Status", status_text),
                    ("Ready", "yes" if ready else "no"),
                ],
                full_metadata={
                    "magnet": magnet,
                    "magnet_id": magnet_id,
                    "provider": "alldebrid",
                    "provider_view": "folders",
                    "magnet_name": magnet_name,
                },
            )
        )

        if len(results) >= max(1, limit):
            break

    return results
|
2026-01-01 20:37:27 -08:00
|
|
|
|
|
|
|
|
def selector(
    self,
    selected_items: List[Any],
    *,
    ctx: Any,
    stage_is_last: bool = True,
    **_kwargs: Any,
) -> bool:
    """Handle AllDebrid `@N` selection by drilling into magnet files.

    When the user selects a folder (magnet) row, fetch that magnet's file
    list via :meth:`search` and render it as a new result table on *ctx*.

    Returns:
        ``True`` when the selection was handled here (even if rendering
        failed), ``False`` when it should be handled by the default flow
        (not the last stage, or no folder rows were selected).
    """
    if not stage_is_last:
        return False

    def _as_payload(item: Any) -> Dict[str, Any]:
        # Normalise a selected item (dict, SearchResult, or arbitrary
        # object) into a plain dict; best-effort, never raises.
        if isinstance(item, dict):
            return dict(item)
        try:
            if hasattr(item, "to_dict"):
                maybe = item.to_dict()  # type: ignore[attr-defined]
                if isinstance(maybe, dict):
                    return maybe
        except Exception:
            pass
        payload: Dict[str, Any] = {}
        try:
            payload = {
                "title": getattr(item, "title", None),
                "path": getattr(item, "path", None),
                "table": getattr(item, "table", None),
                "annotations": getattr(item, "annotations", None),
                "media_kind": getattr(item, "media_kind", None),
                "full_metadata": getattr(item, "full_metadata", None),
            }
        except Exception:
            payload = {}
        return payload

    # Collect selected items that look like magnet folders.
    chosen: List[Dict[str, Any]] = []
    for item in selected_items or []:
        payload = _as_payload(item)
        meta = payload.get("full_metadata") or payload.get("metadata") or {}
        if not isinstance(meta, dict):
            meta = {}

        # Gather lowercase annotations from both the item and its metadata.
        ann_set: set[str] = set()
        for ann_source in (payload.get("annotations"), meta.get("annotations")):
            if isinstance(ann_source, (list, tuple, set)):
                for ann in ann_source:
                    ann_text = str(ann or "").strip().lower()
                    if ann_text:
                        ann_set.add(ann_text)

        media_kind = str(payload.get("media_kind") or meta.get("media_kind") or "").strip().lower()
        is_folder = (media_kind == "folder") or ("folder" in ann_set)
        magnet_id = meta.get("magnet_id")
        # Only folder rows that carry a magnet id can be drilled into.
        if magnet_id is None or (not is_folder):
            continue

        title = str(payload.get("title") or meta.get("magnet_name") or meta.get("name") or "").strip()
        if not title:
            title = f"magnet-{magnet_id}"

        chosen.append({
            "magnet_id": magnet_id,
            "title": title,
        })

    if not chosen:
        return False

    # Only the first selected folder is expanded.
    target = chosen[0]
    magnet_id = target.get("magnet_id")
    title = target.get("title") or f"magnet-{magnet_id}"

    try:
        files = self.search("*", limit=200, filters={"view": "files", "magnet_id": magnet_id})
    except Exception as exc:
        print(f"alldebrid selector failed: {exc}\n")
        return True

    # Rendering dependencies are optional; bail out quietly if missing.
    try:
        from SYS.result_table import ResultTable
        from SYS.rich_display import stdout_console
    except Exception:
        return True

    table = ResultTable(f"AllDebrid Files: {title}").set_preserve_order(True)
    table.set_table("alldebrid")
    try:
        table.set_table_metadata({"provider": "alldebrid", "view": "files", "magnet_id": magnet_id})
    except Exception:
        pass
    # Record the command that would reproduce this table for history/replay.
    table.set_source_command(
        "search-file",
        ["-provider", "alldebrid", "-open", str(magnet_id), "-query", "*"],
    )

    results_payload: List[Dict[str, Any]] = []
    for r in files or []:
        table.add_result(r)
        try:
            results_payload.append(r.to_dict())
        except Exception:
            # Fallback serialisation when the result lacks to_dict().
            results_payload.append(
                {
                    "table": getattr(r, "table", "alldebrid"),
                    "title": getattr(r, "title", ""),
                    "path": getattr(r, "path", ""),
                    "full_metadata": getattr(r, "full_metadata", None),
                }
            )

    # Publish the table to the pipeline context (best-effort).
    try:
        ctx.set_last_result_table(table, results_payload)
        ctx.set_current_stage_table(table)
    except Exception:
        pass

    # Render to the console (best-effort).
    try:
        stdout_console().print()
        stdout_console().print(table)
    except Exception:
        pass

    return True
|