Files
Medios-Macina/Provider/alldebrid.py

1633 lines
59 KiB
Python
Raw Normal View History

2025-12-16 01:45:01 -08:00
from __future__ import annotations
2026-01-01 20:37:27 -08:00
import hashlib
2026-01-04 02:23:50 -08:00
import json
2025-12-16 01:45:01 -08:00
import sys
2026-01-01 20:37:27 -08:00
import time
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Callable, Tuple
from urllib.parse import urlparse
2025-12-16 01:45:01 -08:00
2026-01-05 07:51:19 -08:00
from API.HTTP import HTTPClient, _download_direct_file
2026-01-04 02:23:50 -08:00
from API.alldebrid import AllDebridClient, parse_magnet_or_hash, is_torrent_file
2025-12-19 02:29:42 -08:00
from ProviderCore.base import Provider, SearchResult
2026-01-09 01:22:06 -08:00
from SYS.provider_helpers import TableProviderMixin
from SYS.utils import sanitize_filename
2026-01-05 07:51:19 -08:00
from SYS.logger import log, debug
2026-01-04 02:23:50 -08:00
from SYS.models import DownloadError
_HOSTS_CACHE_TTL_SECONDS = 24 * 60 * 60
def _repo_root() -> Path:
try:
return Path(__file__).resolve().parents[1]
except Exception:
return Path(".")
def _hosts_cache_path() -> Path:
    """Path of the on-disk AllDebrid hosts cache.

    Kept local to the repo so it works in portable installs; the registry's
    URL routing can read this file without instantiating providers.

    Expected content is the JSON payload shape from AllDebrid:
        {"status":"success","data":{"hosts":[...],"streams":[...],"redirectors":[...]}}
    """
    return _repo_root() / "API" / "data" / "alldebrid.json"
def _load_cached_domains(category: str) -> List[str]:
"""Load cached domain list from API/data/alldebrid.json.
category: "hosts" | "streams" | "redirectors"
"""
wanted = str(category or "").strip().lower()
if wanted not in {"hosts", "streams", "redirectors"}:
return []
path = _hosts_cache_path()
try:
if not path.exists() or not path.is_file():
return []
payload = json.loads(path.read_text(encoding="utf-8"))
except Exception:
return []
if not isinstance(payload, dict):
return []
data = payload.get("data")
if not isinstance(data, dict):
# Back-compat for older cache shapes.
data = payload
if not isinstance(data, dict):
return []
raw_list = data.get(wanted)
if not isinstance(raw_list, list):
return []
out: List[str] = []
seen: set[str] = set()
for d in raw_list:
try:
dom = str(d or "").strip().lower()
except Exception:
continue
if not dom:
continue
if dom.startswith("http://") or dom.startswith("https://"):
# Accidentally stored as a URL; normalize to hostname.
try:
p = urlparse(dom)
dom = str(p.hostname or "").strip().lower()
except Exception:
continue
if dom.startswith("www."):
dom = dom[4:]
if not dom or dom in seen:
continue
seen.add(dom)
out.append(dom)
return out
def _load_cached_hoster_domains() -> List[str]:
    # For URL routing (download-file), we intentionally use only the "hosts" list.
    # The "streams" list is extremely broad and would steal URLs from other providers.
    return _load_cached_domains("hosts")
def _save_cached_hosts_payload(payload: Dict[str, Any]) -> None:
    """Persist the AllDebrid hosts payload to the on-disk cache (best-effort).

    Any filesystem error is silently ignored; the cache is an optimization.
    """
    target = _hosts_cache_path()
    try:
        target.parent.mkdir(parents=True, exist_ok=True)
        serialized = json.dumps(payload, ensure_ascii=False, indent=2)
        target.write_text(serialized, encoding="utf-8")
    except Exception:
        return
def _cache_is_fresh() -> bool:
    """True when the hosts cache file exists and is younger than the TTL."""
    cache_file = _hosts_cache_path()
    try:
        if not (cache_file.exists() and cache_file.is_file()):
            return False
        age_seconds = time.time() - float(cache_file.stat().st_mtime)
        return age_seconds < _HOSTS_CACHE_TTL_SECONDS
    except Exception:
        return False
def _fetch_hosts_payload_v4_hosts() -> Optional[Dict[str, Any]]:
    """Fetch the public AllDebrid hosts payload.

    This intentionally does NOT require an API key.
    Endpoint referenced by user: https://api.alldebrid.com/v4/hosts
    """
    endpoint = "https://api.alldebrid.com/v4/hosts"
    try:
        with HTTPClient(timeout=20.0) as http:
            response = http.get(endpoint)
            response.raise_for_status()
            payload = response.json()
    except Exception as exc:
        log(f"[alldebrid] Failed to fetch hosts list: {exc}", file=sys.stderr)
        return None
    return payload if isinstance(payload, dict) else None
def refresh_alldebrid_hoster_cache(*, force: bool = False) -> None:
    """Refresh the on-disk cache of host domains (best-effort).

    With force=False the fetch is skipped while the cache is still fresh.
    """
    if not force and _cache_is_fresh():
        return
    payload = _fetch_hosts_payload_v4_hosts()
    if isinstance(payload, dict) and payload:
        _save_cached_hosts_payload(payload)
def _get_debrid_api_key(config: Dict[str, Any]) -> Optional[str]:
"""Read AllDebrid API key from config.
Preferred formats:
- config.conf provider block:
[provider=alldebrid]
api_key=...
-> config["provider"]["alldebrid"]["api_key"]
- store-style debrid block:
config["store"]["debrid"]["all-debrid"]["api_key"]
Falls back to some legacy keys if present.
"""
# 1) provider block: [provider=alldebrid]
provider = config.get("provider")
if isinstance(provider, dict):
entry = provider.get("alldebrid")
if isinstance(entry, dict):
for k in ("api_key", "apikey", "API_KEY", "APIKEY"):
val = entry.get(k)
if isinstance(val, str) and val.strip():
return val.strip()
if isinstance(entry, str) and entry.strip():
return entry.strip()
# 2) store.debrid block (canonical for debrid store configuration)
try:
from SYS.config import get_debrid_api_key
2025-12-16 01:45:01 -08:00
key = get_debrid_api_key(config, service="All-debrid")
return key.strip() if key else None
except Exception:
pass
# Legacy fallback (kept permissive so older configs still work)
for legacy_key in ("alldebrid_api_key", "AllDebrid", "all_debrid_api_key"):
val = config.get(legacy_key)
if isinstance(val, str) and val.strip():
return val.strip()
return None
2026-01-01 20:37:27 -08:00
def _consume_bencoded_value(data: bytes, pos: int) -> int:
if pos >= len(data):
raise ValueError("Unexpected end of bencode")
token = data[pos:pos + 1]
if token == b"i":
end = data.find(b"e", pos + 1)
if end == -1:
raise ValueError("Unterminated integer")
return end + 1
if token == b"l" or token == b"d":
cursor = pos + 1
while cursor < len(data):
if data[cursor:cursor + 1] == b"e":
return cursor + 1
cursor = _consume_bencoded_value(data, cursor)
raise ValueError("Unterminated list/dict")
if token and b"0" <= token <= b"9":
colon = data.find(b":", pos)
if colon == -1:
raise ValueError("Invalid string length")
length = int(data[pos:colon])
return colon + 1 + length
raise ValueError("Unknown bencode token")
def _info_hash_from_torrent_bytes(data: bytes) -> Optional[str]:
needle = b"4:info"
idx = data.find(needle)
if idx == -1:
return None
start = idx + len(needle)
try:
end = _consume_bencoded_value(data, start)
except ValueError:
return None
info_bytes = data[start:end]
try:
return hashlib.sha1(info_bytes).hexdigest()
except Exception:
return None
def _fetch_torrent_bytes(target: str) -> Optional[bytes]:
path_obj = Path(str(target))
try:
if path_obj.exists() and path_obj.is_file():
return path_obj.read_bytes()
except Exception:
pass
try:
parsed = urlparse(target)
except Exception:
parsed = None
if parsed is None or not parsed.scheme or parsed.scheme.lower() not in {"http", "https"}:
return None
if not target.lower().endswith(".torrent"):
return None
try:
with HTTPClient(timeout=30.0) as client:
response = client.get(target)
return response.content
except Exception as exc:
log(f"Failed to download .torrent from {target}: {exc}", file=sys.stderr)
return None
def resolve_magnet_spec(target: str) -> Optional[str]:
    """Resolve a magnet/hash/torrent URL into a magnet/hash string."""
    spec = str(target or "").strip()
    if not spec:
        return None
    # Direct magnet URI or bare info-hash.
    direct = parse_magnet_or_hash(spec)
    if direct:
        return direct
    # Otherwise: fetch a .torrent and derive its info-hash.
    if not is_torrent_file(spec):
        return None
    raw = _fetch_torrent_bytes(spec)
    if not raw:
        return None
    return _info_hash_from_torrent_bytes(raw) or None
def _dispatch_alldebrid_magnet_search(
    magnet_id: int,
    config: Dict[str, Any],
) -> None:
    """Best-effort: surface the freshly added magnet via the search-file cmdlet.

    Failures are swallowed on purpose — this is a UX nicety, not a required
    step of the magnet submission flow.
    """
    try:
        from cmdlet.search_file import CMDLET as _SEARCH_FILE_CMDLET
        exec_fn = getattr(_SEARCH_FILE_CMDLET, "exec", None)
        if callable(exec_fn):
            exec_fn(
                None,
                ["-provider", "alldebrid", f"ID={magnet_id}"],
                config,
            )
    except Exception:
        pass

    # NOTE(review): this logs "Sent" even when the dispatch above failed
    # silently — confirm that is the intended behavior.
    debug(f"[alldebrid] Sent magnet {magnet_id} to AllDebrid for download")
def prepare_magnet(
    magnet_spec: str,
    config: Dict[str, Any],
) -> tuple[Optional[AllDebridClient], Optional[int]]:
    """Submit a magnet/hash spec to AllDebrid and return (client, magnet_id).

    Returns (None, None) when the API key is missing, the client cannot be
    built, or the submission is rejected; each failure is logged to stderr.
    On success the new magnet is also surfaced via the search-file cmdlet.
    """
    api_key = _get_debrid_api_key(config or {})
    if not api_key:
        log("AllDebrid API key not configured. Use .config to set it.", file=sys.stderr)
        return None, None
    try:
        client = AllDebridClient(api_key)
    except Exception as exc:
        log(f"Failed to initialize AllDebrid client: {exc}", file=sys.stderr)
        return None, None
    try:
        magnet_info = client.magnet_add(magnet_spec)
        # The API may return the id as str or int; 0/negative means "not created".
        magnet_id_val = magnet_info.get("id") or 0
        magnet_id = int(magnet_id_val)
        if magnet_id <= 0:
            log(f"AllDebrid magnet submission failed: {magnet_info}", file=sys.stderr)
            return None, None
    except Exception as exc:
        log(f"Failed to submit magnet to AllDebrid: {exc}", file=sys.stderr)
        return None, None
    # Best-effort UX: show the new magnet in search results right away.
    _dispatch_alldebrid_magnet_search(magnet_id, config)
    return client, magnet_id
def _flatten_files_with_relpath(items: Any) -> Iterable[Dict[str, Any]]:
    """Yield flattened file nodes, each guaranteed to carry a "relpath" key."""
    for raw in AllDebrid._flatten_files(items):
        node = dict(raw)
        relpath = raw.get("_relpath") or raw.get("relpath")
        if not relpath:
            # Fall back to the bare file name (short or long key form).
            relpath = str(raw.get("n") or raw.get("name") or "").strip()
        node["relpath"] = relpath
        yield node
def download_magnet(
    magnet_spec: str,
    original_url: str,
    final_output_dir: Path,
    config: Dict[str, Any],
    progress: Any,
    quiet_mode: bool,
    path_from_result: Callable[[Any], Path],
    on_emit: Callable[[Path, str, str, Dict[str, Any]], None],
) -> tuple[int, Optional[int]]:
    """Submit a magnet, wait until it is ready, then download every file.

    Returns (files_downloaded, magnet_id); magnet_id is None only when the
    submission itself failed. ``on_emit`` is invoked once per downloaded file
    with (path, source_url, relpath, metadata).
    """
    client, magnet_id = prepare_magnet(magnet_spec, config)
    if client is None or magnet_id is None:
        return 0, None
    # How long to wait for AllDebrid to finish fetching the torrent.
    wait_timeout = 300
    try:
        streaming_config = config.get("streaming", {}) if isinstance(config, dict) else {}
        wait_timeout = int(streaming_config.get("wait_timeout", 300))
    except Exception:
        wait_timeout = 300
    elapsed = 0
    while elapsed < wait_timeout:
        try:
            status = client.magnet_status(magnet_id)
        except Exception as exc:
            log(f"Failed to read magnet status {magnet_id}: {exc}", file=sys.stderr)
            return 0, magnet_id
        # statusCode 4 == "Ready" in the AllDebrid API.
        ready = bool(status.get("ready")) or status.get("statusCode") == 4
        if ready:
            break
        time.sleep(5)
        elapsed += 5
    else:
        log(f"AllDebrid magnet {magnet_id} timed out after {wait_timeout}s", file=sys.stderr)
        return 0, magnet_id
    try:
        files_result = client.magnet_links([magnet_id])
    except Exception as exc:
        log(f"Failed to list AllDebrid magnet files: {exc}", file=sys.stderr)
        return 0, magnet_id
    # magnet_links keys results by the magnet id as a string.
    magnet_files = files_result.get(str(magnet_id), {}) if isinstance(files_result, dict) else {}
    file_nodes = magnet_files.get("files") if isinstance(magnet_files, dict) else []
    if not file_nodes:
        log(f"AllDebrid magnet {magnet_id} produced no files", file=sys.stderr)
        return 0, magnet_id
    downloaded = 0
    for node in _flatten_files_with_relpath(file_nodes):
        # BUG FIX: the API commonly uses short keys ("l"/"n") which
        # _flatten_files preserves; previously only the long forms were read,
        # silently skipping every short-key node (cf. _download_magnet_by_id,
        # which checks both).
        file_url = str(node.get("link") or node.get("l") or "").strip()
        file_name = str(node.get("name") or node.get("n") or "").strip()
        relpath = str(node.get("relpath") or file_name).strip()
        if not file_url or not relpath:
            continue
        rel_path_obj = Path(relpath)
        output_dir = final_output_dir
        # BUG FIX: Path.parent is never falsy (it is Path(".") for bare
        # names), so the old `if rel_path_obj.parent:` branch always ran.
        # Descend only when there really is a parent directory.
        if str(rel_path_obj.parent) not in ("", "."):
            output_dir = final_output_dir / rel_path_obj.parent
        try:
            output_dir.mkdir(parents=True, exist_ok=True)
        except Exception:
            output_dir = final_output_dir
        try:
            result_obj = _download_direct_file(
                file_url,
                output_dir,
                quiet=quiet_mode,
                suggested_filename=rel_path_obj.name,
                pipeline_progress=progress,
            )
        except Exception as exc:
            log(f"Failed to download AllDebrid file {file_url}: {exc}", file=sys.stderr)
            continue
        downloaded_path = path_from_result(result_obj)
        metadata = {
            "magnet_id": magnet_id,
            "relpath": relpath,
            "name": file_name,
        }
        on_emit(downloaded_path, file_url or original_url, relpath, metadata)
        downloaded += 1
    return downloaded, magnet_id
def expand_folder_item(
    item: Any,
    get_search_provider: Optional[Callable[[str, Dict[str, Any]], Any]],
    config: Dict[str, Any],
) -> Tuple[List[Any], Optional[str]]:
    """Expand an alldebrid "folder" row into its file rows.

    Returns (expanded_rows, not_ready_status): a non-None status string means
    the magnet exists but is still syncing; ([], None) means the item was not
    expandable at all.
    """
    def _field(name: str) -> Any:
        return item.get(name) if isinstance(item, dict) else getattr(item, name, None)

    table = _field("table")
    media_kind = _field("media_kind")
    full_metadata = _field("full_metadata")
    target = _field("path") or _field("url")
    if str(table or "").lower() != "alldebrid" or str(media_kind or "").lower() != "folder":
        return [], None
    magnet_id = None
    if isinstance(full_metadata, dict):
        magnet_id = full_metadata.get("magnet_id")
    # Fall back to parsing the synthetic "alldebrid:magnet:<id>" path.
    if magnet_id is None and isinstance(target, str) and target.lower().startswith("alldebrid:magnet:"):
        try:
            magnet_id = int(target.split(":")[-1])
        except Exception:
            magnet_id = None
    if magnet_id is None or get_search_provider is None:
        return [], None
    provider = get_search_provider("alldebrid", config) if get_search_provider else None
    if provider is None:
        return [], None
    try:
        files = provider.search("*", limit=10_000, filters={"view": "files", "magnet_id": int(magnet_id)})
    except Exception:
        files = []
    # A single "folder" result means the magnet is still syncing; report its status.
    if files and len(files) == 1 and getattr(files[0], "media_kind", "") == "folder":
        detail = getattr(files[0], "detail", "")
        return [], str(detail or "unknown")
    return [sr.to_dict() if hasattr(sr, "to_dict") else sr for sr in files], None
def adjust_output_dir_for_alldebrid(
    base_output_dir: Path,
    full_metadata: Optional[Dict[str, Any]],
    item: Any,
) -> Path:
    """Compute the final output directory for an AllDebrid file.

    Produces <base>/<magnet name>/<relpath parent dirs...>, skipping the
    magnet-name segment when the base dir already ends with it and skipping a
    duplicated leading relpath segment equal to the magnet name. Falls back to
    ``base_output_dir`` when the directory cannot be created.
    """
    from SYS.utils import sanitize_filename as _sf

    output_dir = base_output_dir
    md = full_metadata if isinstance(full_metadata, dict) else {}
    # Prefer explicit magnet-name metadata; fall back to the row's detail text.
    magnet_name = md.get("magnet_name") or md.get("folder")
    if not magnet_name:
        try:
            detail_val = getattr(item, "detail", None) if not isinstance(item, dict) else item.get("detail")
            magnet_name = str(detail_val or "").strip() or None
        except Exception:
            magnet_name = None
    magnet_dir_name = _sf(str(magnet_name)) if magnet_name else ""
    try:
        base_tail = str(Path(output_dir).name or "")
    except Exception:
        base_tail = ""
    # Compare sanitized, case-folded names to detect a pre-existing magnet dir.
    base_tail_norm = _sf(base_tail).lower() if base_tail.strip() else ""
    magnet_dir_norm = magnet_dir_name.lower() if magnet_dir_name else ""
    # Append the magnet folder only when the base dir does not already end with it.
    if magnet_dir_name and (not base_tail_norm or base_tail_norm != magnet_dir_norm):
        output_dir = Path(output_dir) / magnet_dir_name
    relpath = md.get("relpath") if isinstance(md, dict) else None
    if (not relpath) and isinstance(md.get("file"), dict):
        relpath = md["file"].get("_relpath")
    if relpath:
        # Normalize separators and drop "."/".." segments for safety.
        parts = [p for p in str(relpath).replace("\\", "/").split("/") if p and p not in {".", ".."}]
        if magnet_dir_name and parts:
            try:
                # Avoid "<magnet>/<magnet>/..." when relpath repeats the magnet name.
                if _sf(parts[0]).lower() == magnet_dir_norm:
                    parts = parts[1:]
            except Exception:
                pass
        # All but the last segment are directories (the last is the filename).
        for part in parts[:-1]:
            output_dir = Path(output_dir) / _sf(part)
    try:
        Path(output_dir).mkdir(parents=True, exist_ok=True)
    except Exception:
        output_dir = base_output_dir
    return output_dir
class AllDebrid(TableProviderMixin, Provider):
"""AllDebrid account provider with magnet folder/file browsing and downloads.
This provider uses the new table system (strict ResultTable adapter pattern) for
consistent selection and auto-stage integration across all providers. It exposes
magnets as folder rows and files as file rows, with metadata enrichment for:
- magnet_id: For routing to _download_magnet_by_id
- status/ready: For showing sync state
- _selection_args/_selection_action: For @N expansion control
- relpath: For proper file hierarchy in downloads
KEY FEATURES:
- Table system: Using ResultTable adapter for strict column/metadata handling
- Selection override: Full metadata control via _selection_args/_selection_action
- Auto-stages: download-file is auto-inserted when @N is used on magnet folders
- File unlocking: URLs with /f/ paths are automatically unlocked via API before download
- Drill-down: Selecting a folder row (@N) fetches and displays all files
SELECTION FLOW:
1. User runs: search-file -provider alldebrid "ubuntu"
2. Results show magnet folders and (optionally) files
3. User selects a row: @1
4. Selection metadata routes to download-file with -magnet-id
5. download-file calls provider.download_items() with magnet_id
6. Provider fetches files, unlocks locked URLs, and downloads
"""
2026-01-01 20:37:27 -08:00
# Magnet URIs should be routed through this provider.
2026-01-07 05:09:59 -08:00
TABLE_AUTO_STAGES = {"alldebrid": ["download-file"]}
2026-01-07 11:01:13 -08:00
AUTO_STAGE_USE_SELECTION_ARGS = True
2026-01-01 20:37:27 -08:00
URL = ("magnet:",)
2026-01-04 02:23:50 -08:00
URL_DOMAINS = ()
2026-01-11 03:24:49 -08:00
@classmethod
def config(cls) -> List[Dict[str, Any]]:
return [
{
"key": "api_key",
"label": "API Key",
"default": "",
"required": True,
"secret": True
}
]
2026-01-07 05:09:59 -08:00
    @staticmethod
    def _resolve_magnet_spec_from_result(result: Any) -> Optional[str]:
        """Extract a magnet/hash spec from a SearchResult-like object.

        Non-folder rows resolve their path/url directly. Folder rows prefer
        magnet metadata (magnet link/url/hash fields, in priority order)
        before falling back to the row's own path/url. Returns None when the
        row is not from this provider or nothing resolvable is found.
        """
        table = getattr(result, "table", None)
        media_kind = getattr(result, "media_kind", None)
        tags = getattr(result, "tag", None)
        full_metadata = getattr(result, "full_metadata", None)
        target = getattr(result, "path", None) or getattr(result, "url", None)
        # Only rows produced by this provider are eligible.
        if not table or str(table).strip().lower() != "alldebrid":
            return None
        kind_val = str(media_kind or "").strip().lower()
        is_folder = kind_val == "folder"
        # media_kind may be unset; a "folder" tag marks folder rows too.
        if not is_folder and isinstance(tags, (list, set)):
            for tag in tags:
                if str(tag or "").strip().lower() == "folder":
                    is_folder = True
                    break
        if not is_folder:
            return resolve_magnet_spec(str(target or "")) if isinstance(target, str) else None
        metadata = full_metadata if isinstance(full_metadata, dict) else {}
        candidates: List[str] = []

        def _maybe_add(value: Any) -> None:
            # Collect non-empty string candidates, preserving priority order.
            if isinstance(value, str):
                cleaned = value.strip()
                if cleaned:
                    candidates.append(cleaned)

        magnet_block = metadata.get("magnet")
        if isinstance(magnet_block, dict):
            for inner in ("magnet", "magnet_link", "link", "url"):
                _maybe_add(magnet_block.get(inner))
            for inner in ("hash", "info_hash", "torrenthash", "magnethash"):
                _maybe_add(magnet_block.get(inner))
        else:
            _maybe_add(magnet_block)
        for extra in ("magnet_link", "magnet_url", "magnet_spec"):
            _maybe_add(metadata.get(extra))
        _maybe_add(metadata.get("hash"))
        _maybe_add(metadata.get("info_hash"))
        for candidate in candidates:
            spec = resolve_magnet_spec(candidate)
            if spec:
                return spec
        # Last resort: try the row's own path/url.
        return resolve_magnet_spec(str(target)) if isinstance(target, str) else None
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]:
spec = resolve_magnet_spec(url)
if not spec:
return False, None
cfg = self.config if isinstance(self.config, dict) else {}
try:
prepare_magnet(spec, cfg)
return True, None
except Exception:
return False, None
2026-01-04 02:23:50 -08:00
@classmethod
def url_patterns(cls) -> Tuple[str, ...]:
# Combine static patterns with cached host domains.
patterns = list(super().url_patterns())
try:
cached = _load_cached_hoster_domains()
for d in cached:
dom = str(d or "").strip().lower()
if dom and dom not in patterns:
patterns.append(dom)
log(
f"[alldebrid] url_patterns loaded {len(cached)} cached host domains; total patterns={len(patterns)}",
file=sys.stderr,
)
except Exception:
pass
return tuple(patterns)
2025-12-16 01:45:01 -08:00
"""Search provider for AllDebrid account content.
This provider lists and searches the files/magnets already present in the
user's AllDebrid account.
Query behavior:
- "*" / "all" / "list": list recent files from ready magnets
- otherwise: substring match on file name OR magnet name, or exact magnet id
"""
def validate(self) -> bool:
# Consider "available" when configured; actual API connectivity can vary.
2026-01-04 02:23:50 -08:00
ok = bool(_get_debrid_api_key(self.config or {}))
if ok:
# Best-effort: refresh cached host domains so future URL routing can
# route supported hosters through this provider.
try:
refresh_alldebrid_hoster_cache(force=False)
except Exception:
pass
return ok
2025-12-16 01:45:01 -08:00
2025-12-18 22:50:21 -08:00
    def download(self, result: SearchResult, output_dir: Path) -> Optional[Path]:
        """Download an AllDebrid SearchResult into output_dir.

        AllDebrid magnet file listings often provide links that require an API
        "unlock" step to produce a true direct-download URL. Without unlocking,
        callers may download a small HTML/redirect page instead of file bytes.

        This is used by the download-file cmdlet when a provider item is piped.
        Returns the downloaded file's path, or None on any failure.
        """
        try:
            api_key = _get_debrid_api_key(self.config or {})
            if not api_key:
                log("[alldebrid] download skipped: missing api_key", file=sys.stderr)
                return None
            target = str(getattr(result, "path", "") or "").strip()
            if not target.startswith(("http://", "https://")):
                log(f"[alldebrid] download skipped: target not http(s): {target}", file=sys.stderr)
                return None
            try:
                from API.alldebrid import AllDebridClient
                client = AllDebridClient(api_key)
            except Exception as exc:
                log(f"[alldebrid] Failed to init client: {exc}", file=sys.stderr)
                return None
            log(f"[alldebrid] download routing target={target}", file=sys.stderr)
            # Prefer provider title as the output filename; later we may override if unlocked URL has a better basename.
            suggested = sanitize_filename(str(getattr(result, "title", "") or "").strip())
            suggested_name = suggested if suggested else None
            # Quiet mode when download-file is mid-pipeline.
            quiet = bool(self.config.get("_quiet_background_output")) if isinstance(self.config, dict) else False

            def _html_guard(path: Path) -> bool:
                # Heuristic: a small non-.html file whose head looks like an
                # HTML document is treated as a failed (error/redirect page)
                # download rather than real file bytes.
                try:
                    if path.exists():
                        size = path.stat().st_size
                        if size > 0 and size <= 250_000 and path.suffix.lower() not in (".html", ".htm"):
                            head = path.read_bytes()[:512]
                            try:
                                text = head.decode("utf-8", errors="ignore").lower()
                            except Exception:
                                text = ""
                            if "<html" in text or "<!doctype html" in text:
                                return True
                except Exception:
                    return False
                return False

            def _download_unlocked(unlocked_url: str, *, allow_html: bool = False) -> Optional[Path]:
                # If this is an unlocked debrid link (allow_html=True), stream it directly and skip
                # the generic HTML guard to avoid falling back to the public hoster.
                if allow_html:
                    try:
                        from API.HTTP import HTTPClient
                        fname = suggested_name or sanitize_filename(Path(urlparse(unlocked_url).path).name)
                        if not fname:
                            fname = "download"
                        if not Path(fname).suffix:
                            fname = f"{fname}.bin"
                        dest = Path(output_dir) / fname
                        dest.parent.mkdir(parents=True, exist_ok=True)
                        # NOTE: this local `client` intentionally shadows the
                        # AllDebridClient bound in the enclosing scope.
                        with HTTPClient(timeout=30.0) as client:
                            with client._request_stream("GET", unlocked_url, follow_redirects=True) as resp:
                                resp.raise_for_status()
                                with dest.open("wb") as fh:
                                    for chunk in resp.iter_bytes():
                                        if not chunk:
                                            continue
                                        fh.write(chunk)
                        return dest if dest.exists() else None
                    except Exception as exc2:
                        log(f"[alldebrid] raw stream (unlocked) failed: {exc2}", file=sys.stderr)
                        return None
                # Otherwise, use standard downloader with guardrails.
                pipe_progress = None
                try:
                    if isinstance(self.config, dict):
                        pipe_progress = self.config.get("_pipeline_progress")
                except Exception:
                    pipe_progress = None
                try:
                    dl_res = _download_direct_file(
                        unlocked_url,
                        Path(output_dir),
                        quiet=quiet,
                        suggested_filename=suggested_name,
                        pipeline_progress=pipe_progress,
                    )
                    downloaded_path = getattr(dl_res, "path", None)
                    if downloaded_path is None:
                        return None
                    downloaded_path = Path(str(downloaded_path))
                except DownloadError as exc:
                    log(
                        f"[alldebrid] _download_direct_file rejected URL ({exc}); no further fallback", file=sys.stderr
                    )
                    return None
                try:
                    if _html_guard(downloaded_path):
                        log(
                            "[alldebrid] Download returned HTML page (not file bytes). Try again or check AllDebrid link status.",
                            file=sys.stderr,
                        )
                        return None
                except Exception:
                    pass
                return downloaded_path if downloaded_path.exists() else None

            # Try to unlock the hoster link into a true direct-download URL.
            unlocked_url = target
            try:
                unlocked = client.resolve_unlock_link(target, poll=True, max_wait_seconds=45, poll_interval_seconds=5)
                if isinstance(unlocked, str) and unlocked.strip().startswith(("http://", "https://")):
                    unlocked_url = unlocked.strip()
                    log(f"[alldebrid] unlock -> {unlocked_url}", file=sys.stderr)
            except Exception as exc:
                log(f"[alldebrid] Failed to unlock link: {exc}", file=sys.stderr)
            if unlocked_url != target:
                # Prefer filename from unlocked URL path.
                try:
                    unlocked_name = sanitize_filename(Path(urlparse(unlocked_url).path).name)
                    if unlocked_name:
                        suggested_name = unlocked_name
                except Exception:
                    pass
            # When using an unlocked URL different from the original hoster, stream it directly and do NOT fall back to the public URL.
            allow_html = unlocked_url != target
            log(
                f"[alldebrid] downloading from {unlocked_url} (allow_html={allow_html})",
                file=sys.stderr,
            )
            downloaded = _download_unlocked(unlocked_url, allow_html=allow_html)
            if downloaded:
                log(f"[alldebrid] downloaded -> {downloaded}", file=sys.stderr)
                return downloaded
            # If unlock failed entirely and we never changed URL, allow a single attempt on the original target.
            if unlocked_url == target:
                downloaded = _download_unlocked(target, allow_html=False)
                if downloaded:
                    log(f"[alldebrid] downloaded (original target) -> {downloaded}", file=sys.stderr)
                    return downloaded
            return None
        except Exception:
            # Top-level guard: a provider download must never raise into the cmdlet.
            return None
2026-01-07 05:09:59 -08:00
    def download_items(
        self,
        result: SearchResult,
        output_dir: Path,
        *,
        emit: Callable[[Path, str, str, Dict[str, Any]], None],
        progress: Any,
        quiet_mode: bool,
        path_from_result: Callable[[Any], Path],
        config: Optional[Dict[str, Any]] = None,
    ) -> int:
        """Download all files belonging to ``result`` into ``output_dir``.

        Two paths:
          1. Rows carrying a ``magnet_id`` (magnet already in the account,
             e.g. from the selector) download directly via _download_magnet_by_id.
          2. Otherwise the row is resolved to a magnet spec and submitted
             fresh via download_magnet.
        Returns the number of files downloaded (0 on failure).
        """
        # Check if this is a direct magnet_id from the account (e.g., from selector)
        full_metadata = getattr(result, "full_metadata", None) or {}
        if isinstance(full_metadata, dict):
            magnet_id_direct = full_metadata.get("magnet_id")
            if magnet_id_direct is not None:
                try:
                    magnet_id = int(magnet_id_direct)
                    debug(f"[download_items] Found magnet_id {magnet_id} in metadata, downloading files directly")
                    cfg = config if isinstance(config, dict) else (self.config or {})
                    count = self._download_magnet_by_id(
                        magnet_id,
                        output_dir,
                        cfg,
                        emit,
                        progress,
                        quiet_mode,
                        path_from_result,
                    )
                    debug(f"[download_items] _download_magnet_by_id returned {count}")
                    return count
                except Exception as e:
                    # Fall through to the fresh-submission path on any failure.
                    debug(f"[download_items] Failed to download by magnet_id: {e}")
        spec = self._resolve_magnet_spec_from_result(result)
        if not spec:
            return 0
        cfg = config if isinstance(config, dict) else (self.config or {})

        def _on_emit(path: Path, file_url: str, relpath: str, metadata: Dict[str, Any]) -> None:
            # Thin adapter so download_magnet's callback signature matches emit.
            emit(path, file_url, relpath, metadata)

        downloaded, _ = download_magnet(
            spec,
            str(getattr(result, "path", "") or ""),
            output_dir,
            cfg,
            progress,
            quiet_mode,
            path_from_result,
            _on_emit,
        )
        return downloaded
2025-12-16 01:45:01 -08:00
    @staticmethod
    def _flatten_files(items: Any,
                       *,
                       _prefix: Optional[List[str]] = None) -> Iterable[Dict[str, Any]]:
        """Flatten AllDebrid magnet file tree into file dicts, preserving relative paths.

        API commonly returns:
          - file:   {n: name, s: size, l: link}
          - folder: {n: name, e: [sub_items]}

        This flattener attaches a best-effort relative path to each yielded file node
        as `_relpath` using POSIX separators (e.g., "Season 1/E01.mkv").

        Some call sites in this repo also expect {name, size, link}, so we accept both.
        """
        prefix = list(_prefix or [])
        if not items:
            return
        # Accept a single node as well as a list of nodes.
        if isinstance(items, dict):
            items = [items]
        if not isinstance(items, list):
            return
        for node in items:
            if not isinstance(node, dict):
                continue
            children = node.get("e") or node.get("children")
            if isinstance(children, list):
                # Folder node: recurse with the folder name appended to the prefix.
                folder_name = node.get("n") or node.get("name")
                next_prefix = prefix
                if isinstance(folder_name, str) and folder_name.strip():
                    next_prefix = prefix + [folder_name.strip()]
                yield from AllDebrid._flatten_files(children, _prefix=next_prefix)
                continue
            name = node.get("n") or node.get("name")
            link = node.get("l") or node.get("link")
            # File node: only yield entries that have both a name and a link.
            if isinstance(name, str) and name.strip() and isinstance(link, str) and link.strip():
                rel_parts = prefix + [name.strip()]
                relpath = "/".join([p for p in rel_parts if p])
                enriched = dict(node)
                enriched["_relpath"] = relpath
                yield enriched
2026-01-07 11:01:13 -08:00
    def _download_magnet_by_id(
        self,
        magnet_id: int,
        output_dir: Path,
        config: Dict[str, Any],
        emit: Callable[[Path, str, str, Dict[str, Any]], None],
        progress: Any,
        quiet_mode: bool,
        path_from_result: Callable[[Any], Path],
    ) -> int:
        """Download files from an existing magnet ID (already in account).

        Restricted links (containing "/f/") are unlocked via the API first;
        on unlock failure the locked URL is attempted anyway. Returns the
        number of files successfully downloaded.
        """
        api_key = _get_debrid_api_key(config or {})
        if not api_key:
            log("AllDebrid API key not configured", file=sys.stderr)
            return 0
        try:
            client = AllDebridClient(api_key)
        except Exception as exc:
            log(f"Failed to init AllDebrid client: {exc}", file=sys.stderr)
            return 0
        try:
            files_result = client.magnet_links([magnet_id])
        except Exception as exc:
            log(f"Failed to list files for magnet {magnet_id}: {exc}", file=sys.stderr)
            return 0
        # magnet_links keys results by the magnet id as a string.
        magnet_files = files_result.get(str(magnet_id), {}) if isinstance(files_result, dict) else {}
        file_nodes = magnet_files.get("files") if isinstance(magnet_files, dict) else []
        if not file_nodes:
            log(f"AllDebrid magnet {magnet_id} has no files", file=sys.stderr)
            return 0
        downloaded = 0
        for node in self._flatten_files(file_nodes):
            # Accept both short ("l"/"n") and long ("link"/"name") key forms.
            locked_url = str(node.get("l") or node.get("link") or "").strip()
            file_name = str(node.get("n") or node.get("name") or "").strip()
            relpath = str(node.get("_relpath") or file_name or "").strip()
            if not locked_url or not relpath:
                continue
            # Unlock the URL if it's restricted (contains /f/)
            file_url = locked_url
            if "/f/" in locked_url:
                try:
                    unlocked = client.unlock_link(locked_url)
                    if unlocked:
                        file_url = unlocked
                        debug(f"[alldebrid] Unlocked restricted link for {file_name}")
                    else:
                        debug(f"[alldebrid] Failed to unlock {locked_url}, trying locked URL")
                except Exception as exc:
                    debug(f"[alldebrid] unlock_link failed: {exc}, trying locked URL")
            # Mirror the relpath's directory structure under output_dir.
            target_path = output_dir
            rel_path_obj = Path(relpath)
            # NOTE(review): Path.parent is never falsy (it is Path(".") for bare
            # names), so this branch always runs; harmless since "." is dropped
            # when joined, but the condition is effectively dead.
            if rel_path_obj.parent:
                target_path = output_dir / rel_path_obj.parent
            try:
                target_path.mkdir(parents=True, exist_ok=True)
            except Exception:
                target_path = output_dir
            try:
                result_obj = _download_direct_file(
                    file_url,
                    target_path,
                    quiet=quiet_mode,
                    suggested_filename=rel_path_obj.name,
                    pipeline_progress=progress,
                )
            except Exception as exc:
                debug(f"Failed to download {file_url}: {exc}")
                continue
            downloaded_path = path_from_result(result_obj)
            metadata = {
                "magnet_id": magnet_id,
                "relpath": relpath,
                "name": file_name,
            }
            emit(downloaded_path, file_url, relpath, metadata)
            downloaded += 1
        if downloaded == 0:
            log(f"AllDebrid magnet {magnet_id} produced no downloads", file=sys.stderr)
        return downloaded
2025-12-16 01:45:01 -08:00
def search(
self,
query: str,
limit: int = 50,
filters: Optional[Dict[str,
Any]] = None,
2025-12-16 01:45:01 -08:00
**kwargs: Any,
) -> List[SearchResult]:
q = (query or "").strip()
if not q:
return []
api_key = _get_debrid_api_key(self.config or {})
if not api_key:
return []
view = None
if isinstance(filters, dict):
view = str(filters.get("view") or "").strip().lower() or None
view = view or "folders"
try:
from API.alldebrid import AllDebridClient
client = AllDebridClient(api_key)
except Exception as exc:
log(f"[alldebrid] Failed to init client: {exc}", file=sys.stderr)
return []
q_lower = q.lower()
needle = "" if q_lower in {"*",
"all",
"list"} else q_lower
2025-12-16 01:45:01 -08:00
# Second-stage: list files for a specific magnet id.
if view == "files":
magnet_id_val = None
if isinstance(filters, dict):
magnet_id_val = filters.get("magnet_id")
if magnet_id_val is None:
magnet_id_val = kwargs.get("magnet_id")
2026-01-04 02:23:50 -08:00
if magnet_id_val is None:
return []
2025-12-16 01:45:01 -08:00
try:
magnet_id = int(magnet_id_val)
2026-01-04 02:23:50 -08:00
except (TypeError, ValueError):
2025-12-16 01:45:01 -08:00
return []
magnet_status: Dict[str,
Any] = {}
2025-12-16 01:45:01 -08:00
try:
magnet_status = client.magnet_status(magnet_id)
except Exception:
magnet_status = {}
2025-12-29 17:05:03 -08:00
magnet_name = str(
magnet_status.get("filename") or magnet_status.get("name")
or magnet_status.get("hash") or f"magnet-{magnet_id}"
2025-12-29 17:05:03 -08:00
)
status_code = magnet_status.get("statusCode")
status_text = str(magnet_status.get("status") or "").strip() or "unknown"
ready = status_code == 4 or bool(magnet_status.get("ready"))
2025-12-16 01:45:01 -08:00
if not ready:
return [
SearchResult(
table="alldebrid",
title=magnet_name,
path=f"alldebrid:magnet:{magnet_id}",
detail=status_text,
annotations=["folder",
"not-ready"],
2025-12-16 01:45:01 -08:00
media_kind="folder",
tag={"alldebrid",
"folder",
str(magnet_id),
"not-ready"},
2025-12-16 01:45:01 -08:00
columns=[
("Folder",
magnet_name),
("ID",
str(magnet_id)),
("Status",
status_text),
("Ready",
"no"),
2025-12-16 01:45:01 -08:00
],
full_metadata={
"magnet": magnet_status,
2026-01-01 20:37:27 -08:00
"magnet_id": magnet_id,
"provider": "alldebrid",
"provider_view": "files",
"magnet_name": magnet_name,
},
2025-12-16 01:45:01 -08:00
)
]
try:
files_result = client.magnet_links([magnet_id])
2025-12-29 17:05:03 -08:00
magnet_files = (
files_result.get(str(magnet_id),
{}) if isinstance(files_result,
dict) else {}
2025-12-29 17:05:03 -08:00
)
file_tree = magnet_files.get("files",
[]) if isinstance(magnet_files,
dict) else []
2025-12-16 01:45:01 -08:00
except Exception as exc:
2025-12-29 17:05:03 -08:00
log(
f"[alldebrid] Failed to list files for magnet {magnet_id}: {exc}",
file=sys.stderr,
)
2025-12-16 01:45:01 -08:00
file_tree = []
results: List[SearchResult] = []
for file_node in self._flatten_files(file_tree):
file_name = str(file_node.get("n") or file_node.get("name")
or "").strip()
file_url = str(file_node.get("l") or file_node.get("link")
or "").strip()
2025-12-29 17:05:03 -08:00
relpath = str(file_node.get("_relpath") or file_name or "").strip()
file_size = file_node.get("s") or file_node.get("size")
2025-12-16 01:45:01 -08:00
if not file_name or not file_url:
continue
if needle and needle not in file_name.lower():
continue
size_bytes: Optional[int] = None
try:
if isinstance(file_size, (int, float)):
size_bytes = int(file_size)
elif isinstance(file_size, str) and file_size.isdigit():
size_bytes = int(file_size)
except Exception:
size_bytes = None
2026-01-07 05:09:59 -08:00
metadata = {
"magnet": magnet_status,
"magnet_id": magnet_id,
"magnet_name": magnet_name,
"relpath": relpath,
"file": file_node,
"provider": "alldebrid",
"provider_view": "files",
2026-01-09 01:22:06 -08:00
# Selection metadata for table system
2026-01-07 11:01:13 -08:00
"_selection_args": ["-magnet-id", str(magnet_id)],
"_selection_action": ["download-file", "-provider", "alldebrid", "-magnet-id", str(magnet_id)],
2026-01-07 05:09:59 -08:00
}
2025-12-16 01:45:01 -08:00
results.append(
SearchResult(
table="alldebrid",
title=file_name,
path=file_url,
detail=magnet_name,
annotations=["file"],
media_kind="file",
size_bytes=size_bytes,
tag={"alldebrid",
"file",
str(magnet_id)},
2025-12-16 01:45:01 -08:00
columns=[
("File",
file_name),
("Folder",
magnet_name),
("ID",
str(magnet_id)),
2025-12-16 01:45:01 -08:00
],
2026-01-07 05:09:59 -08:00
full_metadata=metadata,
2025-12-16 01:45:01 -08:00
)
)
if len(results) >= max(1, limit):
break
return results
# Default: folders view (magnets)
try:
magnets = client.magnet_list() or []
except Exception as exc:
log(f"[alldebrid] Failed to list account magnets: {exc}", file=sys.stderr)
return []
wanted_id: Optional[int] = None
if needle.isdigit():
try:
wanted_id = int(needle)
except Exception:
wanted_id = None
results: List[SearchResult] = []
for magnet in magnets:
if not isinstance(magnet, dict):
continue
2026-01-04 02:23:50 -08:00
magnet_id_val = magnet.get("id")
if magnet_id_val is None:
continue
2025-12-16 01:45:01 -08:00
try:
2026-01-04 02:23:50 -08:00
magnet_id = int(magnet_id_val)
except (TypeError, ValueError):
2025-12-16 01:45:01 -08:00
continue
2025-12-29 17:05:03 -08:00
magnet_name = str(
magnet.get("filename") or magnet.get("name") or magnet.get("hash")
2025-12-29 17:05:03 -08:00
or f"magnet-{magnet_id}"
)
2025-12-16 01:45:01 -08:00
magnet_name_lower = magnet_name.lower()
2025-12-29 17:05:03 -08:00
status_text = str(magnet.get("status") or "").strip() or "unknown"
status_code = magnet.get("statusCode")
ready = status_code == 4 or bool(magnet.get("ready"))
2025-12-16 01:45:01 -08:00
if wanted_id is not None:
if magnet_id != wanted_id:
continue
elif needle and (needle not in magnet_name_lower):
continue
size_bytes: Optional[int] = None
try:
2025-12-29 17:05:03 -08:00
size_val = magnet.get("size")
2025-12-16 01:45:01 -08:00
if isinstance(size_val, (int, float)):
size_bytes = int(size_val)
elif isinstance(size_val, str) and size_val.isdigit():
size_bytes = int(size_val)
except Exception:
size_bytes = None
results.append(
SearchResult(
table="alldebrid",
title=magnet_name,
path=f"alldebrid:magnet:{magnet_id}",
detail=status_text,
annotations=["folder"],
media_kind="folder",
size_bytes=size_bytes,
tag={"alldebrid",
"folder",
str(magnet_id)}
2025-12-29 17:05:03 -08:00
| ({"ready"} if ready else {"not-ready"}),
2025-12-16 01:45:01 -08:00
columns=[
("Folder",
magnet_name),
("ID",
str(magnet_id)),
("Status",
status_text),
("Ready",
"yes" if ready else "no"),
2025-12-16 01:45:01 -08:00
],
full_metadata={
"magnet": magnet,
2026-01-01 20:37:27 -08:00
"magnet_id": magnet_id,
"provider": "alldebrid",
"provider_view": "folders",
"magnet_name": magnet_name,
2026-01-07 11:01:13 -08:00
# Selection metadata: allow @N expansion to drive downloads directly
"_selection_args": ["-magnet-id", str(magnet_id)],
"_selection_action": ["download-file", "-provider", "alldebrid", "-magnet-id", str(magnet_id)],
},
2025-12-16 01:45:01 -08:00
)
)
if len(results) >= max(1, limit):
break
return results
def selector(
self,
selected_items: List[Any],
*,
ctx: Any,
stage_is_last: bool = True,
**_kwargs: Any,
) -> bool:
"""Handle AllDebrid `@N` selection by drilling into magnet files."""
if not stage_is_last:
return False
def _as_payload(item: Any) -> Dict[str, Any]:
if isinstance(item, dict):
return dict(item)
try:
if hasattr(item, "to_dict"):
maybe = item.to_dict() # type: ignore[attr-defined]
if isinstance(maybe, dict):
return maybe
except Exception:
pass
payload: Dict[str, Any] = {}
try:
payload = {
"title": getattr(item, "title", None),
"path": getattr(item, "path", None),
"table": getattr(item, "table", None),
"annotations": getattr(item, "annotations", None),
"media_kind": getattr(item, "media_kind", None),
"full_metadata": getattr(item, "full_metadata", None),
}
except Exception:
payload = {}
return payload
chosen: List[Dict[str, Any]] = []
for item in selected_items or []:
payload = _as_payload(item)
meta = payload.get("full_metadata") or payload.get("metadata") or {}
if not isinstance(meta, dict):
meta = {}
ann_set: set[str] = set()
for ann_source in (payload.get("annotations"), meta.get("annotations")):
if isinstance(ann_source, (list, tuple, set)):
for ann in ann_source:
ann_text = str(ann or "").strip().lower()
if ann_text:
ann_set.add(ann_text)
media_kind = str(payload.get("media_kind") or meta.get("media_kind") or "").strip().lower()
is_folder = (media_kind == "folder") or ("folder" in ann_set)
magnet_id = meta.get("magnet_id")
if magnet_id is None or (not is_folder):
continue
title = str(payload.get("title") or meta.get("magnet_name") or meta.get("name") or "").strip()
if not title:
title = f"magnet-{magnet_id}"
chosen.append({
"magnet_id": magnet_id,
"title": title,
})
if not chosen:
return False
target = chosen[0]
magnet_id = target.get("magnet_id")
title = target.get("title") or f"magnet-{magnet_id}"
try:
files = self.search("*", limit=200, filters={"view": "files", "magnet_id": magnet_id})
except Exception as exc:
print(f"alldebrid selector failed: {exc}\n")
return True
try:
from SYS.result_table import ResultTable
from SYS.rich_display import stdout_console
except Exception:
return True
table = ResultTable(f"AllDebrid Files: {title}").set_preserve_order(True)
table.set_table("alldebrid")
try:
table.set_table_metadata({"provider": "alldebrid", "view": "files", "magnet_id": magnet_id})
except Exception:
pass
2026-01-07 11:01:13 -08:00
table.set_source_command("download-file", ["-provider", "alldebrid"])
2026-01-01 20:37:27 -08:00
results_payload: List[Dict[str, Any]] = []
for r in files or []:
table.add_result(r)
try:
results_payload.append(r.to_dict())
except Exception:
results_payload.append(
{
"table": getattr(r, "table", "alldebrid"),
"title": getattr(r, "title", ""),
"path": getattr(r, "path", ""),
"full_metadata": getattr(r, "full_metadata", None),
}
)
try:
ctx.set_last_result_table(table, results_payload)
ctx.set_current_stage_table(table)
except Exception:
pass
try:
stdout_console().print()
stdout_console().print(table)
except Exception:
pass
return True
try:
    from SYS.result_table_adapters import register_provider
    from SYS.result_table_api import ColumnSpec, ResultModel, metadata_column, title_column

    def _as_payload(item: Any) -> Dict[str, Any]:
        """Coerce a search-result item (mapping or object) into a plain dict."""
        if isinstance(item, dict):
            return dict(item)
        try:
            if hasattr(item, "to_dict"):
                result = item.to_dict()  # type: ignore[attr-defined]
                if isinstance(result, dict):
                    return result
        except Exception:
            pass
        payload: Dict[str, Any] = {}
        for attr in ("title", "path", "columns", "full_metadata", "table", "source", "size_bytes", "size", "ext"):
            try:
                val = getattr(item, attr, None)
            except Exception:
                val = None
            if val is not None:
                payload.setdefault(attr, val)
        return payload

    def _coerce_size(value: Any) -> Optional[int]:
        """Best-effort conversion of *value* to an int byte count (None on failure)."""
        if value is None:
            return None
        if isinstance(value, (int, float)):
            try:
                return int(value)
            except Exception:
                return None
        try:
            return int(float(str(value).strip()))
        except Exception:
            return None

    def _first_present(payload: Dict[str, Any], keys: Tuple[str, ...]) -> Any:
        """Return the first value among *keys* that is not None (0 is valid)."""
        for key in keys:
            val = payload.get(key)
            if val is not None:
                return val
        return None

    def _normalize_columns(columns: Any, metadata: Dict[str, Any]) -> None:
        """Fold ("Header", value) column pairs into snake_case metadata keys."""
        if not isinstance(columns, list):
            return
        for entry in columns:
            if not isinstance(entry, (list, tuple)) or len(entry) < 2:
                continue
            key, value = entry[0], entry[1]
            if not key:
                continue
            # Strip BEFORE replacing spaces so " Size" -> "size", not "_size".
            normalized = str(key).strip().replace(" ", "_").lower()
            if not normalized:
                continue
            metadata.setdefault(normalized, value)

    def _convert_to_model(item: Any) -> ResultModel:
        """Convert a provider search result into a ``ResultModel`` row."""
        payload = _as_payload(item)
        title = str(payload.get("title") or payload.get("name") or "").strip()
        if not title:
            candidate = payload.get("path") or payload.get("detail") or payload.get("magnet_name")
            title = str(candidate or "").strip()
        if not title:
            title = "alldebrid"
        path_val = payload.get("path")
        if path_val is not None and not isinstance(path_val, str):
            try:
                path_val = str(path_val)
            except Exception:
                path_val = None
        # Explicit None-chain (not `or`) so a legitimate size of 0 is kept.
        size_bytes = _coerce_size(_first_present(payload, ("size_bytes", "size", "file_size")))
        metadata: Dict[str, Any] = {}
        full_metadata = payload.get("full_metadata")
        if isinstance(full_metadata, dict):
            metadata.update(full_metadata)
        _normalize_columns(payload.get("columns"), metadata)
        table_name = str(payload.get("table") or payload.get("source") or "alldebrid").strip().lower()
        if table_name:
            metadata.setdefault("table", table_name)
            metadata.setdefault("source", table_name)
            metadata.setdefault("provider", table_name)
        ext = payload.get("ext")
        if not ext and isinstance(path_val, str):
            # Derive the extension from the path when not provided explicitly.
            try:
                suffix = Path(path_val).suffix
                if suffix:
                    ext = suffix.lstrip(".")
            except Exception:
                ext = None
        return ResultModel(
            title=title,
            path=path_val,
            ext=str(ext) if ext is not None else None,
            size_bytes=size_bytes,
            metadata=metadata,
            source="alldebrid",
        )

    def _adapter(items: Iterable[Any]) -> Iterable[ResultModel]:
        """Yield ResultModel rows, silently skipping items that fail conversion."""
        for item in items or []:
            try:
                model = _convert_to_model(item)
            except Exception:
                continue
            yield model

    def _has_metadata(rows: List[ResultModel], key: str) -> bool:
        """True when at least one row carries a non-empty value for *key*."""
        for row in rows:
            md = row.metadata or {}
            if key in md:
                val = md[key]
                if val is None:
                    continue
                if isinstance(val, str) and not val.strip():
                    continue
                return True
        return False

    def _columns_factory(rows: List[ResultModel]) -> List[ColumnSpec]:
        """Build column specifications from metadata actually present in rows.

        Inspecting all rows first and adding a ColumnSpec only for populated
        metadata keys avoids rendering empty columns in the display.
        """
        cols = [title_column()]
        if _has_metadata(rows, "magnet_name"):
            cols.append(metadata_column("magnet_name", "Magnet"))
        if _has_metadata(rows, "magnet_id"):
            cols.append(metadata_column("magnet_id", "Magnet ID"))
        if _has_metadata(rows, "status"):
            cols.append(metadata_column("status", "Status"))
        if _has_metadata(rows, "ready"):
            cols.append(metadata_column("ready", "Ready"))
        if _has_metadata(rows, "relpath"):
            cols.append(metadata_column("relpath", "File Path"))
        if _has_metadata(rows, "provider_view"):
            cols.append(metadata_column("provider_view", "View"))
        if _has_metadata(rows, "size"):
            cols.append(metadata_column("size", "Size"))
        return cols

    def _selection_fn(row: ResultModel) -> List[str]:
        """Return CLI args used when a row is chosen via ``@N`` expansion.

        Precedence:
          1. explicit ``_selection_action`` (a full command argv),
          2. explicit ``_selection_args`` (typically URL-based),
          3. files view: the row's direct URL for immediate download,
          4. ``-magnet-id`` routing for folder-type rows,
          5. direct URL, then title, as last resorts.
        """
        metadata = row.metadata or {}
        action = metadata.get("_selection_action") or metadata.get("selection_action")
        if isinstance(action, (list, tuple)) and action:
            return [str(x) for x in action if x is not None]
        args = metadata.get("_selection_args") or metadata.get("selection_args")
        if isinstance(args, (list, tuple)) and args:
            return [str(x) for x in args if x is not None]
        view = metadata.get("provider_view") or metadata.get("view") or ""
        if view == "files":
            if row.path:
                return ["-url", row.path]
        magnet_id = metadata.get("magnet_id")
        if magnet_id is not None:
            return ["-magnet-id", str(magnet_id)]
        if row.path:
            return ["-url", row.path]
        return ["-title", row.title or ""]

    register_provider(
        "alldebrid",
        _adapter,
        columns=_columns_factory,
        selection_fn=_selection_fn,
        metadata={"description": "AllDebrid account provider"},
    )
except Exception:
    # Table-system registration is optional; a missing SYS result-table
    # module must not break importing this provider.
    pass