diff --git a/Provider/Tidal.py b/Provider/Tidal.py index 3d828c0..db0d434 100644 --- a/Provider/Tidal.py +++ b/Provider/Tidal.py @@ -89,11 +89,34 @@ class Tidal(Provider): https://tidal-api.binimum.org. """ - _stringify = staticmethod(stringify) + _stringify = staticmethod(stringify) _extract_artists = staticmethod(extract_artists) _build_track_tags = staticmethod(build_track_tags) _coerce_duration_seconds = staticmethod(coerce_duration_seconds) + @property + def prefers_transfer_progress(self) -> bool: + return True + + def _get_view(self, query: str) -> str: + text = str(query or "").strip() + if not text: + return "track" + if re.search(r"\balbum\s*:", text, flags=re.IGNORECASE): + return "album" + if re.search(r"\bartist\s*:", text, flags=re.IGNORECASE): + return "artist" + return "track" + + def get_table_type(self, query: str, filters: Optional[Dict[str, Any]] = None) -> str: + view = self._get_view(query) + return f"tidal.{view}" + + def get_table_metadata(self, query: str, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + meta = super().get_table_metadata(query, filters) + meta["view"] = self._get_view(query) + return meta + def __init__(self, config: Optional[Dict[str, Any]] = None) -> None: super().__init__(config) self.api_urls = self._resolve_api_urls() diff --git a/Provider/alldebrid.py b/Provider/alldebrid.py index 55f41ed..ed85234 100644 --- a/Provider/alldebrid.py +++ b/Provider/alldebrid.py @@ -639,6 +639,42 @@ class AllDebrid(TableProviderMixin, Provider): URL = ("magnet:", "alldebrid:magnet:", "alldebrid:", "alldebrid🧲") URL_DOMAINS = () + def extract_query_arguments(self, query: str) -> Tuple[str, Dict[str, Any]]: + normalized = str(query or "").strip() + filters: Dict[str, Any] = {} + + # Pull out id=123 or id:123 + match = re.search(r"\bid\s*[=:]\s*(\d+)", normalized, flags=re.IGNORECASE) + if match: + filters["magnet_id"] = int(match.group(1)) + normalized = re.sub( + r"\bid\s*[=:]\s*\d+", "", normalized, flags=re.IGNORECASE 
+ ).strip() + + if not normalized: + normalized = "*" + + return normalized, filters + + def get_table_title(self, query: str, filters: Optional[Dict[str, Any]] = None) -> str: + f = filters or {} + magnet_id = f.get("magnet_id") + if magnet_id is not None: + return f"{self.label} Files: {magnet_id}" + q = str(query or "").strip() or "*" + return f"{self.label}: {q}" + + def get_table_metadata( + self, query: str, filters: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + meta = super().get_table_metadata(query, filters) + f = filters or {} + magnet_id = f.get("magnet_id") + meta["view"] = "files" if magnet_id is not None else "folders" + if magnet_id is not None: + meta["magnet_id"] = magnet_id + return meta + @classmethod def config_schema(cls) -> List[Dict[str, Any]]: return [ diff --git a/Provider/internetarchive.py b/Provider/internetarchive.py index 18b6f7c..ae71e25 100644 --- a/Provider/internetarchive.py +++ b/Provider/internetarchive.py @@ -467,6 +467,9 @@ class InternetArchive(Provider): """ URL = ("archive.org",) + def get_table_type(self, query: str, filters: Optional[Dict[str, Any]] = None) -> str: + return "internetarchive.folder" + @classmethod def config_schema(cls) -> List[Dict[str, Any]]: return [ diff --git a/Provider/loc.py b/Provider/loc.py index 8f1582e..23ce68d 100644 --- a/Provider/loc.py +++ b/Provider/loc.py @@ -14,6 +14,10 @@ class LOC(Provider): Currently implements Chronicling America collection search via the LoC JSON API. 
""" + @property + def preserve_order(self) -> bool: + return True + URL_DOMAINS = ["www.loc.gov"] URL = URL_DOMAINS diff --git a/Provider/openlibrary.py b/Provider/openlibrary.py index 7397881..9fc693d 100644 --- a/Provider/openlibrary.py +++ b/Provider/openlibrary.py @@ -834,6 +834,10 @@ class OpenLibrary(Provider): except Exception: return False, "" + @property + def preserve_order(self) -> bool: + return True + def search( self, query: str, diff --git a/Provider/torrent.py b/Provider/torrent.py index 85eac73..f84d4b9 100644 --- a/Provider/torrent.py +++ b/Provider/torrent.py @@ -360,6 +360,10 @@ class ApiBayScraper(Scraper): class Torrent(Provider): TABLE_AUTO_STAGES = {"torrent": ["download-file"]} + @property + def preserve_order(self) -> bool: + return True + def __init__(self, config: Optional[Dict[str, Any]] = None) -> None: super().__init__(config) self.scrapers: List[Scraper] = [] diff --git a/Provider/vimm.py b/Provider/vimm.py index 647eb19..a3d5ad1 100644 --- a/Provider/vimm.py +++ b/Provider/vimm.py @@ -55,6 +55,9 @@ class Vimm(TableProviderMixin, Provider): URL = ("https://vimm.net/vault/",) URL_DOMAINS = ("vimm.net",) + def get_source_command(self, args_list: List[str]) -> Tuple[str, List[str]]: + return "search-file", ["-provider", self.name] + REGION_CHOICES = [ {"value": "1", "text": "Argentina"}, {"value": "2", "text": "Asia"}, diff --git a/Provider/youtube.py b/Provider/youtube.py index e946346..06b6c4a 100644 --- a/Provider/youtube.py +++ b/Provider/youtube.py @@ -34,6 +34,10 @@ class YouTube(TableProviderMixin, Provider): # If the user provides extra args on the selection stage, forward them to download-file. 
AUTO_STAGE_USE_SELECTION_ARGS = True + @property + def preserve_order(self) -> bool: + return True + def search( self, query: str, diff --git a/ProviderCore/base.py b/ProviderCore/base.py index 38b5180..672f479 100644 --- a/ProviderCore/base.py +++ b/ProviderCore/base.py @@ -124,6 +124,7 @@ class Provider(ABC): """ URL: Sequence[str] = () + NAME: str = "" # Optional provider-driven defaults for what to do when a user selects @N from a # provider table. The CLI uses this to auto-insert stages (e.g. download-file) @@ -149,6 +150,36 @@ class Provider(ABC): or self.__class__.__name__ ).lower() + @property + def preserve_order(self) -> bool: + """True if search result order is significant and should be preserved in displays.""" + return False + + def get_table_type(self, query: str, filters: Optional[Dict[str, Any]] = None) -> str: + """Return the table type identifier for results from this provider.""" + return self.name + + def get_table_title(self, query: str, filters: Optional[Dict[str, Any]] = None) -> str: + """Return a descriptive title for the results table.""" + q = str(query or "").strip() or "*" + return f"{self.label}: {q}" + + def get_table_metadata(self, query: str, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Return metadata for the results table.""" + return {"provider": self.name} + + def get_source_command(self, args_list: List[str]) -> Tuple[str, List[str]]: + """Return the command and arguments that produced this search result. + + Used for @N expansion to re-run the search if needed. + """ + return "search-file", list(args_list) + + @property + def prefers_transfer_progress(self) -> bool: + """True if this provider prefers explicit transfer progress tracking (begin/finish) during download.""" + return False + @classmethod def config_schema(cls) -> List[Dict[str, Any]]: """Return configuration schema for this provider. 
diff --git a/Store/HydrusNetwork.py b/Store/HydrusNetwork.py index 50cd0ff..56d7b0b 100644 --- a/Store/HydrusNetwork.py +++ b/Store/HydrusNetwork.py @@ -55,6 +55,14 @@ class HydrusNetwork(Store): } ] + @property + def is_remote(self) -> bool: + return True + + @property + def prefer_defer_tags(self) -> bool: + return True + def _log_prefix(self) -> str: store_name = getattr(self, "NAME", None) or "unknown" return f"[hydrusnetwork:{store_name}]" diff --git a/Store/_base.py b/Store/_base.py index c12e9d9..c68713f 100644 --- a/Store/_base.py +++ b/Store/_base.py @@ -27,6 +27,16 @@ class Store(ABC): """ return [] + @property + def is_remote(self) -> bool: + """True if the store is a remote service (e.g. Hydrus) rather than local disk.""" + return False + + @property + def prefer_defer_tags(self) -> bool: + """True if the store prefers tags to be applied after the file is added.""" + return False + @abstractmethod def add_file(self, file_path: Path, **kwargs: Any) -> str: raise NotImplementedError diff --git a/cmdlet/add_file.py b/cmdlet/add_file.py index 3a1c60b..f202b25 100644 --- a/cmdlet/add_file.py +++ b/cmdlet/add_file.py @@ -1229,7 +1229,7 @@ class Add_File(Cmdlet): hash_hint = get_field(result, "hash") or get_field(result, "file_hash") or getattr(pipe_obj, "hash", None) return candidate, hash_hint, None - downloaded_path, hash_hint, tmp_dir = Add_File._maybe_download_alldebrid_result( + downloaded_path, hash_hint, tmp_dir = Add_File._maybe_download_provider_result( result, pipe_obj, config, @@ -1257,7 +1257,7 @@ class Add_File(Cmdlet): return normalized @staticmethod - def _maybe_download_alldebrid_result( + def _maybe_download_provider_result( result: Any, pipe_obj: models.PipeObject, config: Dict[str, Any], @@ -1272,19 +1272,27 @@ class Add_File(Cmdlet): if candidate: provider_key = candidate break - if provider_key != "alldebrid": + + if not provider_key: return None, None, None - try: - from Provider.alldebrid import AllDebrid - except Exception: + provider 
= get_search_provider(provider_key, config) + if provider is None: return None, None, None - try: - return AllDebrid.download_for_pipe_result(result, pipe_obj, config) - except Exception as exc: - debug(f"[add-file] AllDebrid download helper failed: {exc}") - return None, None, None + # Check for specialized download helper (used by AllDebrid and potentially others) + handler = getattr(provider, "download_for_pipe_result", None) + if not callable(handler): + # Fallback: check class if it's a classmethod and instance didn't have it (unlikely but safe) + handler = getattr(type(provider), "download_for_pipe_result", None) + + if callable(handler): + try: + return handler(result, pipe_obj, config) + except Exception as exc: + debug(f"[add-file] Provider '{provider_key}' download helper failed: {exc}") + + return None, None, None @staticmethod def _download_provider_source( @@ -2098,18 +2106,11 @@ class Add_File(Cmdlet): store = store_instance if store_instance is not None else Store(config) backend = store[backend_name] - hydrus_like_backend = False - try: - hydrus_like_backend = str(type(backend).__name__ or "").lower().startswith("hydrus") - except Exception: - hydrus_like_backend = False - - is_folder_backend = False - try: - is_folder_backend = type(backend).__name__ == "Folder" - except Exception: - is_folder_backend = False + # Use backend properties to drive metadata deferral behavior. + is_remote_backend = getattr(backend, "is_remote", False) + prefer_defer_tags = getattr(backend, "prefer_defer_tags", False) + # ... 
# Prepare metadata from pipe_obj and sidecars tags, url, title, f_hash = Add_File._prepare_metadata( result, media_path, pipe_obj, config @@ -2203,9 +2204,9 @@ class Add_File(Cmdlet): return 1 upload_tags = tags - if hydrus_like_backend and upload_tags: + if prefer_defer_tags and upload_tags: upload_tags = [] - debug("[add-file] Deferring tag application until after Hydrus upload") + debug(f"[add-file] Deferring tag application for {backend_name} (backend preference)") debug( f"[add-file] Storing into backend '{backend_name}' path='{media_path}' title='{title}' hash='{f_hash[:12] if f_hash else 'N/A'}'" @@ -2227,24 +2228,11 @@ class Add_File(Cmdlet): ##log(f"✓ File added to '{backend_name}': {file_identifier}", file=sys.stderr) stored_path: Optional[str] = None - # IMPORTANT: avoid calling get_file() for remote backends. - # For Hydrus, get_file() returns a browser URL (often with an access key) and should - # only be invoked by explicit user commands (e.g. get-file). + # IMPORTANT: avoid calling get_file() for remote backends by default to avoid + # unintended network activity or credential exposure in result payloads. try: - if is_folder_backend: - # Avoid extra DB round-trips for Folder; we can derive the stored path. - hash_for_path: Optional[str] = None - if isinstance(file_identifier, str) and len(file_identifier) == 64: - hash_for_path = file_identifier - elif f_hash and isinstance(f_hash, str) and len(f_hash) == 64: - hash_for_path = f_hash - if hash_for_path: - suffix = media_path.suffix if media_path else "" - filename = f"{hash_for_path}{suffix}" if suffix else hash_for_path - location_path = getattr(backend, "_location", None) - if location_path: - stored_path = str(Path(location_path) / filename) - else: + if not is_remote_backend: + # For local backends, resolving the path is cheap and useful. 
maybe_path = backend.get_file(file_identifier) if isinstance(maybe_path, Path): stored_path = str(maybe_path) @@ -2275,7 +2263,7 @@ class Add_File(Cmdlet): # Keep hash/store for downstream commands (get-tag, get-file, etc.). resolved_hash = chosen_hash - if hydrus_like_backend and tags: + if prefer_defer_tags and tags: # Support deferring tag application for batching bulk operations if defer_tag_association and pending_tag_associations is not None: try: @@ -2287,11 +2275,11 @@ class Add_File(Cmdlet): adder = getattr(backend, "add_tag", None) if callable(adder): debug( - f"[add-file] Applying {len(tags)} tag(s) post-upload to Hydrus" + f"[add-file] Applying {len(tags)} tag(s) post-upload to {backend_name}" ) adder(resolved_hash, list(tags)) except Exception as exc: - log(f"[add-file] Hydrus post-upload tagging failed: {exc}", file=sys.stderr) + log(f"[add-file] Post-upload tagging failed for {backend_name}: {exc}", file=sys.stderr) # If we have url(s), ensure they get associated with the destination file. # This mirrors `add-url` behavior but avoids emitting extra pipeline noise. diff --git a/cmdlet/download_file.py b/cmdlet/download_file.py index 744e2de..3658ec3 100644 --- a/cmdlet/download_file.py +++ b/cmdlet/download_file.py @@ -432,13 +432,7 @@ class Download_File(Cmdlet): pass transfer_label = label - table_type = str(table or "").lower() - if table_type == "tidal" or table_type.startswith("tidal."): - try: - progress.begin_transfer(label=transfer_label, total=None) - except Exception: - pass - + # If this looks like a provider item and providers are available, prefer provider.download() downloaded_path: Optional[Path] = None attempted_provider_download = False @@ -448,9 +442,16 @@ class Download_File(Cmdlet): if provider_key and get_search_provider and SearchResult: # Reuse helper to derive the provider key from table/provider/source hints. 
provider_obj = get_search_provider(provider_key, config) - if provider_obj is not None: - attempted_provider_download = True - sr = SearchResult( + + if provider_obj is not None and getattr(provider_obj, "prefers_transfer_progress", False): + try: + progress.begin_transfer(label=transfer_label, total=None) + except Exception: + pass + + if provider_obj is not None: + attempted_provider_download = True + sr = SearchResult( table=str(table), title=str(title or "Unknown"), path=str(target or ""), @@ -563,8 +564,7 @@ class Download_File(Cmdlet): except Exception as e: log(f"Error downloading item: {e}", file=sys.stderr) finally: - table_type = str(table or "").lower() - if table_type == "tidal" or table_type.startswith("tidal."): + if provider_obj is not None and getattr(provider_obj, "prefers_transfer_progress", False): try: progress.finish_transfer(label=transfer_label) except Exception: diff --git a/cmdlet/search_file.py b/cmdlet/search_file.py index a6d0ee2..dad3e6f 100644 --- a/cmdlet/search_file.py +++ b/cmdlet/search_file.py @@ -30,9 +30,6 @@ from ._shared import ( ) from SYS import pipeline as ctx -STORAGE_ORIGINS = {"local", - "hydrus"} - class _WorkerLogger: def __init__(self, worker_id: str) -> None: @@ -94,7 +91,6 @@ class search_file(Cmdlet): "provider", type="string", description="External provider name (e.g., tidal, youtube, soulseek, etc)", - choices=["bandcamp", "libgen", "soulseek", "youtube", "alldebrid", "loc", "internetarchive", "tidal", "tidal"], ), CmdletArg( "open", @@ -142,22 +138,8 @@ class search_file(Cmdlet): ext = "".join(ch for ch in ext if ch.isalnum()) return ext[:5] - @staticmethod - def _get_tidal_view_from_query(query: str) -> str: - text = str(query or "").strip() - if not text: - return "track" - if re.search(r"\balbum\s*:", text, flags=re.IGNORECASE): - return "album" - if re.search(r"\bartist\s*:", text, flags=re.IGNORECASE): - return "artist" - return "track" - def _ensure_storage_columns(self, payload: Dict[str, Any]) -> 
Dict[str, Any]: """Ensure storage results have the necessary fields for result_table display.""" - store_value = str(payload.get("store") or "").lower() - if store_value not in STORAGE_ORIGINS: - return payload # Ensure we have title field if "title" not in payload: @@ -265,78 +247,37 @@ class search_file(Cmdlet): provider_text = str(provider_name or "").strip() provider_lower = provider_text.lower() - id_match = re.search(r"\bid\s*[=:]\s*(\d+)", query, flags=re.IGNORECASE) - parsed_open_id = open_id - if id_match and parsed_open_id is None: - try: - parsed_open_id = int(id_match.group(1)) - except Exception: - parsed_open_id = None - query = re.sub(r"\bid\s*[=:]\s*\d+", "", query, flags=re.IGNORECASE).strip() - if not query: - query = "*" - - effective_open_id = parsed_open_id if parsed_open_id is not None else open_id - if provider_lower == "youtube": - provider_label = "Youtube" - elif provider_lower == "openlibrary": - provider_label = "OpenLibrary" - elif provider_lower == "loc": - provider_label = "LoC" - else: - provider_label = provider_text[:1].upper() + provider_text[1:] if provider_text else "Provider" + # Dynamic query/filter extraction via provider normalized_query = str(query or "").strip() provider_filters: Dict[str, Any] = {} try: normalized_query, provider_filters = provider.extract_query_arguments(query) except Exception: provider_filters = {} + normalized_query = (normalized_query or "").strip() query = normalized_query or "*" - provider_filters = dict(provider_filters or {}) + search_filters = dict(provider_filters or {}) - if provider_lower == "alldebrid" and effective_open_id is not None: - table_title = f"{provider_label} Files: {effective_open_id}".strip().rstrip(":") - else: - table_title = f"{provider_label}: {query}".strip().rstrip(":") + # Dynamic table generation via provider + table_title = provider.get_table_title(query, search_filters).strip().rstrip(":") + table_type = provider.get_table_type(query, search_filters) + table_meta = 
provider.get_table_metadata(query, search_filters) + preserve_order = provider.preserve_order - preserve_order = provider_lower in {"youtube", "openlibrary", "loc", "torrent"} - table_type = provider_name - table_meta: Dict[str, Any] = {"provider": provider_name} - if provider_lower == "tidal": - view = self._get_tidal_view_from_query(query) - table_meta["view"] = view - table_type = f"tidal.{view}" - elif provider_lower == "internetarchive": - # Internet Archive search results are effectively folders (items); selecting @N - # should open a list of downloadable files for the chosen item. - table_type = "internetarchive.folder" table = Table(table_title)._perseverance(preserve_order) table.set_table(table_type) - if provider_lower == "alldebrid": - table_meta["view"] = "files" if effective_open_id is not None else "folders" - if effective_open_id is not None: - table_meta["magnet_id"] = effective_open_id try: table.set_table_metadata(table_meta) except Exception: pass - if provider_lower == "vimm": - # Keep auto-staged download-file from inheriting raw query tokens; - # only propagate provider hint so @N expands to a clean downloader call. 
- table.set_source_command("search-file", ["-provider", provider_name]) - else: - table.set_source_command("search-file", list(args_list)) + + # Dynamic source command via provider + source_cmd, source_args = provider.get_source_command(args_list) + table.set_source_command(source_cmd, source_args) - search_filters = dict(provider_filters) debug(f"[search-file] Calling {provider_name}.search(filters={search_filters})") - if provider_lower == "alldebrid": - search_open_id = parsed_open_id if parsed_open_id is not None else open_id - view_value = "files" if search_open_id is not None else "folders" - search_filters["view"] = view_value - if search_open_id is not None: - search_filters["magnet_id"] = search_open_id results = provider.search(query, limit=limit, filters=search_filters or None) debug(f"[search-file] {provider_name} -> {len(results or [])} result(s)") diff --git a/logs/log_fallback.txt b/logs/log_fallback.txt index 490532b..959746b 100644 --- a/logs/log_fallback.txt +++ b/logs/log_fallback.txt @@ -794,3 +794,34 @@ http://10.162.158.28:45899/get_files/file?hash=5c7296f1a5544522e3d118f60080e0389 2026-02-01T07:13:37.692682Z [DEBUG] logger.debug: DEBUG: [add-file] RESOLVED source: path=C:\Users\Admin\AppData\Local\Temp\Medios-Macina\ubuntu-25.10-desktop-amd64.iso, hash=32e30d72ae4798c633323a2684d94a11582bb03a6ab38d2b0d5ae5eabc5e577b... 
2026-02-01T07:13:54.207275Z [DEBUG] add_file._handle_local_export: Exporting to local path: C:\Users\Admin\Downloads 2026-02-01T07:14:10.869330Z [DEBUG] add_file._emit_pipe_object: Result (1 rows) +2026-02-01T07:24:10.136647Z [DEBUG] logger.debug: DEBUG: [table] state: restore_previous_result_table +2026-02-01T07:24:27.055053Z [DEBUG] logger.debug: DEBUG: [table] display_table: None +2026-02-01T07:24:43.959638Z [DEBUG] logger.debug: DEBUG: [table] current_stage_table: id=2533513637200 class=Table title='Alldebrid: *' table='alldebrid' rows=50 source='search-file' source_args=['-provider', 'alldebrid', '*'] no_choice=False preserve_order=False meta_keys=['provider', 'view'] +2026-02-01T07:25:00.818388Z [DEBUG] logger.debug: DEBUG: [table] last_result_table: id=2533513637200 class=Table title='Alldebrid: *' table='alldebrid' rows=50 source='search-file' source_args=['-provider', 'alldebrid', '*'] no_choice=False preserve_order=False meta_keys=['provider', 'view'] +2026-02-01T07:25:17.612177Z [DEBUG] logger.debug: DEBUG: [table] buffers: display_items=0 last_result_items=50 history=0 forward=1 last_selection=[] +2026-02-01T07:25:34.284226Z [DEBUG] logger.debug: DEBUG: [search-file] Calling alldebrid.search(filters={}) +2026-02-01T07:25:50.869811Z [DEBUG] logger.debug: DEBUG: +2026-02-01T07:26:07.410241Z [DEBUG] logger.debug: DEBUG: +2026-02-01T07:26:24.132737Z [DEBUG] logger.debug: DEBUG: [search-file] alldebrid -> 50 result(s) +2026-02-01T07:26:41.017739Z [DEBUG] search_file._run_provider_search: Error searching provider 'alldebrid': module 'SYS.pipeline' has no attribute 'set_last_result_table_preserve_history' +2026-02-01T07:26:57.935162Z [DEBUG] logger.debug: DEBUG: Traceback (most recent call last): + File "C:\Forgejo\Medios-Macina\cmdlet\search_file.py", line 418, in _run_provider_search + ctx.set_last_result_table_preserve_history(table, results_list) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +AttributeError: module 'SYS.pipeline' has no attribute 
'set_last_result_table_preserve_history' + +2026-02-01T07:27:14.641323Z [DEBUG] logger.debug: DEBUG: Auto-inserting download-file after selection +2026-02-01T07:27:31.181302Z [DEBUG] logger.debug: DEBUG: Inserted auto stage before existing pipeline: ['download-file'] +2026-02-01T07:27:47.818774Z [DEBUG] logger.debug: DEBUG: Applying row action for row 8 -> ['download-file', '-provider', 'alldebrid', '-url', 'alldebrid:magnet:451673845'] +2026-02-01T07:28:04.421821Z [DEBUG] logger.debug: DEBUG: Replacing stage 0 ['download-file'] with row action ['download-file', '-provider', 'alldebrid', '-url', 'alldebrid:magnet:451673845'] +2026-02-01T07:28:21.056473Z [DEBUG] logger.debug: DEBUG: [download-file] run invoked with args: ['-provider', 'alldebrid', '-url', 'alldebrid:magnet:451673845'] +2026-02-01T07:28:37.886918Z [DEBUG] logger.debug: DEBUG: Starting download-file +2026-02-01T07:28:54.656389Z [DEBUG] alldebrid.url_patterns: [alldebrid] url_patterns loaded 0 cached host domains; total patterns=4 +2026-02-01T07:29:11.420397Z [DEBUG] alldebrid.url_patterns: [alldebrid] url_patterns loaded 0 cached host domains; total patterns=4 +2026-02-01T07:29:28.217562Z [DEBUG] alldebrid.url_patterns: [alldebrid] url_patterns loaded 0 cached host domains; total patterns=4 +2026-02-01T07:29:45.060903Z [DEBUG] logger.debug: DEBUG: Output directory: C:\Users\Admin\AppData\Local\Temp\Medios-Macina +2026-02-01T07:30:01.922023Z [DEBUG] logger.debug: DEBUG: Processing URL: alldebrid:magnet:451673845 +2026-02-01T07:30:18.681576Z [DEBUG] alldebrid.url_patterns: [alldebrid] url_patterns loaded 0 cached host domains; total patterns=4 +2026-02-01T07:30:35.323006Z [DEBUG] logger.debug: DEBUG: Provider alldebrid claimed alldebrid:magnet:451673845 +2026-02-01T07:30:51.934937Z [DEBUG] logger.debug: DEBUG: [download_items] Found magnet_id 451673845, downloading files directly +2026-02-01T07:31:08.563245Z [DEBUG] logger.debug: DEBUG: