This commit is contained in:
2026-02-01 19:01:47 -08:00
parent 95748698fa
commit f0a82c2403
7 changed files with 113 additions and 50 deletions

View File

@@ -452,45 +452,44 @@ class Download_File(Cmdlet):
if provider_obj is not None:
attempted_provider_download = True
sr = SearchResult(
table=str(table),
title=str(title or "Unknown"),
path=str(target or ""),
tag=set(tags_list) if tags_list else set(),
media_kind=str(media_kind or "file"),
full_metadata=full_metadata
if isinstance(full_metadata,
dict) else {},
)
debug(
f"[download-file] Downloading provider item via {table}: {sr.title}"
)
table=str(table),
title=str(title or "Unknown"),
path=str(target or ""),
tag=set(tags_list) if tags_list else set(),
media_kind=str(media_kind or "file"),
full_metadata=full_metadata
if isinstance(full_metadata, dict) else {},
)
debug(
f"[download-file] Downloading provider item via {table}: {sr.title}"
)
# Preserve provider structure when possible (AllDebrid folders -> subfolders).
output_dir = final_output_dir
# Generic: allow provider to restrict output_dir?
# Using default output_dir for now.
downloaded_path = provider_obj.download(sr, output_dir)
provider_sr = sr
debug(f"[download-file] Provider download result: {downloaded_path}")
# Preserve provider structure when possible (AllDebrid folders -> subfolders).
output_dir = final_output_dir
# Generic: allow provider to restrict output_dir?
# Using default output_dir for now.
downloaded_path = provider_obj.download(sr, output_dir)
provider_sr = sr
debug(f"[download-file] Provider download result: {downloaded_path}")
if downloaded_path is None:
try:
downloaded_extra = self._download_provider_items(
provider=provider_obj,
provider_name=str(provider_key),
search_result=sr,
output_dir=output_dir,
progress=progress,
quiet_mode=quiet_mode,
config=config,
)
except Exception:
downloaded_extra = 0
if downloaded_path is None:
try:
downloaded_extra = self._download_provider_items(
provider=provider_obj,
provider_name=str(provider_key),
search_result=sr,
output_dir=output_dir,
progress=progress,
quiet_mode=quiet_mode,
config=config,
)
except Exception:
downloaded_extra = 0
if downloaded_extra:
downloaded_count += int(downloaded_extra)
continue
if downloaded_extra:
downloaded_count += int(downloaded_extra)
continue
# Fallback: if we have a direct HTTP URL and no provider successfully handled it
if (downloaded_path is None and not attempted_provider_download

View File

@@ -138,6 +138,12 @@ class search_file(Cmdlet):
ext = "".join(ch for ch in ext if ch.isalnum())
return ext[:5]
@staticmethod
def _normalize_lookup_target(value: Optional[str]) -> str:
"""Normalize candidate names for store/provider matching."""
raw = str(value or "").strip().lower()
return "".join(ch for ch in raw if ch.isalnum())
def _ensure_storage_columns(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Ensure storage results have the necessary fields for result_table display."""
@@ -535,10 +541,12 @@ class search_file(Cmdlet):
configured = list_configured_backend_names(config or {})
if storage_backend:
matched = None
for p in (providers_map or {}):
if str(p).strip().lower() == str(storage_backend).strip().lower():
matched = p
break
storage_hint = self._normalize_lookup_target(storage_backend)
if storage_hint:
for p in (providers_map or {}):
if self._normalize_lookup_target(p) == storage_hint:
matched = p
break
if matched and str(storage_backend) not in configured:
log(f"Note: Treating '-store {storage_backend}' as provider search for '{matched}'", file=sys.stderr)
return self._run_provider_search(
@@ -553,10 +561,12 @@ class search_file(Cmdlet):
)
elif store_filter:
matched = None
for p in (providers_map or {}):
if str(p).strip().lower() == str(store_filter).strip().lower():
matched = p
break
store_hint = self._normalize_lookup_target(store_filter)
if store_hint:
for p in (providers_map or {}):
if self._normalize_lookup_target(p) == store_hint:
matched = p
break
if matched and str(store_filter) not in configured:
log(f"Note: Treating 'store:{store_filter}' as provider search for '{matched}'", file=sys.stderr)
return self._run_provider_search(