This commit is contained in:
2026-02-01 19:01:47 -08:00
parent 95748698fa
commit f0a82c2403
7 changed files with 113 additions and 50 deletions

View File

@@ -234,7 +234,7 @@
"ddl\\.to/([0-9a-zA-Z]{12})" "ddl\\.to/([0-9a-zA-Z]{12})"
], ],
"regexp": "((ddownload\\.com/[0-9a-zA-Z]{12}))|(ddl\\.to/([0-9a-zA-Z]{12}))", "regexp": "((ddownload\\.com/[0-9a-zA-Z]{12}))|(ddl\\.to/([0-9a-zA-Z]{12}))",
"status": true "status": false
}, },
"dropapk": { "dropapk": {
"name": "dropapk", "name": "dropapk",
@@ -622,7 +622,7 @@
"(simfileshare\\.net/download/[0-9]+/)" "(simfileshare\\.net/download/[0-9]+/)"
], ],
"regexp": "(simfileshare\\.net/download/[0-9]+/)", "regexp": "(simfileshare\\.net/download/[0-9]+/)",
"status": false "status": true
}, },
"streamtape": { "streamtape": {
"name": "streamtape", "name": "streamtape",

View File

@@ -89,7 +89,7 @@ class Tidal(Provider):
https://tidal-api.binimum.org. https://tidal-api.binimum.org.
""" """
def _stringify = staticmethod(stringify) _stringify = staticmethod(stringify)
_extract_artists = staticmethod(extract_artists) _extract_artists = staticmethod(extract_artists)
_build_track_tags = staticmethod(build_track_tags) _build_track_tags = staticmethod(build_track_tags)
_coerce_duration_seconds = staticmethod(coerce_duration_seconds) _coerce_duration_seconds = staticmethod(coerce_duration_seconds)

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
import hashlib import hashlib
import json import json
import re
import sys import sys
import time import time
import shutil import shutil

View File

@@ -150,6 +150,22 @@ class Provider(ABC):
or self.__class__.__name__ or self.__class__.__name__
).lower() ).lower()
@property
def label(self) -> str:
    """Friendly display name for the provider.

    Prefers a declared ``NAME`` attribute, mapping a few known
    providers to their canonical capitalization; otherwise the name
    is simply capitalized. Falls back to the class name when no
    ``NAME`` is set.
    """
    # Providers whose display form is not plain capitalization.
    _SPECIAL_CASED = {
        "loc": "LoC",
        "openlibrary": "OpenLibrary",
        "internetarchive": "Internet Archive",
        "alldebrid": "AllDebrid",
    }
    raw = getattr(self, "NAME", None)
    if not raw:
        return self.__class__.__name__
    name = str(raw)
    friendly = _SPECIAL_CASED.get(name.lower())
    if friendly is not None:
        return friendly
    return name[:1].upper() + name[1:]
@property @property
def preserve_order(self) -> bool: def preserve_order(self) -> bool:
"""True if search result order is significant and should be preserved in displays.""" """True if search result order is significant and should be preserved in displays."""

View File

@@ -50,6 +50,7 @@ def _resolve_root_dir() -> Path:
ROOT_DIR = _resolve_root_dir() ROOT_DIR = _resolve_root_dir()
DB_PATH = (ROOT_DIR / "medios.db").resolve() DB_PATH = (ROOT_DIR / "medios.db").resolve()
LOG_DB_PATH = (ROOT_DIR / "logs.db").resolve()
class Database: class Database:
_instance: Optional[Database] = None _instance: Optional[Database] = None
@@ -286,6 +287,38 @@ _LOG_THREAD_STARTED = False
_LOG_THREAD_LOCK = threading.Lock() _LOG_THREAD_LOCK = threading.Lock()
def _ensure_log_db_schema() -> None:
    """Create the ``logs`` table in the dedicated log database if missing.

    Best-effort: every sqlite failure is swallowed so that log-database
    bootstrapping can never prevent the application from importing.
    Uses WAL journaling and a generous busy timeout because multiple
    threads/processes may touch this database concurrently.
    """
    try:
        conn = sqlite3.connect(
            str(LOG_DB_PATH),
            timeout=30.0,
            check_same_thread=False,
        )
    except Exception:
        # Could not even open the database; logging falls back elsewhere.
        return
    try:
        for pragma in (
            "PRAGMA busy_timeout = 30000",
            "PRAGMA journal_mode=WAL",
            "PRAGMA synchronous=NORMAL",
        ):
            conn.execute(pragma)
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS logs (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
                level TEXT,
                module TEXT,
                message TEXT
            )
            """
        )
        conn.commit()
    except Exception:
        # Schema creation is best-effort; ignore failures.
        pass
    finally:
        try:
            conn.close()
        except Exception:
            pass


_ensure_log_db_schema()
def _log_worker_loop() -> None: def _log_worker_loop() -> None:
"""Background log writer using a temporary per-write connection with """Background log writer using a temporary per-write connection with
small retry/backoff and a file fallback when writes fail repeatedly. small retry/backoff and a file fallback when writes fail repeatedly.
@@ -297,9 +330,13 @@ def _log_worker_loop() -> None:
written = False written = False
while attempts < 3 and not written: while attempts < 3 and not written:
try: try:
# Create a short-lived connection for the logging write so the conn = sqlite3.connect(str(LOG_DB_PATH), timeout=30.0)
# logging thread does not contend with the main connection lock. try:
conn = sqlite3.connect(str(db.db_path), timeout=5.0) conn.execute("PRAGMA busy_timeout = 30000")
conn.execute("PRAGMA journal_mode=WAL")
conn.execute("PRAGMA synchronous=NORMAL")
except sqlite3.Error:
pass
cur = conn.cursor() cur = conn.cursor()
cur.execute("INSERT INTO logs (level, module, message) VALUES (?, ?, ?)", (level, module, message)) cur.execute("INSERT INTO logs (level, module, message) VALUES (?, ?, ?)", (level, module, message))
conn.commit() conn.commit()

View File

@@ -452,45 +452,44 @@ class Download_File(Cmdlet):
if provider_obj is not None: if provider_obj is not None:
attempted_provider_download = True attempted_provider_download = True
sr = SearchResult( sr = SearchResult(
table=str(table), table=str(table),
title=str(title or "Unknown"), title=str(title or "Unknown"),
path=str(target or ""), path=str(target or ""),
tag=set(tags_list) if tags_list else set(), tag=set(tags_list) if tags_list else set(),
media_kind=str(media_kind or "file"), media_kind=str(media_kind or "file"),
full_metadata=full_metadata full_metadata=full_metadata
if isinstance(full_metadata, if isinstance(full_metadata, dict) else {},
dict) else {}, )
) debug(
debug( f"[download-file] Downloading provider item via {table}: {sr.title}"
f"[download-file] Downloading provider item via {table}: {sr.title}" )
)
# Preserve provider structure when possible (AllDebrid folders -> subfolders). # Preserve provider structure when possible (AllDebrid folders -> subfolders).
output_dir = final_output_dir output_dir = final_output_dir
# Generic: allow provider to strict output_dir? # Generic: allow provider to strict output_dir?
# Using default output_dir for now. # Using default output_dir for now.
downloaded_path = provider_obj.download(sr, output_dir) downloaded_path = provider_obj.download(sr, output_dir)
provider_sr = sr provider_sr = sr
debug(f"[download-file] Provider download result: {downloaded_path}") debug(f"[download-file] Provider download result: {downloaded_path}")
if downloaded_path is None: if downloaded_path is None:
try: try:
downloaded_extra = self._download_provider_items( downloaded_extra = self._download_provider_items(
provider=provider_obj, provider=provider_obj,
provider_name=str(provider_key), provider_name=str(provider_key),
search_result=sr, search_result=sr,
output_dir=output_dir, output_dir=output_dir,
progress=progress, progress=progress,
quiet_mode=quiet_mode, quiet_mode=quiet_mode,
config=config, config=config,
) )
except Exception: except Exception:
downloaded_extra = 0 downloaded_extra = 0
if downloaded_extra: if downloaded_extra:
downloaded_count += int(downloaded_extra) downloaded_count += int(downloaded_extra)
continue continue
# Fallback: if we have a direct HTTP URL and no provider successfully handled it # Fallback: if we have a direct HTTP URL and no provider successfully handled it
if (downloaded_path is None and not attempted_provider_download if (downloaded_path is None and not attempted_provider_download

View File

@@ -138,6 +138,12 @@ class search_file(Cmdlet):
ext = "".join(ch for ch in ext if ch.isalnum()) ext = "".join(ch for ch in ext if ch.isalnum())
return ext[:5] return ext[:5]
@staticmethod
def _normalize_lookup_target(value: Optional[str]) -> str:
"""Normalize candidate names for store/provider matching."""
raw = str(value or "").strip().lower()
return "".join(ch for ch in raw if ch.isalnum())
def _ensure_storage_columns(self, payload: Dict[str, Any]) -> Dict[str, Any]: def _ensure_storage_columns(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Ensure storage results have the necessary fields for result_table display.""" """Ensure storage results have the necessary fields for result_table display."""
@@ -535,10 +541,12 @@ class search_file(Cmdlet):
configured = list_configured_backend_names(config or {}) configured = list_configured_backend_names(config or {})
if storage_backend: if storage_backend:
matched = None matched = None
for p in (providers_map or {}): storage_hint = self._normalize_lookup_target(storage_backend)
if str(p).strip().lower() == str(storage_backend).strip().lower(): if storage_hint:
matched = p for p in (providers_map or {}):
break if self._normalize_lookup_target(p) == storage_hint:
matched = p
break
if matched and str(storage_backend) not in configured: if matched and str(storage_backend) not in configured:
log(f"Note: Treating '-store {storage_backend}' as provider search for '{matched}'", file=sys.stderr) log(f"Note: Treating '-store {storage_backend}' as provider search for '{matched}'", file=sys.stderr)
return self._run_provider_search( return self._run_provider_search(
@@ -553,10 +561,12 @@ class search_file(Cmdlet):
) )
elif store_filter: elif store_filter:
matched = None matched = None
for p in (providers_map or {}): store_hint = self._normalize_lookup_target(store_filter)
if str(p).strip().lower() == str(store_filter).strip().lower(): if store_hint:
matched = p for p in (providers_map or {}):
break if self._normalize_lookup_target(p) == store_hint:
matched = p
break
if matched and str(store_filter) not in configured: if matched and str(store_filter) not in configured:
log(f"Note: Treating 'store:{store_filter}' as provider search for '{matched}'", file=sys.stderr) log(f"Note: Treating 'store:{store_filter}' as provider search for '{matched}'", file=sys.stderr)
return self._run_provider_search( return self._run_provider_search(