h
@@ -234,7 +234,7 @@
             "ddl\\.to/([0-9a-zA-Z]{12})"
         ],
         "regexp": "((ddownload\\.com/[0-9a-zA-Z]{12}))|(ddl\\.to/([0-9a-zA-Z]{12}))",
-        "status": true
+        "status": false
     },
     "dropapk": {
         "name": "dropapk",
@@ -622,7 +622,7 @@
             "(simfileshare\\.net/download/[0-9]+/)"
         ],
         "regexp": "(simfileshare\\.net/download/[0-9]+/)",
-        "status": false
+        "status": true
     },
     "streamtape": {
         "name": "streamtape",
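
A note on how these entries are consumed — a minimal sketch, assuming the JSON above parses into a dict keyed by hoster name; the `match_hoster` helper and variable names are illustrative, not part of the repository:

```python
import re

# Hypothetical helper: find the first enabled hoster whose "regexp" matches a URL.
# Assumes `hosters` is the parsed JSON config shown in the hunks above.
def match_hoster(hosters: dict, url: str):
    for name, entry in hosters.items():
        if not entry.get("status"):   # skip hosters toggled off, as in this commit
            continue
        if re.search(entry["regexp"], url):
            return name
    return None

hosters = {
    "ddownload": {
        "regexp": r"((ddownload\.com/[0-9a-zA-Z]{12}))|(ddl\.to/([0-9a-zA-Z]{12}))",
        "status": False,
    },
    "simfileshare": {
        "regexp": r"(simfileshare\.net/download/[0-9]+/)",
        "status": True,
    },
}
print(match_hoster(hosters, "https://simfileshare.net/download/12345/"))  # simfileshare
```
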
@@ -89,7 +89,7 @@ class Tidal(Provider):
     https://tidal-api.binimum.org.
     """

-    def _stringify = staticmethod(stringify)
+    _stringify = staticmethod(stringify)
     _extract_artists = staticmethod(extract_artists)
     _build_track_tags = staticmethod(build_track_tags)
     _coerce_duration_seconds = staticmethod(coerce_duration_seconds)
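
The removed line mixed `def` syntax with an assignment, which is a `SyntaxError` in Python; the fix assigns a `staticmethod`-wrapped module-level function to a class attribute. A minimal illustration of the pattern (`stringify` here is a stand-in, not the project's real implementation):

```python
def stringify(value) -> str:
    """Module-level helper (stand-in for the project's real function)."""
    return "" if value is None else str(value)

class Tidal:
    # def _stringify = staticmethod(stringify)   # SyntaxError: invalid syntax
    _stringify = staticmethod(stringify)          # correct: callable via the class or an instance

print(Tidal._stringify(42))      # "42"
print(Tidal()._stringify(None))  # ""
```
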
@@ -2,6 +2,7 @@ from __future__ import annotations

 import hashlib
 import json
 import re
 import sys
 import time
+import shutil
@@ -150,6 +150,22 @@ class Provider(ABC):
             or self.__class__.__name__
         ).lower()

+    @property
+    def label(self) -> str:
+        """Friendly display name for the provider."""
+        if hasattr(self, "NAME") and self.NAME:
+            name = str(self.NAME)
+            if name.lower() == "loc":
+                return "LoC"
+            if name.lower() == "openlibrary":
+                return "OpenLibrary"
+            if name.lower() == "internetarchive":
+                return "Internet Archive"
+            if name.lower() == "alldebrid":
+                return "AllDebrid"
+            return name[:1].upper() + name[1:]
+        return self.__class__.__name__
+
     @property
     def preserve_order(self) -> bool:
         """True if search result order is significant and should be preserved in displays."""
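
The chain of `if name.lower() == ...` checks could equally be a lookup table; a sketch of an equivalent formulation (an alternative, not what the commit ships):

```python
# Equivalent sketch using a mapping instead of chained ifs.
_SPECIAL_LABELS = {
    "loc": "LoC",
    "openlibrary": "OpenLibrary",
    "internetarchive": "Internet Archive",
    "alldebrid": "AllDebrid",
}

def label_for(name: str) -> str:
    # Fall back to simple capitalization, matching the property's last branch.
    return _SPECIAL_LABELS.get(name.lower(), name[:1].upper() + name[1:])

assert label_for("alldebrid") == "AllDebrid"
assert label_for("tidal") == "Tidal"
```
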
@@ -50,6 +50,7 @@ def _resolve_root_dir() -> Path:

 ROOT_DIR = _resolve_root_dir()
 DB_PATH = (ROOT_DIR / "medios.db").resolve()
+LOG_DB_PATH = (ROOT_DIR / "logs.db").resolve()

 class Database:
     _instance: Optional[Database] = None
@@ -286,6 +287,38 @@ _LOG_THREAD_STARTED = False
 _LOG_THREAD_LOCK = threading.Lock()


+def _ensure_log_db_schema() -> None:
+    try:
+        conn = sqlite3.connect(
+            str(LOG_DB_PATH),
+            timeout=30.0,
+            check_same_thread=False,
+        )
+        try:
+            conn.execute("PRAGMA busy_timeout = 30000")
+            conn.execute("PRAGMA journal_mode=WAL")
+            conn.execute("PRAGMA synchronous=NORMAL")
+            conn.execute(
+                """
+                CREATE TABLE IF NOT EXISTS logs (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT,
+                    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
+                    level TEXT,
+                    module TEXT,
+                    message TEXT
+                )
+                """
+            )
+            conn.commit()
+        finally:
+            conn.close()
+    except Exception:
+        pass
+
+
+_ensure_log_db_schema()
+
+
 def _log_worker_loop() -> None:
     """Background log writer using a temporary per-write connection with
     small retry/backoff and a file fallback when writes fail repeatedly.
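
Given the schema created above, reading recent entries back is straightforward; a minimal sketch, assuming the same `logs.db` layout (the query itself is not part of the commit):

```python
import sqlite3

def tail_logs(db_path: str, limit: int = 20):
    """Return the newest rows from the logs table created by _ensure_log_db_schema()."""
    conn = sqlite3.connect(db_path, timeout=30.0)
    try:
        cur = conn.execute(
            "SELECT timestamp, level, module, message "
            "FROM logs ORDER BY id DESC LIMIT ?",
            (limit,),
        )
        return cur.fetchall()
    finally:
        conn.close()
```
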
@@ -297,9 +330,13 @@ def _log_worker_loop() -> None:
                 written = False
                 while attempts < 3 and not written:
                     try:
-                        # Create a short-lived connection for the logging write so the
-                        # logging thread does not contend with the main connection lock.
-                        conn = sqlite3.connect(str(db.db_path), timeout=5.0)
+                        conn = sqlite3.connect(str(LOG_DB_PATH), timeout=30.0)
+                        try:
+                            conn.execute("PRAGMA busy_timeout = 30000")
+                            conn.execute("PRAGMA journal_mode=WAL")
+                            conn.execute("PRAGMA synchronous=NORMAL")
+                        except sqlite3.Error:
+                            pass
                         cur = conn.cursor()
                         cur.execute("INSERT INTO logs (level, module, message) VALUES (?, ?, ?)", (level, module, message))
                         conn.commit()
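
The hunk shows only the inside of the retry loop; the surrounding shape the docstring describes ("small retry/backoff and a file fallback") would look roughly like this. A sketch of the pattern, not the file's exact code — the backoff constants and the fallback path are assumptions:

```python
import sqlite3
import time

def write_log_with_retry(db_path: str, level: str, module: str, message: str) -> None:
    # Sketch of the retry/backoff around the INSERT; constants and the
    # file fallback are illustrative, not lifted from the commit.
    attempts, written = 0, False
    while attempts < 3 and not written:
        try:
            conn = sqlite3.connect(db_path, timeout=30.0)
            try:
                conn.execute("PRAGMA busy_timeout = 30000")
                conn.execute(
                    "INSERT INTO logs (level, module, message) VALUES (?, ?, ?)",
                    (level, module, message),
                )
                conn.commit()
                written = True
            finally:
                conn.close()
        except sqlite3.Error:
            attempts += 1
            time.sleep(0.1 * attempts)  # small linear backoff between attempts
    if not written:
        # Assumed fallback path: never lose a log line just because SQLite is busy.
        with open("logs_fallback.txt", "a", encoding="utf-8") as fh:
            fh.write(f"{level} {module} {message}\n")
```
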
@@ -452,45 +452,44 @@ class Download_File(Cmdlet):
             if provider_obj is not None:
                 attempted_provider_download = True
                 sr = SearchResult(
-                    table=str(table),
-                    title=str(title or "Unknown"),
-                    path=str(target or ""),
-                    tag=set(tags_list) if tags_list else set(),
-                    media_kind=str(media_kind or "file"),
-                    full_metadata=full_metadata
-                    if isinstance(full_metadata,
-                                  dict) else {},
-                )
-                debug(
-                    f"[download-file] Downloading provider item via {table}: {sr.title}"
-                )
+                    table=str(table),
+                    title=str(title or "Unknown"),
+                    path=str(target or ""),
+                    tag=set(tags_list) if tags_list else set(),
+                    media_kind=str(media_kind or "file"),
+                    full_metadata=full_metadata
+                    if isinstance(full_metadata, dict) else {},
+                )
+                debug(
+                    f"[download-file] Downloading provider item via {table}: {sr.title}"
+                )

                 # Preserve provider structure when possible (AllDebrid folders -> subfolders).
                 output_dir = final_output_dir
                 # Generic: allow provider to strict output_dir?
                 # Using default output_dir for now.

                 downloaded_path = provider_obj.download(sr, output_dir)
                 provider_sr = sr
                 debug(f"[download-file] Provider download result: {downloaded_path}")

                 if downloaded_path is None:
                     try:
                         downloaded_extra = self._download_provider_items(
                             provider=provider_obj,
                             provider_name=str(provider_key),
                             search_result=sr,
                             output_dir=output_dir,
                             progress=progress,
                             quiet_mode=quiet_mode,
                             config=config,
                         )
                     except Exception:
                         downloaded_extra = 0

                 if downloaded_extra:
                     downloaded_count += int(downloaded_extra)
                     continue

         # Fallback: if we have a direct HTTP URL and no provider successfully handled it
         if (downloaded_path is None and not attempted_provider_download
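
The `full_metadata=... if isinstance(...) else {}` argument the hunk rewraps is a defensive coercion so a malformed payload degrades to an empty dict; the same guard in isolation (names here are illustrative):

```python
from typing import Any, Dict

def coerce_metadata(full_metadata: Any) -> Dict[str, Any]:
    # Defensive guard mirroring the call site: anything that is not a dict
    # collapses to an empty dict instead of propagating a bad type.
    return full_metadata if isinstance(full_metadata, dict) else {}

assert coerce_metadata({"title": "x"}) == {"title": "x"}
assert coerce_metadata(None) == {}
assert coerce_metadata("oops") == {}
```
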
@@ -138,6 +138,12 @@ class search_file(Cmdlet):
         ext = "".join(ch for ch in ext if ch.isalnum())
         return ext[:5]

+    @staticmethod
+    def _normalize_lookup_target(value: Optional[str]) -> str:
+        """Normalize candidate names for store/provider matching."""
+        raw = str(value or "").strip().lower()
+        return "".join(ch for ch in raw if ch.isalnum())
+
     def _ensure_storage_columns(self, payload: Dict[str, Any]) -> Dict[str, Any]:
         """Ensure storage results have the necessary fields for result_table display."""
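
The normalization lowercases and strips everything but alphanumerics, so variants like `AllDebrid`, `all-debrid`, and `all_debrid` all collapse to the same key. For example:

```python
def _normalize_lookup_target(value) -> str:
    # Standalone copy of the added helper, for illustration.
    raw = str(value or "").strip().lower()
    return "".join(ch for ch in raw if ch.isalnum())

assert _normalize_lookup_target("AllDebrid") == "alldebrid"
assert _normalize_lookup_target(" all-debrid ") == "alldebrid"
assert _normalize_lookup_target(None) == ""
```
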
@@ -535,10 +541,12 @@
         configured = list_configured_backend_names(config or {})
         if storage_backend:
             matched = None
-            for p in (providers_map or {}):
-                if str(p).strip().lower() == str(storage_backend).strip().lower():
-                    matched = p
-                    break
+            storage_hint = self._normalize_lookup_target(storage_backend)
+            if storage_hint:
+                for p in (providers_map or {}):
+                    if self._normalize_lookup_target(p) == storage_hint:
+                        matched = p
+                        break
             if matched and str(storage_backend) not in configured:
                 log(f"Note: Treating '-store {storage_backend}' as provider search for '{matched}'", file=sys.stderr)
                 return self._run_provider_search(
@@ -553,10 +561,12 @@ class search_file(Cmdlet):
             )
         elif store_filter:
             matched = None
-            for p in (providers_map or {}):
-                if str(p).strip().lower() == str(store_filter).strip().lower():
-                    matched = p
-                    break
+            store_hint = self._normalize_lookup_target(store_filter)
+            if store_hint:
+                for p in (providers_map or {}):
+                    if self._normalize_lookup_target(p) == store_hint:
+                        matched = p
+                        break
             if matched and str(store_filter) not in configured:
                 log(f"Note: Treating 'store:{store_filter}' as provider search for '{matched}'", file=sys.stderr)
                 return self._run_provider_search(
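
The `-store` and `store:` branches now carry the same normalized-matching loop; a possible follow-up (not part of this commit) would hoist it into one helper on `search_file`:

```python
from typing import Optional

# Hypothetical refactor sketch: share the lookup between both branches.
def _match_provider(self, providers_map, target) -> Optional[str]:
    hint = self._normalize_lookup_target(target)
    if not hint:
        return None
    for p in (providers_map or {}):
        if self._normalize_lookup_target(p) == hint:
            return p
    return None
```
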