Files
Medios-Macina/SYS/config.py

818 lines
30 KiB
Python
Raw Normal View History

2025-12-29 19:00:00 -08:00
""" """
from __future__ import annotations
2026-01-22 11:05:40 -08:00
import json
2026-01-23 16:46:48 -08:00
import sqlite3
import time
2026-01-30 10:47:47 -08:00
import os
import datetime
import sys
2026-02-09 17:22:40 -08:00
import tempfile
2026-01-23 18:40:00 -08:00
from copy import deepcopy
2025-12-29 19:00:00 -08:00
from pathlib import Path
2026-01-23 18:40:00 -08:00
from typing import Any, Dict, List, Optional, Tuple
2025-12-29 19:00:00 -08:00
from SYS.logger import log
2026-01-31 19:57:09 -08:00
import logging
logger = logging.getLogger(__name__)
2026-01-11 00:52:54 -08:00
from SYS.utils import expand_path
2026-02-09 17:45:57 -08:00
from SYS.database import db, get_config_all, rows_to_config
2025-12-29 19:00:00 -08:00
SCRIPT_DIR = Path(__file__).resolve().parent
2026-01-30 10:47:47 -08:00
# Save lock settings (cross-process)
_SAVE_LOCK_DIRNAME = ".medios_save_lock"
_SAVE_LOCK_TIMEOUT = 30.0 # seconds to wait for save lock
_SAVE_LOCK_STALE_SECONDS = 3600 # consider lock stale after 1 hour
2026-01-23 17:12:15 -08:00
_CONFIG_CACHE: Dict[str, Any] = {}
2026-01-23 18:40:00 -08:00
_LAST_SAVED_CONFIG: Dict[str, Any] = {}
2026-01-23 16:46:48 -08:00
_CONFIG_SAVE_MAX_RETRIES = 5
_CONFIG_SAVE_RETRY_DELAY = 0.15
2025-12-29 19:00:00 -08:00
2026-01-30 10:47:47 -08:00
class ConfigSaveConflict(Exception):
    """Signals that persisting the config would clobber changes another
    process already wrote to disk; callers may reload and retry."""
2026-01-11 03:56:09 -08:00
def global_config() -> List[Dict[str, Any]]:
    """Describe the schema of the global settings (key, label, default, choices)."""
    return [
        {
            "key": "debug",
            "label": "Debug Output",
            "default": "false",
            "choices": ["true", "false"],
        },
        {
            "key": "auto_update",
            "label": "Auto-Update",
            "default": "true",
            "choices": ["true", "false"],
        },
    ]
2026-01-11 00:52:54 -08:00
def clear_config_cache() -> None:
    """Drop the in-memory config cache and the last-saved baseline snapshot."""
    global _CONFIG_CACHE, _LAST_SAVED_CONFIG
    # Rebind (rather than .clear()) so readers holding the old dicts are unaffected.
    _CONFIG_CACHE, _LAST_SAVED_CONFIG = {}, {}
2025-12-29 19:00:00 -08:00
def get_hydrus_instance(
    config: Dict[str, Any], instance_name: str = "home"
) -> Optional[Dict[str, Any]]:
    """Look up a Hydrus instance configuration by name.

    Resolution order: exact key match, case-insensitive match, first
    (alphabetically) instance whose name does not start with "new_", and
    finally the alphabetically first instance of any name.
    """
    store = config.get("store", {})
    if not isinstance(store, dict):
        return None
    network = store.get("hydrusnetwork", {})
    if not isinstance(network, dict) or not network:
        return None
    exact = network.get(instance_name)
    if isinstance(exact, dict):
        return exact
    wanted = str(instance_name or "").lower()
    for label, entry in network.items():
        if isinstance(entry, dict) and str(label).lower() == wanted:
            return entry
    ordered = sorted(network.keys())
    for label in ordered:
        if str(label or "").startswith("new_"):
            continue
        entry = network.get(label)
        if isinstance(entry, dict):
            return entry
    fallback = network.get(ordered[0])
    if isinstance(fallback, dict):
        return fallback
    return None
def get_hydrus_access_key(config: Dict[str, Any], instance_name: str = "home") -> Optional[str]:
    """Return the Hydrus Client API access key for *instance_name*, or None.

    Reads config["store"]["hydrusnetwork"][name]["API"], resolving the
    instance via get_hydrus_instance().
    """
    instance = get_hydrus_instance(config, instance_name)
    if not instance:
        return None
    raw = instance.get("API")
    if not raw:
        return None
    return str(raw).strip()
def get_hydrus_url(config: Dict[str, Any], instance_name: str = "home") -> Optional[str]:
    """Return the Hydrus Client API base URL for *instance_name*, or None.

    Reads config["store"]["hydrusnetwork"][name]["URL"], resolving the
    instance via get_hydrus_instance().
    """
    instance = get_hydrus_instance(config, instance_name)
    if not instance:
        return None
    raw = instance.get("URL")
    return str(raw).strip() if raw else None
def get_provider_block(config: Dict[str, Any], name: str) -> Dict[str, Any]:
    """Return the provider sub-dict for *name* (matched case/whitespace
    insensitively), or an empty dict when absent."""
    providers = config.get("provider")
    if not isinstance(providers, dict):
        return {}
    normalized = _normalize_provider_name(name)
    if normalized:
        direct = providers.get(normalized)
        if isinstance(direct, dict):
            return direct
        for label, entry in providers.items():
            if isinstance(entry, dict) and _normalize_provider_name(label) == normalized:
                return entry
    return {}
2025-12-29 19:00:00 -08:00
def get_soulseek_username(config: Dict[str, Any]) -> Optional[str]:
    """Return the configured Soulseek username (either key casing), or None."""
    creds = get_provider_block(config, "soulseek")
    raw = creds.get("username") or creds.get("USERNAME")
    if not raw:
        return None
    return str(raw).strip()
def get_soulseek_password(config: Dict[str, Any]) -> Optional[str]:
    """Return the configured Soulseek password (either key casing), or None."""
    creds = get_provider_block(config, "soulseek")
    raw = creds.get("password") or creds.get("PASSWORD")
    if not raw:
        return None
    return str(raw).strip()
def resolve_output_dir(config: Dict[str, Any]) -> Path:
    """Resolve the output directory from config (single source of truth).

    Priority: config["temp"] (if it or its parent is reachable), then
    config["outfile"], then the system temp directory.
    """
    configured_temp = config.get("temp")
    if configured_temp:
        try:
            candidate = expand_path(configured_temp)
            # Reject paths we clearly cannot reach (e.g. permission-restricted
            # system locations whose parents also do not exist).
            if candidate.exists() or candidate.parent.exists():
                return candidate
        except Exception as exc:
            logger.debug("resolve_output_dir: failed to expand temp value %r: %s", configured_temp, exc, exc_info=True)
    configured_outfile = config.get("outfile")
    if configured_outfile:
        try:
            return expand_path(configured_outfile)
        except Exception as exc:
            logger.debug("resolve_output_dir: failed to expand outfile value %r: %s", configured_outfile, exc, exc_info=True)
    # Last resort: the platform temp directory always exists.
    return Path(tempfile.gettempdir())
2025-12-29 19:00:00 -08:00
def get_local_storage_path(config: Dict[str, Any]) -> Optional[Path]:
    """Return the configured local storage path, or None.

    Checks config["storage"]["local"]["path"] first, then the legacy
    config["Local"]["path"] layout.
    """
    storage = config.get("storage", {})
    if isinstance(storage, dict):
        nested = storage.get("local", {})
        if isinstance(nested, dict) and nested.get("path"):
            return expand_path(nested["path"])
    # Legacy capitalized section.
    legacy = config.get("Local", {})
    if isinstance(legacy, dict) and legacy.get("path"):
        return expand_path(legacy["path"])
    return None
def get_debrid_api_key(config: Dict[str, Any], service: str = "All-debrid") -> Optional[str]:
    """Return the Debrid API key for *service*, or None.

    Reads config["store"]["debrid"][<normalized service name>]; the entry may
    be either a {"api_key": ...} mapping or a bare string.
    """
    store = config.get("store", {})
    if not isinstance(store, dict):
        return None
    debrid = store.get("debrid", {})
    if not isinstance(debrid, dict):
        return None
    entry = debrid.get(str(service).strip().lower())
    if isinstance(entry, dict):
        raw = entry.get("api_key")
        return str(raw).strip() if raw else None
    if isinstance(entry, str):
        return entry.strip() or None
    return None
def get_provider_credentials(config: Dict[str, Any], provider: str) -> Optional[Dict[str, str]]:
    """Return a non-empty credentials dict for *provider*, or None.

    Checks the modern config["provider"][<lowercase name>] section first and
    then the legacy capitalized top-level section (e.g. config["OpenLibrary"]).
    """
    providers = config.get("provider", {})
    if isinstance(providers, dict):
        creds = providers.get(provider.lower(), {})
        if isinstance(creds, dict) and creds:
            return creds
    # Legacy layout used capitalized top-level keys for a fixed set of providers.
    legacy_names = {
        "openlibrary": "OpenLibrary",
        "archive": "Archive",
        "soulseek": "Soulseek",
    }
    legacy_key = legacy_names.get(provider.lower())
    if legacy_key:
        creds = config.get(legacy_key, {})
        if isinstance(creds, dict) and creds:
            return creds
    return None
def resolve_cookies_path(
    config: Dict[str, Any], script_dir: Optional[Path] = None
) -> Optional[Path]:
    """Locate a yt-dlp cookies file, or None.

    Only the modular config style is supported:
        [tool=ytdlp]
        cookies="C:\\path\\cookies.txt"
    Relative values are resolved against *script_dir* (default: this module's
    directory); falls back to <base>/cookies.txt when no configured file exists.
    """
    candidates: list[Any] = []
    try:
        tool_cfg = config.get("tool")
        if isinstance(tool_cfg, dict):
            ytdlp_cfg = tool_cfg.get("ytdlp")
            if isinstance(ytdlp_cfg, dict):
                candidates.extend((ytdlp_cfg.get("cookies"), ytdlp_cfg.get("cookiefile")))
    except Exception as exc:
        logger.debug("resolve_cookies_path: failed to read tool.ytdlp cookies: %s", exc, exc_info=True)
    root = script_dir or SCRIPT_DIR
    for raw in candidates:
        if not raw:
            continue
        resolved = expand_path(raw)
        if not resolved.is_absolute():
            resolved = expand_path(root / resolved)
        if resolved.is_file():
            return resolved
    fallback = root / "cookies.txt"
    return fallback if fallback.is_file() else None
def resolve_debug_log(config: Dict[str, Any]) -> Optional[Path]:
    """Return the path for the download debug log, or None if unset.

    Relative paths are anchored at the current working directory.
    """
    raw = config.get("download_debug_log")
    if not raw:
        return None
    resolved = expand_path(raw)
    return resolved if resolved.is_absolute() else Path.cwd() / resolved
2026-01-27 14:56:01 -08:00
def _normalize_provider_name(value: Any) -> Optional[str]:
candidate = str(value or "").strip().lower()
return candidate if candidate else None
def _extract_api_key(value: Any) -> Optional[str]:
if isinstance(value, dict):
for key in ("api_key", "API_KEY", "apikey", "APIKEY"):
candidate = value.get(key)
if isinstance(candidate, str) and candidate.strip():
return candidate.strip()
elif isinstance(value, str):
trimmed = value.strip()
if trimmed:
return trimmed
return None
def _sync_alldebrid_api_key(config: Dict[str, Any]) -> None:
    """Mirror the AllDebrid API key between config["provider"]["alldebrid"]
    and config["store"]["debrid"]["all-debrid"] so both locations agree.

    Mutates *config* in place; the provider-side key wins when both exist.
    Legacy bare-string entries are normalized to {"api_key": ...} dicts.
    """
    if not isinstance(config, dict):
        return
    # Make sure a provider mapping exists (created if missing).
    providers = config.get("provider")
    if not isinstance(providers, dict):
        providers = {}
        config["provider"] = providers
    provider_entry = providers.get("alldebrid")
    provider_section: Dict[str, Any] | None = None
    provider_key = None
    if isinstance(provider_entry, dict):
        provider_section = provider_entry
        provider_key = _extract_api_key(provider_section)
    elif isinstance(provider_entry, str):
        # Legacy form: a bare string; normalize to a dict when non-empty.
        provider_key = provider_entry.strip()
        if provider_key:
            provider_section = {"api_key": provider_key}
            providers["alldebrid"] = provider_section
    # Make sure a store mapping exists (created if missing).
    store_block = config.get("store")
    if not isinstance(store_block, dict):
        store_block = {}
        config["store"] = store_block
    debrid_block = store_block.get("debrid")
    store_key = None
    if isinstance(debrid_block, dict):
        service_entry = debrid_block.get("all-debrid")
        if isinstance(service_entry, dict):
            store_key = _extract_api_key(service_entry)
        elif isinstance(service_entry, str):
            # Legacy bare-string form on the store side; normalize it too.
            store_key = service_entry.strip()
            if store_key:
                debrid_block["all-debrid"] = {"api_key": store_key}
    else:
        debrid_block = None
    if provider_key:
        # Provider key wins: push it into the store section, creating the
        # intermediate dicts as needed.
        if debrid_block is None:
            debrid_block = {}
            store_block["debrid"] = debrid_block
        service_section = debrid_block.get("all-debrid")
        if not isinstance(service_section, dict):
            service_section = {}
            debrid_block["all-debrid"] = service_section
        service_section["api_key"] = provider_key
    elif store_key:
        # Only the store has a key: mirror it back to the provider section.
        if provider_section is None:
            provider_section = {}
            providers["alldebrid"] = provider_section
        provider_section["api_key"] = store_key
2025-12-29 19:00:00 -08:00
2026-01-23 18:40:00 -08:00
def _flatten_config_entries(config: Dict[str, Any]) -> Dict[Tuple[str, str, str, str], Any]:
entries: Dict[Tuple[str, str, str, str], Any] = {}
for key, value in config.items():
if key in ('store', 'provider', 'tool') and isinstance(value, dict):
for subtype, instances in value.items():
if not isinstance(instances, dict):
continue
if key == 'store':
for name, settings in instances.items():
if not isinstance(settings, dict):
continue
for k, v in settings.items():
entries[(key, subtype, name, k)] = v
else:
for k, v in instances.items():
entries[(key, subtype, 'default', k)] = v
elif not key.startswith('_') and value is not None:
entries[('global', 'none', 'none', key)] = value
return entries
def _count_changed_entries(old_config: Dict[str, Any], new_config: Dict[str, Any]) -> int:
    """Count flattened entries that were changed, added, or removed between two configs."""
    before = _flatten_config_entries(old_config or {})
    after = _flatten_config_entries(new_config or {})
    changed_or_added = sum(1 for entry, value in after.items() if before.get(entry) != value)
    removed = sum(1 for entry in before if entry not in after)
    return changed_or_added + removed
2026-01-23 17:12:15 -08:00
def load_config() -> Dict[str, Any]:
    """Return the configuration, loading it from the database on first use.

    The result is cached in _CONFIG_CACHE; a deepcopy baseline is kept in
    _LAST_SAVED_CONFIG so save_config() can detect external DB changes.
    Returns an empty dict when the database holds no configuration.
    """
    global _CONFIG_CACHE, _LAST_SAVED_CONFIG
    # Serve the cached copy when we have one.
    if _CONFIG_CACHE:
        return _CONFIG_CACHE
    # Load strictly from database
    db_config = get_config_all()
    if db_config:
        # Keep the provider/store AllDebrid keys in sync before caching.
        _sync_alldebrid_api_key(db_config)
        _CONFIG_CACHE = db_config
        # Deepcopy so later in-place edits don't corrupt the baseline.
        _LAST_SAVED_CONFIG = deepcopy(db_config)
        try:
            # Log a compact summary to help detect startup overwrites/mismatches
            provs = list(db_config.get("provider", {}).keys()) if isinstance(db_config.get("provider"), dict) else []
            stores = list(db_config.get("store", {}).keys()) if isinstance(db_config.get("store"), dict) else []
            mtime = None
            try:
                mtime = datetime.datetime.fromtimestamp(db.db_path.stat().st_mtime, datetime.timezone.utc).isoformat().replace('+00:00', 'Z')
            except Exception:
                # Best-effort only; the DB file may be unreadable/stat-less here.
                mtime = None
            summary = (
                f"Loaded config from {db.db_path.name}: providers={len(provs)} ({', '.join(provs[:10])}{'...' if len(provs)>10 else ''}), "
                f"stores={len(stores)} ({', '.join(stores[:10])}{'...' if len(stores)>10 else ''}), mtime={mtime}"
            )
            log(summary)
            # Forensics disabled: audit/mismatch/backup detection removed to simplify code.
        except Exception:
            # Summary logging must never break config loading.
            logger.exception("Failed to build config load summary from %s", db.db_path)
        return db_config
    # Empty DB: reset the baseline so a later save counts everything as new.
    _LAST_SAVED_CONFIG = {}
    return {}
2025-12-29 19:00:00 -08:00
2026-01-23 17:12:15 -08:00
def reload_config() -> Dict[str, Any]:
    """Discard cached state and re-read the configuration from the database."""
    clear_config_cache()
    return load_config()
2026-01-22 11:05:40 -08:00
2025-12-29 19:00:00 -08:00
2026-01-30 10:47:47 -08:00
def _acquire_save_lock(timeout: float = _SAVE_LOCK_TIMEOUT) -> Path:
    """Acquire a cross-process save lock implemented as a directory.

    Directory creation is atomic on all supported platforms, which makes it a
    portable mutex between processes. Stale locks (older than
    _SAVE_LOCK_STALE_SECONDS, judged by owner.json's "ts" or the directory
    mtime) are removed and acquisition retried.

    Returns the Path to the created lock directory. Raises ConfigSaveConflict
    if the lock cannot be acquired within the timeout.
    """
    # The lock lives next to the DB file so all processes agree on its location.
    lock_dir = Path(db.db_path).with_name(_SAVE_LOCK_DIRNAME)
    start = time.time()
    while True:
        try:
            # exist_ok=False makes mkdir fail if another process holds the lock.
            lock_dir.mkdir(exist_ok=False)
            # Write owner metadata for diagnostics
            try:
                (lock_dir / "owner.json").write_text(json.dumps({
                    "pid": os.getpid(),
                    "ts": time.time(),
                    "cmdline": " ".join(sys.argv),
                }))
            except Exception as exc:
                # Metadata is best-effort; the lock itself is already held.
                logger.exception("Failed to write save lock owner metadata %s: %s", lock_dir, exc)
            return lock_dir
        except FileExistsError:
            # Check for stale lock
            try:
                owner = lock_dir / "owner.json"
                if owner.exists():
                    data = json.loads(owner.read_text())
                    ts = data.get("ts") or 0
                    if time.time() - ts > _SAVE_LOCK_STALE_SECONDS:
                        # Owner looks dead; break its lock and retry immediately.
                        try:
                            import shutil
                            shutil.rmtree(lock_dir)
                            continue
                        except Exception as exc:
                            logger.exception("Failed to remove stale save lock dir %s", lock_dir)
                else:
                    # No owner file; if directory is old enough consider it stale
                    try:
                        if time.time() - lock_dir.stat().st_mtime > _SAVE_LOCK_STALE_SECONDS:
                            import shutil
                            shutil.rmtree(lock_dir)
                            continue
                    except Exception as exc:
                        logger.exception("Failed to stat/remove stale save lock dir %s", lock_dir)
            except Exception as exc:
                # Inspection failures fall through to the timeout check below.
                logger.exception("Failed to inspect save lock directory %s: %s", lock_dir, exc)
            if time.time() - start > timeout:
                raise ConfigSaveConflict("Save lock busy; could not acquire in time")
            # Brief backoff before polling the lock again.
            time.sleep(0.1)
def _release_save_lock(lock_dir: Path) -> None:
try:
owner = lock_dir / "owner.json"
try:
if owner.exists():
owner.unlink()
except Exception:
2026-01-31 19:57:09 -08:00
logger.exception("Failed to remove save lock owner file %s", owner)
2026-01-30 10:47:47 -08:00
lock_dir.rmdir()
except Exception:
2026-01-31 19:57:09 -08:00
logger.exception("Failed to release save lock directory %s", lock_dir)
2026-01-30 10:47:47 -08:00
2026-01-23 17:12:15 -08:00
def save_config(config: Dict[str, Any]) -> int:
    """Persist *config* to the SQLite config table.

    Takes the cross-process save lock, then rewrites the entire config table
    inside one transaction. Before writing, the current DB state is compared
    against the _LAST_SAVED_CONFIG baseline: if the DB changed externally and
    we have local changes, ConfigSaveConflict is raised to avoid clobbering;
    if we have no local changes, the write is skipped and caches refresh from
    disk. Locked-database errors are retried with linear backoff.

    Returns the number of rows written (0 when the write was skipped).
    Raises ConfigSaveConflict on lock timeout or external-change conflict, and
    re-raises database errors after retries are exhausted.
    """
    global _CONFIG_CACHE, _LAST_SAVED_CONFIG
    # Normalize AllDebrid keys so provider/store sections agree before writing.
    _sync_alldebrid_api_key(config)
    # Acquire cross-process save lock to avoid concurrent saves from different
    # processes which can lead to race conditions and DB-level overwrite.
    lock_dir = None
    try:
        lock_dir = _acquire_save_lock()
    except ConfigSaveConflict:
        # Surface a clear exception to callers so they can retry or handle it.
        raise
    previous_config = deepcopy(_LAST_SAVED_CONFIG)
    changed_count = _count_changed_entries(previous_config, config)
    def _write_entries() -> int:
        # Inner helper so the retry loop below can re-run the whole transaction.
        global _CONFIG_CACHE, _LAST_SAVED_CONFIG
        count = 0
        # Use the transaction-provided connection directly to avoid re-acquiring
        # the connection lock via db.* helpers which can lead to deadlock.
        with db.transaction() as conn:
            # Detect concurrent changes by reading the current DB state inside the
            # same transaction before mutating it. Use the transaction connection
            # directly to avoid acquiring the connection lock again (deadlock).
            try:
                cur = conn.cursor()
                cur.execute("SELECT category, subtype, item_name, key, value FROM config")
                rows = cur.fetchall()
                current_disk = rows_to_config(rows)
                cur.close()
            except Exception:
                # Unreadable table: treat as empty and let the comparison decide.
                current_disk = {}
            if current_disk != _LAST_SAVED_CONFIG:
                # If we have no local changes, refresh caches and skip the write.
                if changed_count == 0:
                    log("Skip save: disk configuration changed since last load and no local changes; not writing to DB.")
                    # Refresh local caches to match the disk
                    _CONFIG_CACHE = current_disk
                    _LAST_SAVED_CONFIG = deepcopy(current_disk)
                    return 0
                # Otherwise, abort to avoid overwriting external changes
                raise ConfigSaveConflict(
                    "Configuration on disk changed since you started editing; save aborted to prevent overwrite. Reload and reapply your changes."
                )
            # Proceed with writing when no conflicting external changes detected
            conn.execute("DELETE FROM config")
            for key, value in config.items():
                if key in ('store', 'provider', 'tool') and isinstance(value, dict):
                    for subtype, instances in value.items():
                        if not isinstance(instances, dict):
                            continue
                        if key == 'store':
                            # Store sections carry named instances (e.g. "all-debrid").
                            for name, settings in instances.items():
                                if isinstance(settings, dict):
                                    for k, v in settings.items():
                                        # Non-string values are JSON-encoded for the TEXT column.
                                        val_str = json.dumps(v) if not isinstance(v, str) else v
                                        conn.execute(
                                            "INSERT OR REPLACE INTO config (category, subtype, item_name, key, value) VALUES (?, ?, ?, ?, ?)",
                                            (key, subtype, name, k, val_str),
                                        )
                                        count += 1
                        else:
                            # Provider/tool sections collapse onto a "default" instance.
                            normalized_subtype = subtype
                            if key == 'provider':
                                normalized_subtype = _normalize_provider_name(subtype)
                                if not normalized_subtype:
                                    continue
                            for k, v in instances.items():
                                val_str = json.dumps(v) if not isinstance(v, str) else v
                                conn.execute(
                                    "INSERT OR REPLACE INTO config (category, subtype, item_name, key, value) VALUES (?, ?, ?, ?, ?)",
                                    (key, normalized_subtype, "default", k, val_str),
                                )
                                count += 1
                else:
                    # Scalar top-level settings; private ("_"-prefixed) and None values are skipped.
                    if not key.startswith("_") and value is not None:
                        val_str = json.dumps(value) if not isinstance(value, str) else value
                        conn.execute(
                            "INSERT OR REPLACE INTO config (category, subtype, item_name, key, value) VALUES (?, ?, ?, ?, ?)",
                            ("global", "none", "none", key, val_str),
                        )
                        count += 1
        return count
    saved_entries = 0
    attempts = 0
    while True:
        try:
            saved_entries = _write_entries()
            # Central log entry
            log(
                f"Synced {saved_entries} entries to {db.db_path} "
                f"({changed_count} changed entries)"
            )
            # Try to checkpoint WAL to ensure main DB file reflects latest state.
            # Use a separate short-lived connection to perform the checkpoint so
            # we don't contend with our main connection lock or active transactions.
            try:
                try:
                    with sqlite3.connect(str(db.db_path), timeout=5.0) as _con:
                        _con.execute("PRAGMA wal_checkpoint(TRUNCATE)")
                except Exception:
                    # Fall back to a passive checkpoint if TRUNCATE is refused.
                    with sqlite3.connect(str(db.db_path), timeout=5.0) as _con:
                        _con.execute("PRAGMA wal_checkpoint")
            except Exception as exc:
                log(f"Warning: WAL checkpoint failed: {exc}")
            # Forensics disabled: audit/logs/backups removed to keep save lean.
            # Release the save lock we acquired earlier
            try:
                if lock_dir is not None and lock_dir.exists():
                    _release_save_lock(lock_dir)
            except Exception as exc:
                logger.exception("Failed to release save lock during save flow: %s", exc)
            break
        except sqlite3.OperationalError as exc:
            attempts += 1
            locked_error = "locked" in str(exc).lower()
            if not locked_error or attempts >= _CONFIG_SAVE_MAX_RETRIES:
                log(f"CRITICAL: Database write failed: {exc}")
                # Ensure we release potential save lock before bubbling error
                try:
                    if lock_dir is not None and lock_dir.exists():
                        _release_save_lock(lock_dir)
                except Exception as exc:
                    logger.exception("Failed to release save lock after DB write failure: %s", exc)
                raise
            # Linear backoff before retrying a "database is locked" error.
            delay = _CONFIG_SAVE_RETRY_DELAY * attempts
            log(f"Database locked; retry {attempts}/{_CONFIG_SAVE_MAX_RETRIES} in {delay:.2f}s")
            time.sleep(delay)
        except Exception as exc:
            log(f"CRITICAL: Configuration save failed: {exc}")
            try:
                if lock_dir is not None and lock_dir.exists():
                    _release_save_lock(lock_dir)
            except Exception as exc:
                logger.exception("Failed to release save lock after CRITICAL configuration save failure: %s", exc)
            raise
    # Success: the in-memory cache and baseline now describe exactly what was written.
    clear_config_cache()
    _CONFIG_CACHE = config
    _LAST_SAVED_CONFIG = deepcopy(config)
    return saved_entries
2025-12-29 19:00:00 -08:00
def load() -> Dict[str, Any]:
    """Convenience alias for load_config()."""
    return load_config()
2026-01-23 16:46:48 -08:00
def save(config: Dict[str, Any]) -> int:
    """Convenience alias for save_config(); persists *config* to the database."""
    return save_config(config)
2026-01-30 10:47:47 -08:00
2026-01-31 15:37:17 -08:00
def save_config_and_verify(config: Dict[str, Any], retries: int = 3, delay: float = 0.15) -> int:
    """Save configuration and verify crucial keys persisted to disk.

    Performs a best-effort verification loop that reloads the configuration
    from the database and confirms that modified API key entries (currently
    the AllDebrid key) were written successfully.

    Args:
        config: Configuration dict to persist.
        retries: Number of save+verify attempts before giving up (minimum 1).
        delay: Base sleep between attempts, scaled by the attempt number.

    Returns:
        The entry count reported by the successful save_config() call.

    Raises:
        RuntimeError: If verification still fails after *retries* attempts.
    """

    def _alldebrid_key(cfg: Any) -> Optional[str]:
        # Single extraction path (via _extract_api_key) instead of the three
        # hand-duplicated key loops this function previously carried.
        # Provider-level key takes precedence over the store-level one.
        if not isinstance(cfg, dict):
            return None
        providers = cfg.get("provider")
        if isinstance(providers, dict):
            key = _extract_api_key(providers.get("alldebrid"))
            if key:
                return key
        store_block = cfg.get("store")
        if isinstance(store_block, dict):
            debrid = store_block.get("debrid")
            if isinstance(debrid, dict):
                return _extract_api_key(debrid.get("all-debrid"))
        return None

    # Detect an API key that should be verified (provider or store-backed)
    expected_key = None
    try:
        expected_key = _alldebrid_key(config)
    except Exception as exc:
        logger.debug("Failed to determine expected key for save verification: %s", exc, exc_info=True)
        expected_key = None
    last_exc: Exception | None = None
    for attempt in range(1, max(1, int(retries)) + 1):
        try:
            saved = save_config(config)
            if not expected_key:
                # Nothing special to verify; return success.
                return saved
            # Reload directly from disk and compare the canonical debrid/provider keys
            clear_config_cache()
            reloaded = load_config()
            # Provider-level key
            prov_block = reloaded.get("provider", {}) if isinstance(reloaded, dict) else {}
            prov_key = _extract_api_key(prov_block.get("alldebrid")) if isinstance(prov_block, dict) else None
            # Store-level key
            try:
                store_key = get_debrid_api_key(reloaded, service="All-debrid")
            except Exception:
                store_key = None
            if prov_key == expected_key or store_key == expected_key:
                try:
                    # Log a short, masked fingerprint to aid debugging without exposing the key itself
                    import hashlib
                    fp = hashlib.sha256(expected_key.encode("utf-8")).hexdigest()[:8]
                    log(f"Verified AllDebrid API key persisted (fingerprint={fp})")
                except Exception:
                    # If hashing/logging fails, don't abort the save
                    pass
                return saved
            # Not yet persisted; log and retry
            log(f"Warning: Post-save verification attempt {attempt} failed (expected key not found in DB). Retrying...")
            time.sleep(delay * attempt)
        except Exception as exc:
            last_exc = exc
            log(f"Warning: save and verify attempt {attempt} failed: {exc}")
            time.sleep(delay * attempt)
    # All retries exhausted
    raise RuntimeError(f"Post-save verification failed after {retries} attempts: {last_exc}")
2026-01-30 10:47:47 -08:00
def count_changed_entries(config: Dict[str, Any]) -> int:
    """Return how many configuration entries differ from the last-saved snapshot.

    Useful for user-facing messages that report how many entries were actually
    modified, rather than the total number of rows persisted to the database.
    """
    return _count_changed_entries(_LAST_SAVED_CONFIG, config)