fdsfdsd
@@ -507,7 +507,7 @@
             "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})"
         ],
         "regexp": "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})",
-        "status": false
+        "status": true
     },
     "mexashare": {
         "name": "mexashare",
@@ -679,7 +679,7 @@
             "(uploadboy\\.com/[0-9a-zA-Z]{12})"
         ],
         "regexp": "(uploadboy\\.com/[0-9a-zA-Z]{12})",
-        "status": false
+        "status": true
     },
     "uploader": {
         "name": "uploader",

SYS/config.py | 361
@@ -5,21 +5,37 @@ from __future__ import annotations
 import json
 import sqlite3
 import time
+import os
+import traceback
+import datetime
+import sys
+import getpass
+import hashlib
 from copy import deepcopy
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple
 from SYS.logger import log
 from SYS.utils import expand_path
-from SYS.database import db, get_config_all, save_config_value
+from SYS.database import db, get_config_all, save_config_value, rows_to_config
 
 SCRIPT_DIR = Path(__file__).resolve().parent
 
+# Save lock settings (cross-process)
+_SAVE_LOCK_DIRNAME = ".medios_save_lock"
+_SAVE_LOCK_TIMEOUT = 30.0  # seconds to wait for save lock
+_SAVE_LOCK_STALE_SECONDS = 3600  # consider lock stale after 1 hour
+
 _CONFIG_CACHE: Dict[str, Any] = {}
 _LAST_SAVED_CONFIG: Dict[str, Any] = {}
 _CONFIG_SAVE_MAX_RETRIES = 5
 _CONFIG_SAVE_RETRY_DELAY = 0.15
 
 
+class ConfigSaveConflict(Exception):
+    """Raised when a save would overwrite external changes present on disk."""
+    pass
+
+
 def global_config() -> List[Dict[str, Any]]:
     """Return configuration schema for global settings."""
     return [
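Review note: callers of save_config() now have to be ready for ConfigSaveConflict. A minimal sketch (not part of this commit; apply_edits is a hypothetical callback) of the reload-and-retry pattern the exception is designed for:

    from SYS.config import load_config, save_config, ConfigSaveConflict

    def save_with_one_retry(apply_edits):
        cfg = load_config()
        apply_edits(cfg)
        try:
            return save_config(cfg)
        except ConfigSaveConflict:
            # Another process won the race; rebase our edits on the new state.
            cfg = load_config()
            apply_edits(cfg)
            return save_config(cfg)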
@@ -455,6 +471,70 @@ def load_config() -> Dict[str, Any]:
     _sync_alldebrid_api_key(db_config)
     _CONFIG_CACHE = db_config
     _LAST_SAVED_CONFIG = deepcopy(db_config)
+    try:
+        # Log a compact summary to help detect startup overwrites/mismatches
+        provs = list(db_config.get("provider", {}).keys()) if isinstance(db_config.get("provider"), dict) else []
+        stores = list(db_config.get("store", {}).keys()) if isinstance(db_config.get("store"), dict) else []
+        mtime = None
+        try:
+            mtime = datetime.datetime.utcfromtimestamp(db.db_path.stat().st_mtime).isoformat() + "Z"
+        except Exception:
+            mtime = None
+        summary = (
+            f"Loaded config from {db.db_path.name}: providers={len(provs)} ({', '.join(provs[:10])}{'...' if len(provs) > 10 else ''}), "
+            f"stores={len(stores)} ({', '.join(stores[:10])}{'...' if len(stores) > 10 else ''}), mtime={mtime}"
+        )
+        log(summary)
+
+        # Try to detect if the most recent audit indicates we previously saved items
+        # that are no longer present in the loaded config (possible overwrite/restore)
+        try:
+            audit_path = Path(db.db_path).with_name("config_audit.log")
+            if audit_path.exists():
+                last_line = None
+                with audit_path.open("r", encoding="utf-8") as fh:
+                    for line in fh:
+                        if line and line.strip():
+                            last_line = line
+                if last_line:
+                    try:
+                        last_entry = json.loads(last_line)
+                        last_provs = set(last_entry.get("providers") or [])
+                        current_provs = set(provs)
+                        missing = sorted(list(last_provs - current_provs))
+                        if missing:
+                            log(
+                                f"WARNING: Config mismatch on load - last saved providers {sorted(list(last_provs))} "
+                                f"are missing from loaded config: {missing} (last saved {last_entry.get('dt')})"
+                            )
+                            try:
+                                # Write a forensic mismatch record to help diagnose potential overwrites
+                                mismatch_path = Path(db.db_path).with_name("config_mismatch.log")
+                                record = {
+                                    "detected": datetime.datetime.utcnow().isoformat() + "Z",
+                                    "db": str(db.db_path),
+                                    "db_mtime": mtime,
+                                    "last_saved_dt": last_entry.get("dt"),
+                                    "last_saved_providers": sorted(list(last_provs)),
+                                    "missing": missing,
+                                }
+                                try:
+                                    backup_dir = Path(db.db_path).with_name("config_backups")
+                                    if backup_dir.exists():
+                                        files = sorted(backup_dir.glob("medios-backup-*.db"), key=lambda p: p.stat().st_mtime, reverse=True)
+                                        record["latest_backup"] = str(files[0]) if files else None
+                                except Exception:
+                                    pass
+                                with mismatch_path.open("a", encoding="utf-8") as fh:
+                                    fh.write(json.dumps(record) + "\n")
+                            except Exception:
+                                pass
+                    except Exception:
+                        pass
+        except Exception:
+            pass
+    except Exception:
+        pass
     return db_config
 
 _LAST_SAVED_CONFIG = {}
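Review note: the audit file is JSON Lines (one object appended per save), so the "most recent save" is simply the last non-blank line. The same tail-read the loader uses above, as a standalone sketch:

    import json
    from pathlib import Path

    def last_audit_entry(db_path):
        audit = Path(db_path).with_name("config_audit.log")
        if not audit.exists():
            return None
        last = None
        with audit.open("r", encoding="utf-8") as fh:
            for line in fh:
                if line.strip():
                    last = line
        return json.loads(last) if last else None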
@@ -466,19 +546,121 @@ def reload_config() -> Dict[str, Any]:
     return load_config()
 
 
+def _acquire_save_lock(timeout: float = _SAVE_LOCK_TIMEOUT):
+    """Acquire a cross-process save lock implemented as a directory.
+
+    Returns the Path to the created lock directory. Raises ConfigSaveConflict
+    if the lock cannot be acquired within the timeout.
+    """
+    lock_dir = Path(db.db_path).with_name(_SAVE_LOCK_DIRNAME)
+    start = time.time()
+    while True:
+        try:
+            lock_dir.mkdir(exist_ok=False)
+            # Write owner metadata for diagnostics
+            try:
+                (lock_dir / "owner.json").write_text(json.dumps({
+                    "pid": os.getpid(),
+                    "ts": time.time(),
+                    "cmdline": " ".join(sys.argv),
+                }))
+            except Exception:
+                pass
+            return lock_dir
+        except FileExistsError:
+            # Check for stale lock
+            try:
+                owner = lock_dir / "owner.json"
+                if owner.exists():
+                    data = json.loads(owner.read_text())
+                    ts = data.get("ts") or 0
+                    if time.time() - ts > _SAVE_LOCK_STALE_SECONDS:
+                        try:
+                            import shutil
+                            shutil.rmtree(lock_dir)
+                            continue
+                        except Exception:
+                            pass
+                else:
+                    # No owner file; if directory is old enough consider it stale
+                    try:
+                        if time.time() - lock_dir.stat().st_mtime > _SAVE_LOCK_STALE_SECONDS:
+                            import shutil
+                            shutil.rmtree(lock_dir)
+                            continue
+                    except Exception:
+                        pass
+            except Exception:
+                pass
+            if time.time() - start > timeout:
+                raise ConfigSaveConflict("Save lock busy; could not acquire in time")
+            time.sleep(0.1)
+
+
+def _release_save_lock(lock_dir: Path) -> None:
+    try:
+        owner = lock_dir / "owner.json"
+        try:
+            if owner.exists():
+                owner.unlink()
+        except Exception:
+            pass
+        lock_dir.rmdir()
+    except Exception:
+        pass
+
+
 def save_config(config: Dict[str, Any]) -> int:
     global _CONFIG_CACHE, _LAST_SAVED_CONFIG
     _sync_alldebrid_api_key(config)
+
+    # Acquire cross-process save lock to avoid concurrent saves from different
+    # processes which can lead to race conditions and DB-level overwrite.
+    lock_dir = None
+    try:
+        lock_dir = _acquire_save_lock()
+    except ConfigSaveConflict:
+        # Surface a clear exception to callers so they can retry or handle it.
+        raise
+
     previous_config = deepcopy(_LAST_SAVED_CONFIG)
     changed_count = _count_changed_entries(previous_config, config)
 
     def _write_entries() -> int:
+        global _CONFIG_CACHE, _LAST_SAVED_CONFIG
         count = 0
-        with db.transaction():
-            db.execute("DELETE FROM config")
+        # Use the transaction-provided connection directly to avoid re-acquiring
+        # the connection lock via db.* helpers which can lead to deadlock.
+        with db.transaction() as conn:
+            # Detect concurrent changes by reading the current DB state inside the
+            # same transaction before mutating it. Use the transaction connection
+            # directly to avoid acquiring the connection lock again (deadlock).
+            try:
+                cur = conn.cursor()
+                cur.execute("SELECT category, subtype, item_name, key, value FROM config")
+                rows = cur.fetchall()
+                current_disk = rows_to_config(rows)
+                cur.close()
+            except Exception:
+                current_disk = {}
+
+            if current_disk != _LAST_SAVED_CONFIG:
+                # If we have no local changes, refresh caches and skip the write.
+                if changed_count == 0:
+                    log("Skip save: disk configuration changed since last load and no local changes; not writing to DB.")
+                    # Refresh local caches to match the disk
+                    _CONFIG_CACHE = current_disk
+                    _LAST_SAVED_CONFIG = deepcopy(current_disk)
+                    return 0
+                # Otherwise, abort to avoid overwriting external changes
+                raise ConfigSaveConflict(
+                    "Configuration on disk changed since you started editing; save aborted to prevent overwrite. Reload and reapply your changes."
+                )
+
+            # Proceed with writing when no conflicting external changes detected
+            conn.execute("DELETE FROM config")
             for key, value in config.items():
-                if key in ('store', 'provider', 'tool'):
-                    if isinstance(value, dict):
+                if key in ('store', 'provider', 'tool') and isinstance(value, dict):
                     for subtype, instances in value.items():
                         if not isinstance(instances, dict):
                             continue
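Review note: the mkdir-based lock works because directory creation is atomic on common filesystems, but the acquire/release pair is easy to leak on error paths (the hunks below release it by hand in three places). A context-manager wrapper (hypothetical, not in this commit) would centralize that:

    from contextlib import contextmanager

    @contextmanager
    def _save_lock():
        lock_dir = _acquire_save_lock()
        try:
            yield lock_dir
        finally:
            _release_save_lock(lock_dir)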
@@ -486,7 +668,11 @@ def save_config(config: Dict[str, Any]) -> int:
                         for name, settings in instances.items():
                             if isinstance(settings, dict):
                                 for k, v in settings.items():
-                                    save_config_value(key, subtype, name, k, v)
+                                    val_str = json.dumps(v) if not isinstance(v, str) else v
+                                    conn.execute(
+                                        "INSERT OR REPLACE INTO config (category, subtype, item_name, key, value) VALUES (?, ?, ?, ?, ?)",
+                                        (key, subtype, name, k, val_str),
+                                    )
                                     count += 1
                             else:
                                 normalized_subtype = subtype
@@ -495,35 +681,185 @@ def save_config(config: Dict[str, Any]) -> int:
                                 if not normalized_subtype:
                                     continue
                                 for k, v in instances.items():
-                                    save_config_value(key, normalized_subtype, "default", k, v)
+                                    val_str = json.dumps(v) if not isinstance(v, str) else v
+                                    conn.execute(
+                                        "INSERT OR REPLACE INTO config (category, subtype, item_name, key, value) VALUES (?, ?, ?, ?, ?)",
+                                        (key, normalized_subtype, "default", k, val_str),
+                                    )
                                     count += 1
                 else:
                     if not key.startswith("_") and value is not None:
-                        save_config_value("global", "none", "none", key, value)
+                        val_str = json.dumps(value) if not isinstance(value, str) else value
+                        conn.execute(
+                            "INSERT OR REPLACE INTO config (category, subtype, item_name, key, value) VALUES (?, ?, ?, ?, ?)",
+                            ("global", "none", "none", key, val_str),
+                        )
                         count += 1
         return count
 
     saved_entries = 0
     attempts = 0
     while True:
         try:
             saved_entries = _write_entries()
+            # Central log entry
             log(
                 f"Synced {saved_entries} entries to {db.db_path} "
                 f"({changed_count} changed entries)"
             )
+
+            # Try to checkpoint WAL to ensure main DB file reflects latest state.
+            # Use a separate short-lived connection to perform the checkpoint so
+            # we don't contend with our main connection lock or active transactions.
+            try:
+                try:
+                    with sqlite3.connect(str(db.db_path), timeout=5.0) as _con:
+                        _con.execute("PRAGMA wal_checkpoint(TRUNCATE)")
+                except Exception:
+                    with sqlite3.connect(str(db.db_path), timeout=5.0) as _con:
+                        _con.execute("PRAGMA wal_checkpoint")
+            except Exception as exc:
+                log(f"Warning: WAL checkpoint failed: {exc}")
+
+            # Audit to disk so we can correlate saves across restarts and processes.
+            try:
+                audit_path = Path(db.db_path).with_name("config_audit.log")
+
+                # Gather non-secret summary info (provider/store names)
+                provider_names = []
+                store_names = []
+                try:
+                    pblock = config.get("provider")
+                    if isinstance(pblock, dict):
+                        provider_names = [str(k) for k in pblock.keys()]
+                except Exception:
+                    provider_names = []
+                try:
+                    sblock = config.get("store")
+                    if isinstance(sblock, dict):
+                        store_names = [str(k) for k in sblock.keys()]
+                except Exception:
+                    store_names = []
+
+                stack = traceback.format_stack()
+                caller = stack[-1].strip() if stack else ""
+
+                # Try to include the database file modification time for correlation
+                db_mtime = None
+                try:
+                    db_mtime = datetime.datetime.utcfromtimestamp(db.db_path.stat().st_mtime).isoformat() + "Z"
+                except Exception:
+                    db_mtime = None
+
+                # Create a consistent timestamped backup of the DB so we can recover later
+                backup_path = None
+                try:
+                    backup_dir = Path(db.db_path).with_name("config_backups")
+                    backup_dir.mkdir(parents=False, exist_ok=True)
+                    ts = datetime.datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
+                    candidate = backup_dir / f"medios-backup-{ts}.db"
+                    try:
+                        # Use sqlite backup API for a consistent copy
+                        src_con = sqlite3.connect(str(db.db_path))
+                        dest_con = sqlite3.connect(str(candidate))
+                        src_con.backup(dest_con)
+                        dest_con.close()
+                        src_con.close()
+                        backup_path = str(candidate)
+                    except Exception as e:
+                        log(f"Warning: Failed to create DB backup: {e}")
+
+                    # Prune older backups (keep last 20)
+                    try:
+                        files = sorted(backup_dir.glob("medios-backup-*.db"), key=lambda p: p.stat().st_mtime, reverse=True)
+                        for old in files[20:]:
+                            try:
+                                old.unlink()
+                            except Exception:
+                                pass
+                    except Exception:
+                        pass
+                except Exception:
+                    backup_path = None
+
+                # Collect process/exec info and a short hash of the config for forensic tracing
+                try:
+                    exe = sys.executable
+                    argv = list(sys.argv)
+                    cwd = os.getcwd()
+                    user = getpass.getuser()
+                    try:
+                        cfg_hash = hashlib.md5(json.dumps(config, sort_keys=True).encode('utf-8')).hexdigest()
+                    except Exception:
+                        cfg_hash = None
+                except Exception:
+                    exe = None
+                    argv = None
+                    cwd = None
+                    user = None
+                    cfg_hash = None
+
+                entry = {
+                    "ts": time.time(),
+                    "dt": datetime.datetime.utcnow().isoformat() + "Z",
+                    "pid": os.getpid(),
+                    "exe": exe,
+                    "argv": argv,
+                    "cwd": cwd,
+                    "user": user,
+                    "stack": "".join(stack[-20:]),
+                    "caller": caller,
+                    "config_hash": cfg_hash,
+                    "saved_entries": saved_entries,
+                    "changed_count": changed_count,
+                    "db": str(db.db_path),
+                    "db_mtime": db_mtime,
+                    "backup": backup_path,
+                    "providers": provider_names,
+                    "stores": store_names,
+                }
+                try:
+                    with audit_path.open("a", encoding="utf-8") as fh:
+                        fh.write(json.dumps(entry) + "\n")
+                except Exception:
+                    # Best-effort; don't fail the save if audit write fails
+                    log("Warning: Failed to write config audit file")
+            except Exception:
+                pass
+            finally:
+                # Release the save lock we acquired earlier
+                try:
+                    if lock_dir is not None and lock_dir.exists():
+                        _release_save_lock(lock_dir)
+                except Exception:
+                    pass
+
             break
         except sqlite3.OperationalError as exc:
             attempts += 1
             locked_error = "locked" in str(exc).lower()
             if not locked_error or attempts >= _CONFIG_SAVE_MAX_RETRIES:
                 log(f"CRITICAL: Database write failed: {exc}")
+                # Ensure we release potential save lock before bubbling error
+                try:
+                    if lock_dir is not None and lock_dir.exists():
+                        _release_save_lock(lock_dir)
+                except Exception:
+                    pass
                 raise
             delay = _CONFIG_SAVE_RETRY_DELAY * attempts
             log(f"Database locked; retry {attempts}/{_CONFIG_SAVE_MAX_RETRIES} in {delay:.2f}s")
             time.sleep(delay)
         except Exception as exc:
             log(f"CRITICAL: Configuration save failed: {exc}")
+            try:
+                if lock_dir is not None and lock_dir.exists():
+                    _release_save_lock(lock_dir)
+            except Exception:
+                pass
             raise
 
     clear_config_cache()
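Review note: the backup step uses sqlite3.Connection.backup (stdlib, Python 3.7+), which copies a live database page by page and yields a consistent snapshot even while other connections write; that is why it is preferred over a plain file copy. Standalone sketch:

    import sqlite3

    def backup_db(src_path: str, dest_path: str) -> None:
        src = sqlite3.connect(src_path)
        dest = sqlite3.connect(dest_path)
        try:
            src.backup(dest)  # consistent snapshot of a live DB
        finally:
            dest.close()
            src.close()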
@@ -540,3 +876,12 @@ def load() -> Dict[str, Any]:
 def save(config: Dict[str, Any]) -> int:
     """Persist *config* back to disk."""
     return save_config(config)
+
+
+def count_changed_entries(config: Dict[str, Any]) -> int:
+    """Return the number of changed configuration entries compared to the last saved snapshot.
+
+    This is useful for user-facing messages that want to indicate how many entries
+    were actually modified, not the total number of rows persisted to the database.
+    """
+    return _count_changed_entries(_LAST_SAVED_CONFIG, config)

SYS/database.py | 169
@@ -8,8 +8,14 @@ from queue import Queue
 from pathlib import Path
 from typing import Any, Dict, List, Optional
 from contextlib import contextmanager
+import time
+import datetime
 from SYS.logger import log
 
+# DB execute retry settings (for transient 'database is locked' errors)
+_DB_EXEC_RETRY_MAX = 5
+_DB_EXEC_RETRY_BASE_DELAY = 0.05
+
 # The database is located in the project root (prefer explicit repo hints).
 def _resolve_root_dir() -> Path:
     env_root = (
@@ -65,9 +71,14 @@ class Database:
             timeout=30.0  # Increase timeout to 30s to avoid locking issues
         )
         self.conn.row_factory = sqlite3.Row
+        # Reentrant lock to allow nested DB calls within the same thread (e.g., transaction ->
+        # get_config_all / save_config_value) without deadlocking.
+        self._conn_lock = threading.RLock()
+
         # Use WAL mode for better concurrency (allows multiple readers + 1 writer)
+        # Set a busy timeout so SQLite waits for short locks rather than immediately failing
         try:
+            self.conn.execute("PRAGMA busy_timeout = 30000")
             self.conn.execute("PRAGMA journal_mode=WAL")
             self.conn.execute("PRAGMA synchronous=NORMAL")
         except sqlite3.Error:
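Review note: the three pragmas cooperate: busy_timeout makes SQLite wait (in milliseconds) on a locked database instead of raising immediately, WAL lets readers proceed alongside a single writer, and synchronous=NORMAL is the usual reduced-fsync setting considered safe under WAL. The same setup on a throwaway connection:

    import sqlite3

    conn = sqlite3.connect("example.db", timeout=30.0)
    conn.execute("PRAGMA busy_timeout = 30000")  # wait instead of 'database is locked'
    conn.execute("PRAGMA journal_mode=WAL")      # concurrent readers + one writer
    conn.execute("PRAGMA synchronous=NORMAL")    # fewer fsyncs; safe with WAL
    conn.close()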
@@ -140,45 +151,113 @@ class Database:
         return self.conn
 
     def execute(self, query: str, params: tuple = ()):
+        attempts = 0
+        while True:
+            # Serialize access to the underlying sqlite connection to avoid
+            # concurrent use from multiple threads which can trigger locks.
+            with self._conn_lock:
                 cursor = self.conn.cursor()
                 try:
                     cursor.execute(query, params)
                     if not self.conn.in_transaction:
                         self.conn.commit()
                     return cursor
-                except Exception:
+                except sqlite3.OperationalError as exc:
+                    msg = str(exc).lower()
+                    # Retry a few times on transient lock errors
+                    if 'locked' in msg and attempts < _DB_EXEC_RETRY_MAX:
+                        attempts += 1
+                        delay = _DB_EXEC_RETRY_BASE_DELAY * attempts
+                        log(f"Database locked on execute; retry {attempts}/{_DB_EXEC_RETRY_MAX} in {delay:.2f}s")
+                        try:
                             if not self.conn.in_transaction:
                                 self.conn.rollback()
+                        except Exception:
+                            pass
+                        time.sleep(delay)
+                        continue
+                    # Not recoverable or out of retries
+                    if not self.conn.in_transaction:
+                        try:
+                            self.conn.rollback()
+                        except Exception:
+                            pass
+                    raise
+                except Exception:
+                    if not self.conn.in_transaction:
+                        try:
+                            self.conn.rollback()
+                        except Exception:
+                            pass
                     raise
 
     def executemany(self, query: str, param_list: List[tuple]):
+        attempts = 0
+        while True:
+            with self._conn_lock:
                 cursor = self.conn.cursor()
                 try:
                     cursor.executemany(query, param_list)
                     if not self.conn.in_transaction:
                         self.conn.commit()
                     return cursor
-                except Exception:
+                except sqlite3.OperationalError as exc:
+                    msg = str(exc).lower()
+                    if 'locked' in msg and attempts < _DB_EXEC_RETRY_MAX:
+                        attempts += 1
+                        delay = _DB_EXEC_RETRY_BASE_DELAY * attempts
+                        log(f"Database locked on executemany; retry {attempts}/{_DB_EXEC_RETRY_MAX} in {delay:.2f}s")
+                        try:
                             if not self.conn.in_transaction:
                                 self.conn.rollback()
+                        except Exception:
+                            pass
+                        time.sleep(delay)
+                        continue
+                    if not self.conn.in_transaction:
+                        try:
+                            self.conn.rollback()
+                        except Exception:
+                            pass
+                    raise
+                except Exception:
+                    if not self.conn.in_transaction:
+                        try:
+                            self.conn.rollback()
+                        except Exception:
+                            pass
                     raise
 
     @contextmanager
     def transaction(self):
-        """Context manager for a database transaction."""
+        """Context manager for a database transaction.
+
+        Transactions acquire the connection lock for the duration of the transaction
+        to prevent other threads from performing concurrent operations on the
+        same sqlite connection which can lead to locking issues.
+        """
         if self.conn.in_transaction:
             # Already in a transaction, just yield
             yield self.conn
         else:
+            # Hold the connection lock for the lifetime of the transaction
+            self._conn_lock.acquire()
             try:
                 self.conn.execute("BEGIN")
+                try:
                     yield self.conn
                     self.conn.commit()
                 except Exception:
                     self.conn.rollback()
                     raise
+            finally:
+                try:
+                    self._conn_lock.release()
+                except Exception:
+                    pass
 
     def fetchall(self, query: str, params: tuple = ()):
+        with self._conn_lock:
             cursor = self.conn.cursor()
             try:
                 cursor.execute(query, params)
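Review note: execute() and executemany() now share the same linear-backoff retry shape. Factored out as a standalone helper (hypothetical, for illustration only):

    import sqlite3
    import time

    def retry_on_locked(fn, max_attempts=5, base_delay=0.05):
        attempts = 0
        while True:
            try:
                return fn()
            except sqlite3.OperationalError as exc:
                if "locked" not in str(exc).lower() or attempts >= max_attempts:
                    raise
                attempts += 1
                time.sleep(base_delay * attempts)  # 0.05s, 0.10s, 0.15s, ...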
@@ -187,13 +266,13 @@ class Database:
                 cursor.close()
 
     def fetchone(self, query: str, params: tuple = ()):
+        with self._conn_lock:
             cursor = self.conn.cursor()
             try:
                 cursor.execute(query, params)
                 return cursor.fetchone()
             finally:
                 cursor.close()
 
 # Singleton instance
 db = Database()
@@ -203,13 +282,48 @@ _LOG_THREAD_LOCK = threading.Lock()
 
 
 def _log_worker_loop() -> None:
+    """Background log writer using a temporary per-write connection with
+    small retry/backoff and a file fallback when writes fail repeatedly.
+    """
     while True:
         level, module, message = _LOG_QUEUE.get()
         try:
-            db.execute(
-                "INSERT INTO logs (level, module, message) VALUES (?, ?, ?)",
-                (level, module, message)
-            )
+            attempts = 0
+            written = False
+            while attempts < 3 and not written:
+                try:
+                    # Create a short-lived connection for the logging write so the
+                    # logging thread does not contend with the main connection lock.
+                    conn = sqlite3.connect(str(db.db_path), timeout=5.0)
+                    cur = conn.cursor()
+                    cur.execute("INSERT INTO logs (level, module, message) VALUES (?, ?, ?)", (level, module, message))
+                    conn.commit()
+                    cur.close()
+                    conn.close()
+                    written = True
+                except sqlite3.OperationalError as exc:
+                    attempts += 1
+                    if 'locked' in str(exc).lower():
+                        time.sleep(0.05 * attempts)
+                        continue
+                    # Non-lock operational errors: abort attempts
+                    log(f"Warning: Failed to write log entry (operational): {exc}")
+                    break
+                except Exception as exc:
+                    log(f"Warning: Failed to write log entry: {exc}")
+                    break
+            if not written:
+                # Fallback to a file-based log so we never lose the message silently
+                try:
+                    fallback_dir = Path(db.db_path).with_name("logs")
+                    fallback_dir.mkdir(parents=True, exist_ok=True)
+                    fallback_file = fallback_dir / "log_fallback.txt"
+                    with fallback_file.open("a", encoding="utf-8") as fh:
+                        fh.write(f"{datetime.datetime.utcnow().isoformat()}Z [{level}] {module}: {message}\n")
+                except Exception:
+                    # Last resort: print to stderr
+                    try:
+                        log(f"ERROR: Could not persist log message: {level} {module} {message}")
         except Exception:
             pass
         finally:
@@ -261,11 +375,14 @@ def save_config_value(category: str, subtype: str, item_name: str, key: str, val
         (category, subtype, item_name, key, val_str)
     )
 
-def get_config_all() -> Dict[str, Any]:
-    """Retrieve all configuration from the database in the legacy dict format."""
-    rows = db.fetchall("SELECT category, subtype, item_name, key, value FROM config")
-    config: Dict[str, Any] = {}
+def rows_to_config(rows) -> Dict[str, Any]:
+    """Convert DB rows (category, subtype, item_name, key, value) into a config dict.
+
+    This central helper is used by `get_config_all` and callers that need to
+    parse rows fetched with a transaction connection to avoid nested lock
+    acquisitions.
+    """
+    config: Dict[str, Any] = {}
     for row in rows:
         cat = row['category']
         sub = row['subtype']
@@ -277,11 +394,31 @@ def get_config_all() -> Dict[str, Any]:
         if cat == 'store' and str(sub).strip().lower() == 'folder':
             continue
 
-        # Try to parse JSON value, fallback to string
+        # Conservative JSON parsing: only attempt to decode when the value
+        # looks like JSON (object/array/quoted string/true/false/null/number).
+        parsed_val = val
+        try:
+            if isinstance(val, str):
+                s = val.strip()
+                if s == "":
+                    parsed_val = ""
+                else:
+                    first = s[0]
+                    lowered = s.lower()
+                    if first in ('{', '[', '"') or lowered in ('true', 'false', 'null') or __import__('re').fullmatch(r'-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?', s):
                         try:
                             parsed_val = json.loads(val)
                         except Exception:
                             parsed_val = val
+                    else:
+                        parsed_val = val
+            else:
+                try:
+                    parsed_val = json.loads(val)
+                except Exception:
+                    parsed_val = val
+        except Exception:
+            parsed_val = val
 
         if cat == 'global':
             config[key] = parsed_val
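Review note: the point of the conservative parse is that plain strings stored verbatim (e.g. "on", "none", "1.2.3") no longer get mangled by an eager json.loads round-trip; only values whose shape is unambiguously JSON are decoded. The predicate as a standalone sketch:

    import json
    import re

    _NUM = re.compile(r'-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?')

    def parse_config_value(val):
        if not isinstance(val, str):
            return val
        s = val.strip()
        if not s:
            return ""
        if s[0] in ('{', '[', '"') or s.lower() in ('true', 'false', 'null') or _NUM.fullmatch(s):
            try:
                return json.loads(val)
            except Exception:
                pass
        return val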
@@ -301,6 +438,12 @@ def get_config_all() -> Dict[str, Any]:
 
     return config
 
+
+def get_config_all() -> Dict[str, Any]:
+    """Retrieve all configuration from the database in the legacy dict format."""
+    rows = db.fetchall("SELECT category, subtype, item_name, key, value FROM config")
+    return rows_to_config(rows)
+
 # Worker Management Methods for medios.db
 
 def insert_worker(worker_id: str, worker_type: str, title: str = "", description: str = "") -> bool:

TUI.py | 12
@@ -42,6 +42,7 @@ from TUI.pipeline_runner import PipelineRunResult  # type: ignore # noqa: E402
 from SYS.result_table import Table, extract_hash_value, extract_store_value  # type: ignore # noqa: E402
 
 from SYS.config import load_config  # type: ignore # noqa: E402
+from SYS.database import db
 from Store.registry import Store as StoreRegistry  # type: ignore # noqa: E402
 from SYS.cmdlet_catalog import ensure_registry_loaded, list_cmdlet_names  # type: ignore # noqa: E402
 from SYS.cli_syntax import validate_pipeline_text  # type: ignore # noqa: E402
@@ -525,6 +526,17 @@ class PipelineHubApp(App):
         # Run startup check automatically
         self._run_pipeline_background(".status")
 
+        # Provide a visible startup summary of configured providers/stores for debugging
+        try:
+            cfg = load_config() or {}
+            provs = list(cfg.get("provider", {}).keys()) if isinstance(cfg.get("provider"), dict) else []
+            stores = list(cfg.get("store", {}).keys()) if isinstance(cfg.get("store"), dict) else []
+            prov_display = ", ".join(provs[:10]) + ("..." if len(provs) > 10 else "")
+            store_display = ", ".join(stores[:10]) + ("..." if len(stores) > 10 else "")
+            self._append_log_line(f"Startup config: providers={len(provs)} ({prov_display or '(none)'}), stores={len(stores)} ({store_display or '(none)'}), db={db.db_path.name}")
+        except Exception:
+            pass
+
     # ------------------------------------------------------------------
     # Actions
     # ------------------------------------------------------------------
@@ -7,8 +7,10 @@ from textual.app import ComposeResult
 from textual.containers import Container, Horizontal, Vertical, ScrollableContainer
 from textual.screen import ModalScreen
 from textual.widgets import Static, Button, Input, Label, ListView, ListItem, Rule, Select
+from pathlib import Path
 
-from SYS.config import load_config, save_config, reload_config, global_config
+from SYS.config import load_config, save_config, reload_config, global_config, count_changed_entries, ConfigSaveConflict
+from SYS.database import db
 from SYS.logger import log
 from Store.registry import _discover_store_classes, _required_keys_for
 from ProviderCore.registry import list_providers
@@ -124,6 +126,8 @@ class ConfigModal(ModalScreen):
         self._matrix_status: Optional[Static] = None
         self._matrix_test_running = False
         self._editor_snapshot: Optional[Dict[str, Any]] = None
+        # Path to the database file used by this process (for diagnostics)
+        self._db_path = str(db.db_path)
 
     def _capture_editor_snapshot(self) -> None:
         self._editor_snapshot = deepcopy(self.config_data)
@@ -141,6 +145,8 @@ class ConfigModal(ModalScreen):
     def compose(self) -> ComposeResult:
         with Container(id="config-container"):
             yield Static("CONFIGURATION EDITOR", classes="section-title")
+            yield Static(f"DB: {self._db_path}", classes="config-label", id="config-db-path")
+            yield Static("Last saved: unknown", classes="config-label", id="config-last-save")
             with Horizontal():
                 with Vertical(id="config-sidebar"):
                     yield Label("Categories", classes="config-label")
@@ -156,11 +162,28 @@ class ConfigModal(ModalScreen):
                     yield Button("Add Store", variant="primary", id="add-store-btn")
                     yield Button("Add Provider", variant="primary", id="add-provider-btn")
                     yield Button("Back", id="back-btn")
+                    yield Button("Restore Backup", id="restore-backup-btn")
+                    yield Button("Copy DB Path", id="copy-db-btn")
                     yield Button("Close", variant="error", id="cancel-btn")
 
     def on_mount(self) -> None:
         self.query_one("#add-store-btn", Button).display = False
         self.query_one("#add-provider-btn", Button).display = False
+        # Update DB path and last-saved on mount
+        try:
+            self.query_one("#config-db-path", Static).update(self._db_path)
+        except Exception:
+            pass
+        try:
+            mtime = None
+            try:
+                mtime = db.db_path.stat().st_mtime
+                mtime = __import__('datetime').datetime.utcfromtimestamp(mtime).isoformat() + "Z"
+            except Exception:
+                mtime = None
+            self.query_one("#config-last-save", Static).update(f"Last saved: {mtime or '(unknown)'}")
+        except Exception:
+            pass
         self.refresh_view()
 
     def refresh_view(self) -> None:
@@ -568,19 +591,37 @@ class ConfigModal(ModalScreen):
             if not self.validate_current_editor():
                 return
             if self.editing_item_name and not self._editor_has_changes():
-                self.notify("No changes to save", severity="warning")
+                self.notify("No changes to save", severity="warning", timeout=3)
                 return
             try:
                 saved = self.save_all()
-                msg = f"Configuration saved ({saved} entries)"
                 if saved == 0:
-                    msg = "Configuration saved (no rows changed)"
-                self.notify(msg)
+                    msg = f"Configuration saved (no rows changed) to {db.db_path.name}"
+                else:
+                    msg = f"Configuration saved ({saved} change(s)) to {db.db_path.name}"
+                # Make the success notification visible a bit longer so it's not missed
+                self.notify(msg, timeout=5)
                 # Return to the main list view within the current category
                 self.editing_item_name = None
                 self.editing_item_type = None
                 self.refresh_view()
                 self._editor_snapshot = None
+            except ConfigSaveConflict as exc:
+                # A concurrent on-disk change was detected; do not overwrite it.
+                self.notify(
+                    "Save aborted: configuration changed on disk. The editor will refresh.",
+                    severity="error",
+                    timeout=10,
+                )
+                # Refresh our in-memory view from disk and drop the editor snapshot
+                try:
+                    self.config_data = reload_config()
+                except Exception:
+                    pass
+                self._editor_snapshot = None
+                self.editing_item_name = None
+                self.editing_item_type = None
+                self.refresh_view()
             except Exception as exc:
                 self.notify(f"Save failed: {exc}", severity="error", timeout=10)
         elif bid in self._button_id_map:
@@ -602,13 +643,12 @@ class ConfigModal(ModalScreen):
             if "provider" in self.config_data and name in self.config_data["provider"]:
                 del self.config_data["provider"][name]
                 removed = True
+                if str(name).strip().lower() == "alldebrid":
+                    self._remove_alldebrid_store_entry()
             if removed:
                 try:
                     saved = self.save_all()
-                    msg = f"Configuration saved ({saved} entries)"
-                    if saved == 0:
-                        msg = "Configuration saved (no rows changed)"
-                    self.notify(msg)
+                    self.notify("Saving configuration...", timeout=3)
                 except Exception as exc:
                     self.notify(f"Save failed: {exc}", severity="error", timeout=10)
             self.refresh_view()
@@ -640,6 +680,27 @@ class ConfigModal(ModalScreen):
             self._request_matrix_test()
         elif bid == "matrix-rooms-btn":
             self._open_matrix_room_picker()
+        elif bid == "restore-backup-btn":
+            try:
+                backup = self._get_last_backup_path()
+                if not backup:
+                    self.notify("No backups available", severity="warning")
+                else:
+                    # Ask for confirmation via a simple notification and perform restore
+                    self.notify(f"Restoring {backup.name}...", timeout=2)
+                    self._restore_backup_background(str(backup))
+            except Exception as exc:
+                self.notify(f"Restore failed: {exc}", severity="error")
+        elif bid == "copy-db-btn":
+            try:
+                if hasattr(self.app, "copy_to_clipboard"):
+                    self.app.copy_to_clipboard(str(db.db_path))
+                    self.notify("DB path copied to clipboard")
+                else:
+                    # Fall back to a visible notification
+                    self.notify(str(db.db_path))
+            except Exception:
+                self.notify("Failed to copy DB path", severity="warning")
         elif bid.startswith("paste-"):
             # Programmatic paste button
             target_id = bid.replace("paste-", "")
@@ -674,6 +735,65 @@ class ConfigModal(ModalScreen):
             else:
                 self.notify("Clipboard not supported in this terminal", severity="warning")
 
+    def _get_last_backup_path(self):
+        try:
+            backup_dir = Path(db.db_path).with_name("config_backups")
+            if not backup_dir.exists():
+                return None
+            files = sorted(backup_dir.glob("medios-backup-*.db"), key=lambda p: p.stat().st_mtime, reverse=True)
+            return files[0] if files else None
+        except Exception:
+            return None
+
+    @work(thread=True)
+    def _restore_backup_background(self, backup_path: str) -> None:
+        try:
+            import sqlite3, json
+            cfg = {}
+            with sqlite3.connect(backup_path) as conn:
+                cur = conn.cursor()
+                cur.execute("SELECT category, subtype, item_name, key, value FROM config")
+                rows = cur.fetchall()
+                for cat, sub, name, key, val in rows:
+                    try:
+                        parsed = json.loads(val)
+                    except Exception:
+                        parsed = val
+                    if cat == 'global':
+                        cfg[key] = parsed
+                    elif cat in ('provider', 'tool'):
+                        cd = cfg.setdefault(cat, {})
+                        sd = cd.setdefault(sub, {})
+                        sd[key] = parsed
+                    elif cat == 'store':
+                        cd = cfg.setdefault('store', {})
+                        sd = cd.setdefault(sub, {})
+                        nd = sd.setdefault(name, {})
+                        nd[key] = parsed
+                    else:
+                        cfg.setdefault(cat, {})[key] = parsed
+
+            # Persist restored config using save_config
+            from SYS.config import save_config, reload_config
+            saved = save_config(cfg)
+            # Reload and update UI from main thread
+            self.app.call_from_thread(self._on_restore_complete, True, backup_path, saved)
+        except Exception as exc:
+            self.app.call_from_thread(self._on_restore_complete, False, backup_path, str(exc))
+
+    def _on_restore_complete(self, success: bool, backup_path: str, saved_or_error):
+        if success:
+            # Refresh our in-memory view and UI
+            try:
+                from SYS.config import reload_config
+                self.config_data = reload_config()
+                self.refresh_view()
+            except Exception:
+                pass
+            self.notify(f"Restore complete: re-saved {saved_or_error} entries from {Path(backup_path).name}")
+        else:
+            self.notify(f"Restore failed: {saved_or_error}", severity="error")
+
     def on_store_type_selected(self, stype: str) -> None:
         if not stype:
             return
@@ -847,6 +967,28 @@ class ConfigModal(ModalScreen):
 
         self._update_config_value(widget_id, value)
 
+    def _remove_alldebrid_store_entry(self) -> bool:
+        """Remove the mirrored AllDebrid store entry that would recreate the provider."""
+        store_block = self.config_data.get("store")
+        if not isinstance(store_block, dict):
+            return False
+        debrid = store_block.get("debrid")
+        if not isinstance(debrid, dict):
+            return False
+
+        removed = False
+        for key in list(debrid.keys()):
+            if str(key or "").strip().lower() == "all-debrid":
+                debrid.pop(key, None)
+                removed = True
+
+        if not debrid:
+            store_block.pop("debrid", None)
+            if not store_block:
+                self.config_data.pop("store", None)
+
+        return removed
+
     def _get_matrix_provider_block(self) -> Dict[str, Any]:
         providers = self.config_data.get("provider")
         if not isinstance(providers, dict):
@@ -870,6 +1012,7 @@ class ConfigModal(ModalScreen):
         self._synchronize_inputs_to_config()
         if self._matrix_status:
            self._matrix_status.update("Saving configuration before testing…")
+        changed = count_changed_entries(self.config_data)
         try:
             entries = save_config(self.config_data)
         except Exception as exc:
@@ -879,7 +1022,7 @@ class ConfigModal(ModalScreen):
             return
         self.config_data = reload_config()
         if self._matrix_status:
-            self._matrix_status.update(f"Saved configuration ({entries} entries). Testing Matrix connection…")
+            self._matrix_status.update(f"Saved configuration ({changed} change(s)) to {db.db_path.name}. Testing Matrix connection…")
         self._matrix_test_running = True
         self._matrix_test_background()
@@ -941,6 +1084,7 @@ class ConfigModal(ModalScreen):
             return
         matrix_block = self.config_data.setdefault("provider", {}).setdefault("matrix", {})
         matrix_block["rooms"] = ", ".join(cleaned)
+        changed = count_changed_entries(self.config_data)
         try:
             entries = save_config(self.config_data)
         except Exception as exc:
@@ -949,7 +1093,7 @@ class ConfigModal(ModalScreen):
             return
         self.config_data = reload_config()
         if self._matrix_status:
-            status = f"Saved {len(cleaned)} default room(s) ({entries} rows persisted)."
+            status = f"Saved {len(cleaned)} default room(s) ({changed} change(s)) to {db.db_path.name}."
         self._matrix_status.update(status)
         self.refresh_view()
@@ -967,10 +1111,139 @@ class ConfigModal(ModalScreen):
 
     def save_all(self) -> int:
         self._synchronize_inputs_to_config()
-        entries = save_config(self.config_data)
+        # Compute change count prior to persisting so callers can report the number of
+        # actual changes rather than the total number of rows written to the DB.
+        changed = count_changed_entries(self.config_data)
+        # Snapshot config for background save
+        snapshot = deepcopy(self.config_data)
+        # Schedule the save to run on a background worker so the UI doesn't block.
+        try:
+            # Prefer Textual's worker when running inside the app
+            self._save_background(snapshot, changed)
+        except Exception:
+            # Fallback: start a plain thread that runs the underlying task body
+            import threading
+            func = getattr(self._save_background, "__wrapped__", None)
+            if func:
+                thread = threading.Thread(target=lambda: func(self, snapshot, changed), daemon=True)
+            else:
+                thread = threading.Thread(target=lambda: self._save_background(snapshot, changed), daemon=True)
+            thread.start()
+
+        # Ensure DB path indicator is current and show saving status
+        self._db_path = str(db.db_path)
+        try:
+            self.query_one("#config-db-path", Static).update(self._db_path)
+        except Exception:
+            pass
+        try:
+            self.query_one("#config-last-save", Static).update("Last saved: (saving...)")
+        except Exception:
+            pass
+
+        log(f"ConfigModal scheduled save (changed={changed})")
+        return changed
+
+    @work(thread=True)
+    def _save_background(self, cfg: Dict[str, Any], changed: int) -> None:
+        try:
+            saved_entries = save_config(cfg)
+            try:
+                appobj = self.app
+            except Exception:
+                appobj = None
+            if appobj and hasattr(appobj, 'call_from_thread'):
+                appobj.call_from_thread(self._on_save_complete, True, None, changed, saved_entries)
+            else:
+                # If no app (e.g., running under tests), call completion directly
+                self._on_save_complete(True, None, changed, saved_entries)
+        except ConfigSaveConflict as exc:
+            try:
+                appobj = self.app
+            except Exception:
+                appobj = None
+            if appobj and hasattr(appobj, 'call_from_thread'):
+                appobj.call_from_thread(self._on_save_complete, False, str(exc), changed, 0)
+            else:
+                self._on_save_complete(False, str(exc), changed, 0)
+        except Exception as exc:
+            try:
+                appobj = self.app
+            except Exception:
+                appobj = None
+            if appobj and hasattr(appobj, 'call_from_thread'):
+                appobj.call_from_thread(self._on_save_complete, False, str(exc), changed, 0)
+            else:
+                self._on_save_complete(False, str(exc), changed, 0)
+
+    def _on_save_complete(self, success: bool, error: Optional[str], changed: int, saved_entries: int) -> None:
+        # Safely determine whether a Textual app context is available. Accessing
+        # `self.app` can raise when not running inside the TUI; handle that.
+        try:
+            appobj = self.app
+        except Exception:
+            appobj = None
+
+        if success:
+            try:
                 self.config_data = reload_config()
-        log(f"ConfigModal saved {entries} configuration entries")
-        return entries
+            except Exception:
+                pass
+
+            # Update last-saved label with file timestamp for visibility
+            db_mtime = None
+            try:
+                db_mtime = db.db_path.stat().st_mtime
+                db_mtime = __import__('datetime').datetime.utcfromtimestamp(db_mtime).isoformat() + "Z"
+            except Exception:
+                db_mtime = None
+
+            if appobj:
+                try:
+                    if changed == 0:
+                        label_text = f"Last saved: (no changes)"
+                    else:
+                        label_text = f"Last saved: {changed} change(s) at {db_mtime or '(unknown)'}"
+                    try:
+                        self.query_one("#config-last-save", Static).update(label_text)
+                    except Exception:
+                        pass
+                except Exception:
+                    pass
+
+                try:
+                    self.refresh_view()
+                except Exception:
+                    pass
+
+                try:
+                    self.notify(f"Configuration saved ({changed} change(s)) to {db.db_path.name}", timeout=5)
+                except Exception:
+                    pass
+            else:
+                # No TUI available; log instead of updating UI
+                log(f"Configuration saved ({changed} change(s)) to {db.db_path.name}")
+
+            log(f"ConfigModal saved {saved_entries} configuration entries (changed={changed})")
+        else:
+            # Save failed; notify via UI if available, otherwise log
+            if appobj:
+                try:
+                    self.notify(f"Save failed: {error}", severity="error", timeout=10)
+                except Exception:
+                    pass
+
+                try:
+                    self.config_data = reload_config()
+                except Exception:
+                    pass
+
+                try:
+                    self.refresh_view()
+                except Exception:
+                    pass
+            else:
+                log(f"Save failed: {error}")
 
     def validate_current_editor(self) -> bool:
         """Ensure all required fields for the current item are filled."""
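Review note: save_all() now returns the precomputed change count immediately while the actual write happens on a Textual thread worker, with completion marshalled back via call_from_thread. The bare pattern, as a sketch (do_blocking_save is hypothetical):

    from textual import work
    from textual.widgets import Static

    class SavingWidget(Static):
        @work(thread=True)
        def _save_background(self, payload: dict) -> None:
            result = do_blocking_save(payload)  # blocking work off the event loop
            # UI updates must be marshalled back to the app thread:
            self.app.call_from_thread(self._done, result)

        def _done(self, result) -> None:
            self.app.notify(f"Saved {result} entries")

One consequence worth flagging: callers that treated save_all()'s return value as "rows persisted" now receive "entries changed", and the save can still fail after the method has returned.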