This commit is contained in:
2026-01-30 10:47:47 -08:00
parent a44b80fd1d
commit ab94c57244
5 changed files with 872 additions and 99 deletions

View File

@@ -8,8 +8,14 @@ from queue import Queue
from pathlib import Path
from typing import Any, Dict, List, Optional
from contextlib import contextmanager
import time
import datetime
from SYS.logger import log
# DB execute retry settings (for transient 'database is locked' errors)
# _DB_EXEC_RETRY_MAX: how many times execute/executemany re-attempt a
#   statement that failed with a 'locked' OperationalError.
# _DB_EXEC_RETRY_BASE_DELAY: base backoff in seconds; the actual delay
#   grows linearly with the attempt number (base * attempt).
_DB_EXEC_RETRY_MAX = 5
_DB_EXEC_RETRY_BASE_DELAY = 0.05
# The database is located in the project root (prefer explicit repo hints).
def _resolve_root_dir() -> Path:
env_root = (
@@ -65,9 +71,14 @@ class Database:
timeout=30.0 # Increase timeout to 30s to avoid locking issues
)
self.conn.row_factory = sqlite3.Row
# Reentrant lock to allow nested DB calls within the same thread (e.g., transaction ->
# get_config_all / save_config_value) without deadlocking.
self._conn_lock = threading.RLock()
# Use WAL mode for better concurrency (allows multiple readers + 1 writer)
# Set a busy timeout so SQLite waits for short locks rather than immediately failing
try:
self.conn.execute("PRAGMA busy_timeout = 30000")
self.conn.execute("PRAGMA journal_mode=WAL")
self.conn.execute("PRAGMA synchronous=NORMAL")
except sqlite3.Error:
@@ -139,61 +150,129 @@ class Database:
def get_connection(self):
    """Return the shared sqlite3 connection owned by this Database."""
    return self.conn
def execute(self, query: str, params: tuple = ()):
cursor = self.conn.cursor()
try:
cursor.execute(query, params)
if not self.conn.in_transaction:
self.conn.commit()
return cursor
except Exception:
if not self.conn.in_transaction:
self.conn.rollback()
raise
def execute(self, query: str, params: tuple = ()):
attempts = 0
while True:
# Serialize access to the underlying sqlite connection to avoid
# concurrent use from multiple threads which can trigger locks.
with self._conn_lock:
cursor = self.conn.cursor()
try:
cursor.execute(query, params)
if not self.conn.in_transaction:
self.conn.commit()
return cursor
except sqlite3.OperationalError as exc:
msg = str(exc).lower()
# Retry a few times on transient lock errors
if 'locked' in msg and attempts < _DB_EXEC_RETRY_MAX:
attempts += 1
delay = _DB_EXEC_RETRY_BASE_DELAY * attempts
log(f"Database locked on execute; retry {attempts}/{_DB_EXEC_RETRY_MAX} in {delay:.2f}s")
try:
if not self.conn.in_transaction:
self.conn.rollback()
except Exception:
pass
time.sleep(delay)
continue
# Not recoverable or out of retries
if not self.conn.in_transaction:
try:
self.conn.rollback()
except Exception:
pass
raise
except Exception:
if not self.conn.in_transaction:
try:
self.conn.rollback()
except Exception:
pass
raise
def executemany(self, query: str, param_list: List[tuple]):
cursor = self.conn.cursor()
try:
cursor.executemany(query, param_list)
if not self.conn.in_transaction:
self.conn.commit()
return cursor
except Exception:
if not self.conn.in_transaction:
self.conn.rollback()
raise
attempts = 0
while True:
with self._conn_lock:
cursor = self.conn.cursor()
try:
cursor.executemany(query, param_list)
if not self.conn.in_transaction:
self.conn.commit()
return cursor
except sqlite3.OperationalError as exc:
msg = str(exc).lower()
if 'locked' in msg and attempts < _DB_EXEC_RETRY_MAX:
attempts += 1
delay = _DB_EXEC_RETRY_BASE_DELAY * attempts
log(f"Database locked on executemany; retry {attempts}/{_DB_EXEC_RETRY_MAX} in {delay:.2f}s")
try:
if not self.conn.in_transaction:
self.conn.rollback()
except Exception:
pass
time.sleep(delay)
continue
if not self.conn.in_transaction:
try:
self.conn.rollback()
except Exception:
pass
raise
except Exception:
if not self.conn.in_transaction:
try:
self.conn.rollback()
except Exception:
pass
raise
@contextmanager
def transaction(self):
    """Context manager for a database transaction.

    Acquires the connection lock for the duration of the transaction so
    other threads cannot interleave operations on the same sqlite
    connection (a common cause of 'database is locked').  Commits on
    clean exit, rolls back and re-raises on exception.  If a transaction
    is already open, simply yields the connection (no nesting).

    BUGFIX: the rendered original contained two `yield` statements (old
    and new bodies merged); a @contextmanager generator that yields
    twice raises RuntimeError("generator didn't stop") after the `with`
    body completes.  This version yields exactly once per code path.
    """
    if self.conn.in_transaction:
        # Already in a transaction, just yield
        yield self.conn
        return
    # Hold the connection lock for the lifetime of the transaction.
    self._conn_lock.acquire()
    try:
        self.conn.execute("BEGIN")
        try:
            yield self.conn
            self.conn.commit()
        except Exception:
            self.conn.rollback()
            raise
    finally:
        self._conn_lock.release()
def fetchall(self, query: str, params: tuple = ()):
    """Run a read query and return all rows.

    Holds the connection lock while touching the shared connection and
    always closes the cursor.

    NOTE(review): the rendered original interleaved the pre-change
    fetchall body, a stale duplicate `def fetchone` (superseded by the
    later definition in this file), and the new locked fetchall body
    stranded after a `return`.  This is the reconstructed locked
    version; the stale residue is dropped.
    """
    with self._conn_lock:
        cursor = self.conn.cursor()
        try:
            cursor.execute(query, params)
            return cursor.fetchall()
        finally:
            cursor.close()
def fetchone(self, query: str, params: tuple = ()):
    """Run a read query under the connection lock and return the first
    matching row, or None when the result set is empty.  The cursor is
    closed unconditionally."""
    with self._conn_lock:
        cur = self.conn.cursor()
        try:
            cur.execute(query, params)
            row = cur.fetchone()
        finally:
            cur.close()
        return row
# Singleton instance
# Module-level shared Database handle; the rest of this module (and
# importers) go through `db` rather than constructing Database again.
db = Database()
@@ -203,15 +282,50 @@ _LOG_THREAD_LOCK = threading.Lock()
def _log_worker_loop() -> None:
    """Background log writer using a temporary per-write connection with
    small retry/backoff and a file fallback when writes fail repeatedly.

    NOTE(review): the rendered original merged the old inline
    `db.execute` path with the new per-write-connection path, leaving a
    `finally:` with no matching `try:` (a syntax error as rendered).
    Reconstructed so the whole write attempt sits in one try/finally
    that always marks the queue item done.  The failure paths call
    log(), which presumably re-enqueues into the same queue —
    best-effort by design, but confirm it cannot loop on persistent
    failures.
    """
    while True:
        level, module, message = _LOG_QUEUE.get()
        try:
            attempts = 0
            written = False
            while attempts < 3 and not written:
                try:
                    # Create a short-lived connection for the logging write so the
                    # logging thread does not contend with the main connection lock.
                    conn = sqlite3.connect(str(db.db_path), timeout=5.0)
                    cur = conn.cursor()
                    cur.execute("INSERT INTO logs (level, module, message) VALUES (?, ?, ?)", (level, module, message))
                    conn.commit()
                    cur.close()
                    conn.close()
                    written = True
                except sqlite3.OperationalError as exc:
                    attempts += 1
                    if 'locked' in str(exc).lower():
                        time.sleep(0.05 * attempts)
                        continue
                    # Non-lock operational errors: abort attempts
                    log(f"Warning: Failed to write log entry (operational): {exc}")
                    break
                except Exception as exc:
                    log(f"Warning: Failed to write log entry: {exc}")
                    break
            if not written:
                # Fallback to a file-based log so we never lose the message silently
                try:
                    fallback_dir = Path(db.db_path).with_name("logs")
                    fallback_dir.mkdir(parents=True, exist_ok=True)
                    fallback_file = fallback_dir / "log_fallback.txt"
                    with fallback_file.open("a", encoding="utf-8") as fh:
                        fh.write(f"{datetime.datetime.utcnow().isoformat()}Z [{level}] {module}: {message}\n")
                except Exception:
                    # Last resort: print to stderr
                    try:
                        log(f"ERROR: Could not persist log message: {level} {module} {message}")
                    except Exception:
                        pass
        finally:
            try:
                _LOG_QUEUE.task_done()
            except Exception:
                pass
@@ -261,11 +375,14 @@ def save_config_value(category: str, subtype: str, item_name: str, key: str, val
(category, subtype, item_name, key, val_str)
)
def get_config_all() -> Dict[str, Any]:
"""Retrieve all configuration from the database in the legacy dict format."""
rows = db.fetchall("SELECT category, subtype, item_name, key, value FROM config")
def rows_to_config(rows) -> Dict[str, Any]:
"""Convert DB rows (category, subtype, item_name, key, value) into a config dict.
This central helper is used by `get_config_all` and callers that need to
parse rows fetched with a transaction connection to avoid nested lock
acquisitions.
"""
config: Dict[str, Any] = {}
for row in rows:
cat = row['category']
sub = row['subtype']
@@ -276,13 +393,33 @@ def get_config_all() -> Dict[str, Any]:
# Drop legacy folder store entries (folder store is removed).
if cat == 'store' and str(sub).strip().lower() == 'folder':
continue
# Try to parse JSON value, fallback to string
# Conservative JSON parsing: only attempt to decode when the value
# looks like JSON (object/array/quoted string/true/false/null/number).
parsed_val = val
try:
parsed_val = json.loads(val)
if isinstance(val, str):
s = val.strip()
if s == "":
parsed_val = ""
else:
first = s[0]
lowered = s.lower()
if first in ('{', '[', '"') or lowered in ('true', 'false', 'null') or __import__('re').fullmatch(r'-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?', s):
try:
parsed_val = json.loads(val)
except Exception:
parsed_val = val
else:
parsed_val = val
else:
try:
parsed_val = json.loads(val)
except Exception:
parsed_val = val
except Exception:
parsed_val = val
if cat == 'global':
config[key] = parsed_val
else:
@@ -298,9 +435,15 @@ def get_config_all() -> Dict[str, Any]:
name_dict[key] = parsed_val
else:
config.setdefault(cat, {})[key] = parsed_val
return config
def get_config_all() -> Dict[str, Any]:
    """Retrieve all configuration from the database in the legacy dict format."""
    return rows_to_config(
        db.fetchall("SELECT category, subtype, item_name, key, value FROM config")
    )
# Worker Management Methods for medios.db
def insert_worker(worker_id: str, worker_type: str, title: str = "", description: str = "") -> bool: