This commit is contained in:
2026-01-22 11:05:40 -08:00
parent 874939a65b
commit 2ae651c225
7 changed files with 106 additions and 108 deletions

3
.gitignore vendored
View File

@@ -243,4 +243,5 @@ authtoken.secret
mypy.
.idea
medios.db
medios.db
medios*

2
CLI.py
View File

@@ -19,7 +19,7 @@ if not os.environ.get("MM_DEBUG"):
db_path = Path(__file__).resolve().parent / "medios.db"
if db_path.exists():
import sqlite3
with sqlite3.connect(str(db_path)) as conn:
with sqlite3.connect(str(db_path), timeout=30.0) as conn:
cur = conn.cursor()
# Check for global debug key
cur.execute("SELECT value FROM config WHERE key = 'debug' AND category = 'global'")

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import re
import tempfile
import json
from pathlib import Path
from typing import Any, Dict, Optional, List
from SYS.logger import log
@@ -39,80 +40,6 @@ def clear_config_cache() -> None:
_CONFIG_CACHE.clear()
def reload_config(
    config_dir: Optional[Path] = None, filename: str = "medios.db"
) -> Dict[str, Any]:
    """Invalidate the cached DB-backed config and return a fresh copy.

    The arguments are kept only for backward compatibility with the old
    file-based API; loading always goes through the database.
    """
    # Drop the cache entry (if any) so load_config() hits the database again.
    _CONFIG_CACHE.pop("db_config", None)
    return load_config(config_dir=config_dir, filename=filename)
def load_config(
    config_dir: Optional[Path] = None, filename: str = "medios.db"
) -> Dict[str, Any]:
    """Load the configuration from the database.

    *config_dir* and *filename* are no longer used for the config file
    itself, but are kept in the signature for backward compatibility.

    Returns:
        The configuration dict (cached after the first successful load),
        or an empty dict when the database is unavailable or empty.
    """
    cache_key = "db_config"
    if cache_key in _CONFIG_CACHE:
        return _CONFIG_CACHE[cache_key]
    # Load from database (best-effort: any failure yields an empty config).
    try:
        from SYS.database import get_config_all
        db_config = get_config_all()
        if db_config:
            _CONFIG_CACHE[cache_key] = db_config
            return db_config
    except Exception as e:
        # Previously this swallowed the error silently; log it so a broken
        # database does not masquerade as an intentionally empty config.
        log(f"Failed to load config from database: {e}")
    return {}
def save_config(
    config: Dict[str, Any],
    config_dir: Optional[Path] = None,
    filename: str = "medios.db",
) -> None:
    """Persist configuration to the database.

    *config_dir*/*filename* are unused (storage is DB-backed) but kept for
    backward compatibility.  Failures are logged, never raised.  The
    in-memory cache is refreshed regardless, so the caller's view stays
    consistent with what it just passed in.
    """
    try:
        from SYS.database import save_config_value
        for key, value in config.items():
            if key in ('store', 'provider', 'tool'):
                if isinstance(value, dict):
                    _save_category(save_config_value, key, value)
            elif not key.startswith("_"):
                # Global (uncategorised) setting.
                save_config_value("global", "none", "none", key, value)
    except Exception as e:
        log(f"Failed to save config to database: {e}")
    _CONFIG_CACHE["db_config"] = config


def _save_category(save_config_value, category: str, subtypes: Dict[str, Any]) -> None:
    """Persist one categorised section ('store', 'provider' or 'tool').

    'store' is nested as config['store'][subtype][name][key], while
    provider/tool are flat: config[cat][subtype][key].
    """
    for subtype, instances in subtypes.items():
        if not isinstance(instances, dict):
            continue
        if category == 'store':
            for name, settings in instances.items():
                if isinstance(settings, dict):
                    for k, v in settings.items():
                        save_config_value(category, subtype, name, k, v)
        else:
            for k, v in instances.items():
                save_config_value(category, subtype, "default", k, v)
def load() -> Dict[str, Any]:
    """Convenience wrapper: return the parsed configuration from the database."""
    return load_config()
def save(config: Dict[str, Any]) -> None:
    """Convenience wrapper: persist *config* back to the database."""
    save_config(config)
def _make_cache_key(config_dir: Optional[Path], filename: str, actual_path: Optional[Path]) -> str:
if actual_path:
return str(actual_path.resolve())
@@ -436,6 +363,11 @@ def _validate_config_safety(config: Dict[str, Any]) -> None:
return
def _serialize_conf(config: Dict[str, Any]) -> str:
"""Serialize configuration to a string for legacy .conf files."""
return json.dumps(config, indent=4)
def save_config(
config: Dict[str, Any],
config_dir: Optional[Path] = None,
@@ -448,27 +380,28 @@ def save_config(
try:
from SYS.database import db, save_config_value
# We want to clear and re-save or just update?
# For simplicity, we'll iterate and update.
for key, value in config.items():
if key in ('store', 'provider', 'tool'):
if isinstance(value, dict):
for subtype, instances in value.items():
if isinstance(instances, dict):
# provider/tool are usually config[cat][subtype][key]
# but store is config['store'][subtype][name][key]
if key == 'store':
for name, settings in instances.items():
if isinstance(settings, dict):
for k, v in settings.items():
save_config_value(key, subtype, name, k, v)
else:
for k, v in instances.items():
save_config_value(key, subtype, "default", k, v)
else:
# global settings
if not key.startswith("_"):
save_config_value("global", "none", "none", key, value)
with db.transaction():
# We want to clear and re-save or just update?
# For simplicity, we'll iterate and update.
for key, value in config.items():
if key in ('store', 'provider', 'tool'):
if isinstance(value, dict):
for subtype, instances in value.items():
if isinstance(instances, dict):
# provider/tool are usually config[cat][subtype][key]
# but store is config['store'][subtype][name][key]
if key == 'store':
for name, settings in instances.items():
if isinstance(settings, dict):
for k, v in settings.items():
save_config_value(key, subtype, name, k, v)
else:
for k, v in instances.items():
save_config_value(key, subtype, "default", k, v)
else:
# global settings
if not key.startswith("_"):
save_config_value("global", "none", "none", key, value)
except Exception as e:
log(f"Failed to save config to database: {e}")

View File

@@ -4,6 +4,7 @@ import sqlite3
import json
from pathlib import Path
from typing import Any, Dict, List, Optional
from contextlib import contextmanager
# The database is located in the project root
ROOT_DIR = Path(__file__).resolve().parent.parent
@@ -19,8 +20,20 @@ class Database:
return cls._instance
def _init_db(self):
self.conn = sqlite3.connect(str(DB_PATH), check_same_thread=False)
self.conn = sqlite3.connect(
str(DB_PATH),
check_same_thread=False,
timeout=30.0 # Increase timeout to 30s to avoid locking issues
)
self.conn.row_factory = sqlite3.Row
# Use WAL mode for better concurrency (allows multiple readers + 1 writer)
try:
self.conn.execute("PRAGMA journal_mode=WAL")
self.conn.execute("PRAGMA synchronous=NORMAL")
except sqlite3.Error:
pass
self._create_tables()
def _create_tables(self):
@@ -89,19 +102,58 @@ class Database:
def execute(self, query: str, params: tuple = ()):
cursor = self.conn.cursor()
cursor.execute(query, params)
self.conn.commit()
return cursor
try:
cursor.execute(query, params)
if not self.conn.in_transaction:
self.conn.commit()
return cursor
except Exception:
if not self.conn.in_transaction:
self.conn.rollback()
raise
def executemany(self, query: str, param_list: List[tuple]):
    """Execute *query* once per tuple in *param_list* and return the cursor.

    Commits immediately unless an explicit transaction() context is active.

    NOTE(fix): testing ``self.conn.in_transaction`` alone is wrong here —
    sqlite3 opens an *implicit* transaction for any DML, so that flag is
    True right after executemany() and the commit would always be skipped
    (changes then roll back on close).  We instead consult ``_txn_depth``,
    which only the explicit transaction() context manager sets; when the
    attribute is absent we own the statement and must commit/rollback.
    """
    cursor = self.conn.cursor()
    try:
        cursor.executemany(query, param_list)
        if not getattr(self, "_txn_depth", 0):
            self.conn.commit()
        return cursor
    except Exception:
        if not getattr(self, "_txn_depth", 0):
            self.conn.rollback()
        raise
@contextmanager
def transaction(self):
    """Context manager for an explicit database transaction.

    Nested use participates in the outer transaction (no savepoints).
    Tracks nesting in ``_txn_depth`` so the execute helpers can tell an
    explicit transaction apart from the implicit one sqlite3 opens for
    DML (``conn.in_transaction`` cannot distinguish the two).
    """
    self._txn_depth = getattr(self, "_txn_depth", 0) + 1
    try:
        if self.conn.in_transaction:
            # Already inside a transaction: just yield into it.
            yield self.conn
        else:
            try:
                self.conn.execute("BEGIN")
                yield self.conn
                self.conn.commit()
            except Exception:
                self.conn.rollback()
                raise
    finally:
        self._txn_depth -= 1
def fetchall(self, query: str, params: tuple = ()):
cursor = self.conn.cursor()
cursor.execute(query, params)
return cursor.fetchall()
try:
cursor.execute(query, params)
return cursor.fetchall()
finally:
cursor.close()
def fetchone(self, query: str, params: tuple = ()):
cursor = self.conn.cursor()
cursor.execute(query, params)
return cursor.fetchone()
try:
cursor.execute(query, params)
return cursor.fetchone()
finally:
cursor.close()
# Singleton instance
db = Database()

View File

@@ -751,7 +751,7 @@ def main() -> int:
if db_path.exists():
try:
import sqlite3
with sqlite3.connect(str(db_path)) as conn:
with sqlite3.connect(str(db_path), timeout=30.0) as conn:
# We want to set store.hydrusnetwork.hydrus.<key>
cur = conn.cursor()
# Check if hydrusnetwork store exists

View File

@@ -161,7 +161,7 @@ def update_medios_config(hydrus_path: Path) -> bool:
try:
import sqlite3
import json
with sqlite3.connect(str(db_path)) as conn:
with sqlite3.connect(str(db_path), timeout=30.0) as conn:
conn.row_factory = sqlite3.Row
cur = conn.cursor()

View File

@@ -791,7 +791,19 @@ def main(argv: Optional[List[str]] = None) -> int:
# and the user did not explicitly pass --venv. This matches the user's likely
# intent when they called: <venv_python> scripts/run_client.py ...
cur_py = Path(sys.executable)
if args.venv is None and _is_running_in_virtualenv() and cur_py:
# However, if we've already found a repo-local venv and the current Python
# is external to the repository, we do NOT prefer it yet - we'll verify the
# repo-local one first. This prevents tools like Medios-Macina from
# accidentally installing their own venv into the repo's services.
cur_is_external = True
try:
if repo_root in cur_py.resolve().parents:
cur_is_external = False
except Exception:
pass
if args.venv is None and _is_running_in_virtualenv() and cur_py and (not venv_py or not cur_is_external):
# If current interpreter looks like a venv and can import required modules,
# prefer it immediately rather than forcing the repo venv.
req = find_requirements(repo_root)