This commit is contained in:
2026-01-31 19:00:04 -08:00
parent dcf16e0cc4
commit 6513a3ad04
25 changed files with 617 additions and 397 deletions

View File

@@ -203,7 +203,7 @@ def _run_cli(clean_args: List[str]) -> int:
error_msg += f" - detected repo root: {repo}\n"
cli_path = repo / "CLI.py"
error_msg += f" - CLI.py exists at {cli_path}: {cli_path.exists()}\n"
except:
except Exception:
pass
error_msg += (
"\nRemedy: Run 'pip install -e scripts' from the project root or re-run the bootstrap script.\n"

View File

@@ -0,0 +1,109 @@
"""Migration utility: convert Python literal config values in the DB into canonical JSON.
Usage:
python scripts/migrate_config_literals.py [--apply] [--backup=path] [--quiet]
By default the script runs in dry-run mode and prints candidate rows it would change.
Use --apply to persist changes. --backup writes a JSON file listing changed rows before applying.
"""
from __future__ import annotations
import argparse
import sqlite3
import json
import ast
from pathlib import Path
from typing import Any, Dict, List, Tuple
DB = Path("medios.db")
def _is_json_like(s: str) -> bool:
if not isinstance(s, str):
return False
s = s.strip()
if not s:
return False
return s[0] in '{["' or s.lower() in ("true", "false", "null") or s[0].isdigit() or s[0] == "'"
def find_candidates(conn: sqlite3.Connection) -> List[Tuple[int, str, str, str, str, str]]:
    """Scan the config table for values stored as Python literals.

    A row is a candidate when its value is NOT already valid JSON but can be
    parsed by ast.literal_eval into something json.dumps accepts.  Returns
    tuples of (rowid, category, subtype, item_name, key, value-as-str); the
    value is the original stored text, not the re-encoded form.
    """
    cursor = conn.cursor()
    cursor.execute("SELECT rowid, category, subtype, item_name, key, value FROM config")
    candidates: List[Tuple[int, str, str, str, str, str]] = []
    for rowid, category, subtype, item_name, key, raw in cursor.fetchall():
        if raw is None:
            continue
        text = str(raw)
        if not _is_json_like(text):
            continue
        try:
            json.loads(text)
        except Exception:
            pass  # not valid JSON -- fall through to the literal check
        else:
            # Already canonical JSON; nothing to migrate for this row.
            continue
        try:
            parsed = ast.literal_eval(text)
            # Only keep values JSON can actually represent.
            json.dumps(parsed)
        except Exception:
            continue
        candidates.append((rowid, category, subtype, item_name, key, text))
    return candidates
def apply_migration(conn: sqlite3.Connection, candidates: List[Tuple[int, str, str, str, str, str]]) -> List[Tuple[int, str, str, str, str, str]]:
    """Rewrite each candidate row's value as canonical JSON.

    Rows whose value cannot be parsed (or whose UPDATE fails) are silently
    skipped.  Commits once after the loop and returns the rows that were
    changed, with the new JSON text in the value position.
    """
    cursor = conn.cursor()
    migrated: List[Tuple[int, str, str, str, str, str]] = []
    for rowid, category, subtype, item_name, key, old_value in candidates:
        try:
            encoded = json.dumps(ast.literal_eval(old_value), ensure_ascii=False)
            cursor.execute("UPDATE config SET value = ? WHERE rowid = ?", (encoded, rowid))
        except Exception:
            continue  # best-effort: leave unparseable rows untouched
        migrated.append((rowid, category, subtype, item_name, key, encoded))
    conn.commit()
    return migrated
def main() -> int:
    """CLI entry point for the literal-to-JSON config migration.

    Dry-runs by default, printing up to 50 candidate rows.  With --apply the
    candidate values are rewritten as canonical JSON; with --backup a JSON
    dump of the affected rows is written before anything is changed.
    Returns 0 (the process exit status).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--apply", action="store_true", help="Persist changes to DB")
    parser.add_argument("--backup", type=str, default=None, help="Path to write backup JSON of changed rows")
    parser.add_argument("--quiet", action="store_true", help="Minimize output")
    # Backward-compatible addition: lets the migration target a non-default DB.
    parser.add_argument("--db", type=str, default=str(DB), help="Path to the SQLite database (default: %(default)s)")
    args = parser.parse_args()
    conn = sqlite3.connect(args.db)
    try:
        candidates = find_candidates(conn)
        if not args.quiet:
            print(f"Found {len(candidates)} candidate rows for migration")
            # Cap the preview at 50 rows (and 200 chars per value) so large
            # tables stay readable.
            for r in candidates[:50]:
                rowid, cat, sub, name, key, val = r
                print(f"row {rowid}: {cat}.{sub}.{name} {key} -> {val[:200]!r}")
        if not candidates:
            return 0
        if args.backup:
            out_path = Path(args.backup)
            data = [dict(rowid=r[0], category=r[1], subtype=r[2], item_name=r[3], key=r[4], value=r[5]) for r in candidates]
            out_path.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8")
            if not args.quiet:
                print(f"Wrote backup to {out_path}")
        if args.apply:
            changed = apply_migration(conn, candidates)
            if not args.quiet:
                print(f"Applied migration to {len(changed)} rows")
            return 0
        if not args.quiet:
            print("Dry-run; re-run with --apply to persist changes")
        return 0
    finally:
        # Fix: the original leaked the connection on every path; always close.
        conn.close()
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == "__main__":
    raise SystemExit(main())