This commit is contained in:
nose
2025-12-13 12:09:50 -08:00
parent 30eb628aa3
commit 52a79b0086
16 changed files with 729 additions and 655 deletions

CLI.py (189 changed lines)

@@ -779,14 +779,14 @@ def _create_cmdlet_cli():
     if startup_table:
         startup_table.set_no_choice(True).set_preserve_order(True)

-    def _add_startup_check(name: str, status: str, detail: str = "") -> None:
+    def _add_startup_check(status: str, name: str, store_or_provider: str, detail: str = "") -> None:
         if startup_table is None:
             return
         row = startup_table.add_row()
-        row.add_column("Check", name)
         row.add_column("Status", status)
-        if detail:
-            row.add_column("Detail", detail)
+        row.add_column("Name", name)
+        row.add_column("Store/Provi", store_or_provider)
+        row.add_column("Detail", detail or "")

     def _has_store_subtype(cfg: dict, subtype: str) -> bool:
         store_cfg = cfg.get("store")
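Below is a minimal, self-contained sketch of the reordered four-column layout (Status, Name, Store/Provider, Detail). A plain list of dicts stands in for the project's startup_table object, and the sample instance name, backend type, and detail strings are hypothetical.

from typing import Dict, List

startup_rows: List[Dict[str, str]] = []

def add_startup_check(status: str, name: str, store_or_provider: str, detail: str = "") -> None:
    # Same argument order as the new helper: status first, then name, backend type, detail.
    startup_rows.append({
        "Status": status,
        "Name": name,
        "Store/Provider": store_or_provider,
        "Detail": detail or "",
    })

add_startup_check("ENABLED", "home", "hydrusnetwork", "http://127.0.0.1:45869 - Connected")
add_startup_check("SKIPPED", "Folder", "folder", "No folder stores configured")

for row in startup_rows:
    print(f"{row['Status']:<9} {row['Name']:<10} {row['Store/Provider']:<15} {row['Detail']}")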
@@ -831,67 +831,150 @@ def _create_cmdlet_cli():
# Run startup checks and render table
try:
from hydrus_health_check import (
initialize_mpv_health_check,
initialize_matrix_health_check,
initialize_hydrus_health_check,
initialize_local_library_scan,
initialize_cookies_check,
initialize_debrid_health_check,
)
from hydrus_health_check import initialize_cookies_check
def _run_check(name: str, fn: Callable[[], Tuple[bool, Optional[str]]], skip_reason: Optional[str] = None) -> None:
if skip_reason:
_add_startup_check(name, "SKIPPED", skip_reason)
return
# MPV availability is validated by MPV.MPV.__init__.
try:
from MPV.mpv_ipc import MPV
MPV()
try:
ok, detail = fn()
status = "ENABLED" if name in {"MPV", "Hydrus", "Matrix", "Debrid"} else ("FOUND" if name == "Cookies" else "SCANNED")
if name == "Matrix":
status = "ENABLED" if ok else "DISABLED"
elif name == "Folder Stores":
status = "SCANNED" if ok else "SKIPPED"
elif name == "Cookies":
status = "FOUND" if ok else "MISSING"
elif name in {"MPV", "Hydrus", "Debrid"}:
status = "ENABLED" if ok else "DISABLED"
_add_startup_check(name, status, detail or "")
except Exception as exc: # Best-effort: never block startup
_add_startup_check(name, "ERROR", str(exc))
import shutil
_run_check("MPV", lambda: initialize_mpv_health_check(emit_debug=False))
mpv_path = shutil.which("mpv")
except Exception:
mpv_path = None
_add_startup_check("ENABLED", "MPV", "N/A", mpv_path or "Available")
except Exception as exc:
_add_startup_check("DISABLED", "MPV", "N/A", str(exc))
store_registry = None
if config:
# Instantiate store registry once; store __init__ performs its own validation.
try:
from Store import Store as StoreRegistry
store_registry = StoreRegistry(config=config, suppress_debug=True)
except Exception:
store_registry = None
# Only show checks that are configured in config.conf
if _has_store_subtype(config, "hydrusnetwork"):
_run_check("Hydrus", lambda: initialize_hydrus_health_check(config, emit_debug=False))
# HydrusNetwork self-validates in its __init__. We derive instance status from
# store instantiation rather than a separate Hydrus-specific health check.
store_cfg = config.get("store")
hydrus_cfg = store_cfg.get("hydrusnetwork", {}) if isinstance(store_cfg, dict) else {}
if isinstance(hydrus_cfg, dict):
for instance_name, instance_cfg in hydrus_cfg.items():
if not isinstance(instance_cfg, dict):
continue
name_key = str(instance_cfg.get("NAME") or instance_name)
url_val = str(instance_cfg.get("URL") or "").strip()
# Hydrus instances - add individual rows for each configured instance
from hydrus_health_check import _SERVICE_STATE
for instance_name, instance_info in _SERVICE_STATE.get("hydrusnetwork_stores", {}).items():
status = "ENABLED" if instance_info.get("ok") else "DISABLED"
_add_startup_check(f" {instance_name}", status, f"{instance_info.get('url')} - {instance_info.get('detail')}")
ok = bool(store_registry and store_registry.is_available(name_key))
status = "ENABLED" if ok else "DISABLED"
if ok:
total = None
try:
if store_registry:
backend = store_registry[name_key]
total = getattr(backend, "total_count", None)
except Exception:
total = None
detail = (url_val + (" - " if url_val else "")) + "Connected"
if isinstance(total, int) and total >= 0:
detail += f" (Total: {total})"
else:
err = None
if store_registry:
err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
detail = (url_val + (" - " if url_val else "")) + (err or "Unavailable")
_add_startup_check(status, name_key, "hydrusnetwork", detail)
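# A self-contained sketch of the per-instance status derivation above. The stub below
# mimics only the registry calls used here (is_available / get_backend_error); the
# instance name, URL, and error text are hypothetical.
hydrus_cfg_example = {"home": {"NAME": "home", "URL": "http://127.0.0.1:45869"}}

class StubRegistry:
    def __init__(self, up):
        self.up = set(up)
    def is_available(self, name):
        return name in self.up
    def get_backend_error(self, name):
        return None if name in self.up else "connection refused"

registry = StubRegistry(up=["home"])
for instance_name, instance_cfg in hydrus_cfg_example.items():
    name_key = str(instance_cfg.get("NAME") or instance_name)
    url_val = str(instance_cfg.get("URL") or "").strip()
    if registry.is_available(name_key):
        print("ENABLED", name_key, "hydrusnetwork", f"{url_val} - Connected")
    else:
        err = registry.get_backend_error(name_key) or "Unavailable"
        print("DISABLED", name_key, "hydrusnetwork", f"{url_val} - {err}")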
if _has_provider(config, "matrix"):
_run_check("Matrix", lambda: initialize_matrix_health_check(config, emit_debug=False))
# Matrix availability is validated by Provider.matrix.Matrix.__init__.
try:
from Provider.matrix import Matrix
provider = Matrix(config)
matrix_conf = config.get("provider", {}).get("matrix", {}) if isinstance(config, dict) else {}
homeserver = str(matrix_conf.get("homeserver") or "").strip()
room_id = str(matrix_conf.get("room_id") or "").strip()
if homeserver and not homeserver.startswith("http"):
homeserver = f"https://{homeserver}"
target = homeserver.rstrip("/")
if room_id:
target = (target + (" " if target else "")) + f"room:{room_id}"
if provider.validate():
_add_startup_check("ENABLED", "Matrix", "matrix", target or "Connected")
else:
missing: list[str] = []
if not homeserver:
missing.append("homeserver")
if not room_id:
missing.append("room_id")
if not (matrix_conf.get("access_token") or matrix_conf.get("password")):
missing.append("access_token/password")
detail = "Not configured" + (f" ({', '.join(missing)})" if missing else "")
_add_startup_check("DISABLED", "Matrix", "matrix", detail)
except Exception as exc:
_add_startup_check("DISABLED", "Matrix", "matrix", str(exc))
if _has_store_subtype(config, "folder"):
# Folder stores - add individual rows for each configured store
ok, detail = initialize_local_library_scan(config, emit_debug=False)
if ok or detail != "No folder stores configured":
from hydrus_health_check import _SERVICE_STATE
for store_name, store_info in _SERVICE_STATE.get("folder_stores", {}).items():
status = "SCANNED" if store_info.get("ok") else "ERROR"
_add_startup_check(f" {store_name}", status, f"{store_info.get('path')} - {store_info.get('detail')}")
if not _SERVICE_STATE.get("folder_stores"):
_add_startup_check("Folder Stores", "SCANNED", detail)
# Folder local scan/index is performed by Store.Folder.__init__.
store_cfg = config.get("store")
folder_cfg = store_cfg.get("folder", {}) if isinstance(store_cfg, dict) else {}
if isinstance(folder_cfg, dict) and folder_cfg:
for instance_name, instance_cfg in folder_cfg.items():
if not isinstance(instance_cfg, dict):
continue
name_key = str(instance_cfg.get("NAME") or instance_name)
path_val = str(instance_cfg.get("PATH") or instance_cfg.get("path") or "").strip()
ok = bool(store_registry and store_registry.is_available(name_key))
if ok and store_registry:
backend = store_registry[name_key]
scan_ok = bool(getattr(backend, "scan_ok", True))
scan_detail = str(getattr(backend, "scan_detail", "") or "")
status = "SCANNED" if scan_ok else "ERROR"
detail = (path_val + (" - " if path_val else "")) + (scan_detail or "Up to date")
_add_startup_check(status, name_key, "folder", detail)
else:
err = None
if store_registry:
err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
detail = (path_val + (" - " if path_val else "")) + (err or "Unavailable")
_add_startup_check("ERROR", name_key, "folder", detail)
else:
_add_startup_check("Folder Stores", "SKIPPED", detail)
_add_startup_check("SKIPPED", "Folder", "folder", "No folder stores configured")
if _has_store_subtype(config, "debrid"):
_run_check("Debrid", lambda: initialize_debrid_health_check(config, emit_debug=False))
# Debrid availability is validated by API.alldebrid.AllDebridClient.__init__.
try:
from config import get_debrid_api_key
_run_check("Cookies", lambda: initialize_cookies_check(config, emit_debug=False))
api_key = get_debrid_api_key(config)
if not api_key:
_add_startup_check("DISABLED", "Debrid", "debrid", "Not configured")
else:
from API.alldebrid import AllDebridClient
client = AllDebridClient(api_key)
base_url = str(getattr(client, "base_url", "") or "").strip()
_add_startup_check("ENABLED", "Debrid", "debrid", base_url or "Connected")
except Exception as exc:
_add_startup_check("DISABLED", "Debrid", "debrid", str(exc))
# Cookies are used by yt-dlp; keep this centralized utility.
try:
ok, detail = initialize_cookies_check(config, emit_debug=False)
_add_startup_check("FOUND" if ok else "MISSING", "Cookies", "N/A", detail or "Not found")
except Exception as exc:
_add_startup_check("ERROR", "Cookies", "N/A", str(exc))
if startup_table is not None and startup_table.rows:
print()
@@ -1156,14 +1239,14 @@ def _execute_pipeline(tokens: list):
         and the actual items being selected to help diagnose reordering issues.
         """
         try:
-            print(f"[debug] {label}: sel={selection_indices} rows={len(table_obj.rows) if table_obj and hasattr(table_obj, 'rows') else 'n/a'} items={len(items_list) if items_list is not None else 'n/a'}")
+            debug(f"[debug] {label}: sel={selection_indices} rows={len(table_obj.rows) if table_obj and hasattr(table_obj, 'rows') else 'n/a'} items={len(items_list) if items_list is not None else 'n/a'}")
             if table_obj and hasattr(table_obj, 'rows') and items_list:
                 # Show correspondence: displayed row # -> source_index -> item hash/title
                 for i in selection_indices:
                     if 0 <= i < len(table_obj.rows):
                         row = table_obj.rows[i]
                         src_idx = getattr(row, 'source_index', None)
-                        print(f"[debug] @{i+1} -> row_index={i}, source_index={src_idx}", end='')
+                        debug(f"[debug] @{i+1} -> row_index={i}, source_index={src_idx}", end='')
                         if src_idx is not None and 0 <= src_idx < len(items_list):
                             item = items_list[src_idx]
                             # Try to show hash/title for verification
@@ -1181,9 +1264,9 @@ def _execute_pipeline(tokens: list):
                         else:
                             print(" -> [source_index out of range]")
             if resolved_list is not None:
-                print(f"[debug] resolved_len={len(resolved_list)}")
+                debug(f"[debug] resolved_len={len(resolved_list)}")
         except Exception as e:
-            print(f"[debug] error in _debug_selection: {e}")
+            debug(f"[debug] error in _debug_selection: {e}")

     # Split tokens by pipe operator
     stages = []
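The two pipeline hunks above route the selection diagnostics through debug() instead of print(). The project's own debug() helper is not shown in this diff; a minimal stand-in built on the standard logging module, which also swallows print-style keywords such as the end='' used at these call sites, might look like this:

import logging

_log = logging.getLogger("cli")

def debug(msg: str, **_print_kwargs) -> None:
    # Accept and ignore print-style keywords (e.g. end='') so call sites need no edits.
    _log.debug(msg)

logging.basicConfig(level=logging.DEBUG, format="%(message)s")
debug("[debug] resolved_len=3", end="")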