2026-01-11 00:39:17 -08:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
import shutil
|
2026-01-19 03:14:30 -08:00
|
|
|
from typing import Any, Dict, List
|
2026-01-11 00:39:17 -08:00
|
|
|
|
2026-02-11 19:06:38 -08:00
|
|
|
from SYS.cmdlet_spec import Cmdlet
|
2026-01-11 00:39:17 -08:00
|
|
|
from SYS import pipeline as ctx
|
2026-01-18 10:50:42 -08:00
|
|
|
from SYS.result_table import Table
|
2026-01-19 03:14:30 -08:00
|
|
|
from SYS.logger import set_debug, debug
|
2026-03-25 22:39:30 -07:00
|
|
|
from cmdnat._status_shared import (
|
|
|
|
|
add_startup_check as _add_startup_check,
|
|
|
|
|
default_provider_ping_targets as _default_provider_ping_targets,
|
|
|
|
|
has_provider as _has_provider,
|
|
|
|
|
has_store_subtype as _has_store_subtype,
|
|
|
|
|
has_tool as _has_tool,
|
|
|
|
|
ping_first as _ping_first,
|
|
|
|
|
ping_url as _ping_url,
|
|
|
|
|
provider_display_name as _provider_display_name,
|
|
|
|
|
)
|
2026-01-11 00:39:17 -08:00
|
|
|
|
|
|
|
|
# Cmdlet descriptor picked up by the dispatcher: `.status` takes no
# arguments and reports service/provider health as a result table.
CMDLET = Cmdlet(
    name=".status",
    summary="Check and display service/provider status",
    usage=".status",
    arg=[],
)
|
|
|
|
|
|
|
|
|
|
def _check_mpv(table: Any) -> None:
    """Record whether the MPV IPC helper can be constructed and where the binary is."""
    try:
        from MPV.mpv_ipc import MPV

        MPV()  # constructing the IPC wrapper is the availability probe
        mpv_path = shutil.which("mpv")
        _add_startup_check(table, "ENABLED", "MPV", detail=mpv_path or "Available")
        debug(f"MPV check OK: path={mpv_path or 'Available'}")
    except Exception as exc:
        _add_startup_check(table, "DISABLED", "MPV", detail=str(exc))
        debug(f"MPV check failed: {exc}")


def _init_store_registry(config: Dict[str, Any]) -> Any:
    """Build the Store registry; return None when initialization fails."""
    try:
        from Store import Store as StoreRegistry

        registry = StoreRegistry(config=config, suppress_debug=True)
        try:
            backends = registry.list_backends()
        except Exception:
            backends = []  # listing is informational only; keep the registry
        debug(f"StoreRegistry initialized. backends={backends}")
        return registry
    except Exception as exc:
        debug(f"StoreRegistry initialization failed: {exc}")
        return None


def _check_hydrus(table: Any, config: Dict[str, Any], store_registry: Any) -> None:
    """Record availability (and file counts when reachable) for each Hydrus network."""
    if not _has_store_subtype(config, "hydrusnetwork"):
        return
    hcfg = config.get("store", {}).get("hydrusnetwork", {})
    for iname, icfg in hcfg.items():
        if not isinstance(icfg, dict):
            continue
        nkey = str(icfg.get("NAME") or iname)
        uval = str(icfg.get("URL") or "").strip()
        debug(f"Hydrus network check: name={nkey}, url={uval}")
        ok = bool(store_registry and store_registry.is_available(nkey))
        status = "ENABLED" if ok else "DISABLED"
        files = None
        detail = uval
        if ok and store_registry:
            try:
                backend = store_registry[nkey]
                files = getattr(backend, "total_count", None)
                if files is None and hasattr(backend, "get_total_count"):
                    files = backend.get_total_count()
                debug(f"Hydrus backend '{nkey}' available: files={files}")
            except Exception as exc:
                debug(f"Hydrus backend '{nkey}' check failed: {exc}")
        else:
            # NOTE(review): availability is keyed by the NAME override (nkey)
            # but the error lookup uses the raw instance key (iname) — confirm
            # this asymmetry is intentional in the registry API.
            err = store_registry.get_backend_error(iname) if store_registry else None
            debug(f"Hydrus backend '{nkey}' not available: {err}")
            detail = f"{uval} - {err or 'Unavailable'}"
        _add_startup_check(table, status, nkey, store="hydrusnetwork", files=files, detail=detail)


def _check_alldebrid(table: Any, config: Dict[str, Any], prov: str, display: str) -> None:
    """Record AllDebrid status: API-key presence plus client construction."""
    try:
        from Provider.alldebrid import _get_debrid_api_key
        from API.alldebrid import AllDebridClient

        api_key = _get_debrid_api_key(config)
        debug(f"AllDebrid configured: api_key_present={bool(api_key)}")
        if not api_key:
            _add_startup_check(table, "DISABLED", display, provider=prov, detail="Not configured")
        else:
            client = AllDebridClient(api_key)
            _add_startup_check(table, "ENABLED", display, provider=prov, detail=getattr(client, "base_url", "Connected"))
            debug(f"AllDebrid client connected: base_url={getattr(client, 'base_url', 'unknown')}")
    except Exception as exc:
        _add_startup_check(table, "DISABLED", display, provider=prov, detail=str(exc))
        debug(f"AllDebrid check failed: {exc}")


def _check_providers(table: Any, config: Dict[str, Any]) -> None:
    """Record configured/registered/ping status for every provider in config."""
    pcfg = config.get("provider", {})
    if not (isinstance(pcfg, dict) and pcfg):
        return
    from ProviderCore.registry import list_providers, list_search_providers, list_file_providers
    from Provider.metadata_provider import list_metadata_providers

    p_avail = list_providers(config) or {}
    s_avail = list_search_providers(config) or {}
    f_avail = list_file_providers(config) or {}
    m_avail = list_metadata_providers(config) or {}
    debug(f"Provider registries: providers={list(p_avail.keys())}, search={list(s_avail.keys())}, file={list(f_avail.keys())}, metadata={list(m_avail.keys())}")

    # "matrix" is pre-seeded because it gets its own dedicated check.
    already = {"matrix"}
    for pname in pcfg.keys():
        prov = str(pname).lower()
        if prov in already:
            continue
        display = _provider_display_name(prov)

        # AllDebrid needs a live client probe rather than a registry lookup.
        if prov == "alldebrid":
            _check_alldebrid(table, config, prov, display)
            already.add(prov)
            continue

        is_known = prov in p_avail or prov in s_avail or prov in f_avail or prov in m_avail
        if not is_known:
            _add_startup_check(table, "UNKNOWN", display, provider=prov, detail="Not registered")
            debug(f"Provider {prov} not registered")
        else:
            ok_val = p_avail.get(prov) or s_avail.get(prov) or f_avail.get(prov) or m_avail.get(prov)
            detail = "Configured" if ok_val else "Not configured"
            ping_targets = _default_provider_ping_targets(prov)
            if ping_targets:
                debug(f"Provider {prov} ping targets: {ping_targets}")
                pok, pdet = _ping_first(ping_targets)
                debug(f"Provider {prov} ping result: ok={pok}, detail={pdet}")
                # When configured, show only the ping detail; otherwise show both.
                detail = pdet if ok_val else f"{detail} | {pdet}"
            _add_startup_check(table, "ENABLED" if ok_val else "DISABLED", display, provider=prov, detail=detail)
        already.add(prov)


def _check_matrix(table: Any, config: Dict[str, Any]) -> None:
    """Record Matrix provider status: homeserver/room config plus validate()."""
    if not _has_provider(config, "matrix"):
        return
    try:
        from Provider.matrix import Matrix

        m_prov = Matrix(config)
        mcfg = config.get("provider", {}).get("matrix", {})
        hs = str(mcfg.get("homeserver") or "").strip()
        rid = str(mcfg.get("room_id") or "").strip()
        detail = f"{hs} room:{rid}"
        valid = False
        try:
            valid = bool(m_prov.validate())
        except Exception as exc:
            # A failed validate() reports DISABLED but keeps the config detail.
            debug(f"Matrix validate failed: {exc}")
        _add_startup_check(table, "ENABLED" if valid else "DISABLED", "Matrix", provider="matrix", detail=detail)
        debug(f"Matrix check: homeserver={hs}, room_id={rid}, validate={valid}")
    except Exception as exc:
        _add_startup_check(table, "DISABLED", "Matrix", provider="matrix", detail=str(exc))
        debug(f"Matrix instantiation failed: {exc}")


def _check_cookies(table: Any, config: Dict[str, Any]) -> None:
    """Record whether yt-dlp can resolve a cookie file."""
    try:
        from tool.ytdlp import YtDlpTool

        cf = YtDlpTool(config).resolve_cookiefile()
        _add_startup_check(table, "FOUND" if cf else "MISSING", "Cookies", detail=str(cf) if cf else "Not found")
        debug(f"Cookies: resolved cookiefile={cf}")
    except Exception as exc:
        debug(f"Cookies check failed: {exc}")


def _run(result: Any, args: List[str], config: Dict[str, Any]) -> int:
    """Run the `.status` cmdlet: probe each service/provider and publish a table.

    Args:
        result: Upstream pipeline value (unused; required by the cmdlet ABI).
        args: Raw CLI arguments (unused; `.status` takes none).
        config: Global configuration dict that selects which checks run.

    Returns:
        0 always — individual failures are reported as table rows, not as a
        non-zero exit status.
    """
    startup_table = Table(
        "*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"
    )
    startup_table._interactive(True)._perseverance(True)
    startup_table.set_value_case("upper")

    debug_enabled = bool(config.get("debug", False))
    try:
        # Ensure global debug state follows config so HTTPClient and other helpers
        # emit debug-level information during the status check.
        set_debug(debug_enabled)
    except Exception:
        pass  # best-effort: a failed toggle must not abort the status report
    debug(f"Status check: debug_enabled={debug_enabled}")
    _add_startup_check(startup_table, "ENABLED" if debug_enabled else "DISABLED", "DEBUGGING")

    try:
        _check_mpv(startup_table)
        store_registry = _init_store_registry(config)
        _check_hydrus(startup_table, config, store_registry)
        _check_providers(startup_table, config)
        _check_matrix(startup_table, config)
        _check_cookies(startup_table, config)
    except Exception as exc:
        # Each check swallows its own errors; this guards the orchestration
        # itself (e.g. a registry import failure aborts the remaining checks,
        # matching the original single-try structure).
        debug(f"Status check failed: {exc}")

    if startup_table.rows:
        # Mark as rendered to prevent CLI.py from auto-printing it to stdout
        # (avoiding duplication in TUI logs, while keeping it in TUI Results)
        setattr(startup_table, "_rendered_by_cmdlet", True)
        ctx.set_current_stage_table(startup_table)
        debug(f"Status check completed: {len(startup_table.rows)} checks recorded")

    return 0
|
|
|
|
|
|
|
|
|
|
# Wire the implementation onto the descriptor so the dispatcher can invoke it.
CMDLET.exec = _run
|