"""Hydrus API health check and initialization.
|
|
|
|
|
|
|
|
|
|
Provides startup health checks for Hydrus API availability and gracefully
|
|
|
|
|
disables Hydrus features if the API is unavailable.
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
import logging
|
|
|
|
|
import sys
|
|
|
|
|
|
2025-11-27 10:59:01 -08:00
|
|
|
from helper.logger import log, debug
|
2025-11-25 20:09:33 -08:00
|
|
|
from typing import Tuple, Optional, Dict, Any
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
2025-12-11 12:47:30 -08:00
|
|
|
_SERVICE_STATE = {
    "hydrus": {"available": None, "reason": None, "complete": False},
    "hydrusnetwork_stores": {},  # Track individual Hydrus instances
    "debrid": {"available": None, "reason": None, "complete": False},
    "mpv": {"available": None, "reason": None, "complete": False},
    "matrix": {"available": None, "reason": None, "complete": False},
}
|
2025-11-27 10:59:01 -08:00
|
|
|
|
2025-12-01 14:42:30 -08:00
|
|
|
# Global state for Cookies availability.
# Absolute path to cookies.txt once initialize_cookies_check() finds it; None otherwise.
_COOKIES_FILE_PATH: Optional[str] = None
|
|
|
|
|
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
def check_hydrus_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
    """Check if Hydrus API is available by pinging it.

    Args:
        config: Application configuration dictionary

    Returns:
        Tuple of (is_available: bool, reason: Optional[str]):
        (True, None) when Hydrus responds, otherwise (False, reason)
        describing why it is unreachable.
    """
    try:
        # Imported lazily so this module loads even when the helper is broken.
        from helper.hydrus import is_available as _is_hydrus_available

        logger.info("[Hydrus Health Check] Pinging Hydrus API...")
        available, reason = _is_hydrus_available(config, use_cache=False)

        if available:
            logger.info("[Hydrus Health Check] Hydrus API is AVAILABLE")
            return True, None

        reason_str = f": {reason}" if reason else ""
        logger.warning(f"[Hydrus Health Check] Hydrus API is UNAVAILABLE{reason_str}")
        return False, reason

    except Exception as e:
        error_msg = str(e)
        logger.error(f"[Hydrus Health Check] Error checking Hydrus availability: {error_msg}")
        return False, error_msg
|
|
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
def initialize_hydrus_health_check(config: Dict[str, Any], emit_debug: bool = True) -> Tuple[bool, Optional[str]]:
    """Initialize Hydrus health check at startup."""
    global _SERVICE_STATE

    logger.info("[Startup] Starting Hydrus health check...")
    is_available, reason = check_hydrus_availability(config)

    # Cache the overall verdict for the unified getters.
    hydrus_state = _SERVICE_STATE["hydrus"]
    hydrus_state["available"] = is_available
    hydrus_state["reason"] = reason
    hydrus_state["complete"] = True

    # Track individual Hydrus instances configured under store.hydrusnetwork.
    try:
        instances = config.get("store", {}).get("hydrusnetwork", {})
        stores = _SERVICE_STATE["hydrusnetwork_stores"]
        for name, inst_conf in instances.items():
            if not isinstance(inst_conf, dict):
                continue
            url = inst_conf.get("url")
            access_key = inst_conf.get("Hydrus-Client-API-Access-Key")
            if url and access_key:
                stores[name] = {
                    "ok": is_available,
                    "url": url,
                    "detail": reason if not is_available else "Connected",
                }
            else:
                stores[name] = {
                    "ok": False,
                    "url": url or "Not configured",
                    "detail": "Missing credentials",
                }
    except Exception as e:
        # Instance enumeration is best-effort; the overall check already ran.
        logger.debug(f"Could not enumerate Hydrus instances: {e}")

    if emit_debug:
        status = 'ENABLED' if is_available else f'DISABLED - {reason or "Connection failed"}'
        debug(f"Hydrus: {status}", file=sys.stderr)

    return is_available, reason
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def check_debrid_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
    """Check if Debrid API is available.

    Note: `config` is currently unused; this is an unauthenticated ping
    against the AllDebrid public endpoint.
    """
    try:
        from helper.http_client import HTTPClient

        logger.info("[Debrid Health Check] Pinging Debrid API...")
        with HTTPClient(timeout=10.0, verify_ssl=True) as client:
            payload = client.get('https://api.alldebrid.com/v4/ping').json()

        ping_ok = (
            payload.get('status') == 'success'
            and payload.get('data', {}).get('ping') == 'pong'
        )
        if ping_ok:
            logger.info("[Debrid Health Check] Debrid API is AVAILABLE")
            return True, None
        return False, "Invalid API response"

    except Exception as e:
        logger.warning(f"[Debrid Health Check] Debrid API error: {e}")
        return False, str(e)
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
def initialize_debrid_health_check(config: Dict[str, Any], emit_debug: bool = True) -> Tuple[bool, Optional[str]]:
    """Initialize Debrid health check at startup."""
    global _SERVICE_STATE

    logger.info("[Startup] Starting Debrid health check...")
    is_available, reason = check_debrid_availability(config)

    entry = _SERVICE_STATE["debrid"]
    entry["available"] = is_available
    entry["reason"] = reason
    entry["complete"] = True

    if emit_debug:
        if is_available:
            status = 'ENABLED'
        else:
            status = f'DISABLED - {reason or "Connection failed"}'
        debug(f"Debrid: {status}", file=sys.stderr)

    return is_available, reason
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def check_mpv_availability() -> Tuple[bool, Optional[str]]:
    """Check if MPV is available (installed and runnable).

    Serves the cached result from _SERVICE_STATE when a prior check
    completed, so repeated calls do not spawn a subprocess each time.

    Returns:
        Tuple of (is_available: bool, reason: Optional[str])
    """
    # No `global` needed: the shared state dict is only read/mutated, never rebound.
    state = _SERVICE_STATE["mpv"]
    if state["complete"] and state["available"] is not None:
        return state["available"], state["reason"]

    import shutil
    import subprocess

    logger.info("[MPV Health Check] Checking for MPV executable...")

    mpv_path = shutil.which("mpv")
    if not mpv_path:
        # Fixed: was an f-string with no placeholders (ruff F541).
        logger.warning("[MPV Health Check] ❌ MPV is UNAVAILABLE: Executable 'mpv' not found in PATH")
        return False, "Executable 'mpv' not found in PATH"

    # Try to get version to confirm the binary actually runs.
    try:
        result = subprocess.run(
            [mpv_path, "--version"],
            capture_output=True,
            text=True,
            timeout=2,
        )
        if result.returncode == 0:
            version_line = result.stdout.split('\n')[0]
            logger.info(f"[MPV Health Check] MPV is AVAILABLE ({version_line})")
            return True, None
        else:
            reason = f"MPV returned non-zero exit code: {result.returncode}"
            logger.warning(f"[MPV Health Check] ❌ MPV is UNAVAILABLE: {reason}")
            return False, reason
    except Exception as e:
        # Covers TimeoutExpired, OSError, etc. — treat any failure as unavailable.
        reason = f"Error running MPV: {e}"
        logger.warning(f"[MPV Health Check] ❌ MPV is UNAVAILABLE: {reason}")
        return False, reason
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
def initialize_mpv_health_check(emit_debug: bool = True) -> Tuple[bool, Optional[str]]:
    """Initialize MPV health check at startup and return (is_available, reason)."""
    global _SERVICE_STATE

    logger.info("[Startup] Starting MPV health check...")
    is_available, reason = check_mpv_availability()

    entry = _SERVICE_STATE["mpv"]
    entry["available"] = is_available
    entry["reason"] = reason
    entry["complete"] = True

    if emit_debug:
        if is_available:
            debug("MPV: ENABLED - All MPV features available", file=sys.stderr)
        elif reason != "Not configured":
            debug(f"MPV: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)

    return is_available, reason
|
2025-11-27 10:59:01 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def check_matrix_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
    """Check if Matrix homeserver is reachable and credentials are valid.

    Args:
        config: Application configuration dictionary

    Returns:
        Tuple of (is_available: bool, reason: Optional[str])
    """
    try:
        import requests

        # NOTE(review): reads config["storage"]["matrix"], while other services
        # read config["store"][...] — confirm the key is intentional.
        matrix_conf = config.get('storage', {}).get('matrix', {})
        homeserver = matrix_conf.get('homeserver')
        access_token = matrix_conf.get('access_token')

        if not homeserver:
            return False, "Not configured"

        # Normalize bare hostnames to HTTPS URLs.
        if not homeserver.startswith('http'):
            homeserver = f"https://{homeserver}"

        # Unauthenticated reachability probe against the versions endpoint.
        try:
            resp = requests.get(f"{homeserver}/_matrix/client/versions", timeout=5)
            if resp.status_code != 200:
                return False, f"Homeserver returned {resp.status_code}"
        except Exception as e:
            return False, f"Homeserver unreachable: {e}"

        # Validate the token via whoami when one is configured.
        if access_token:
            try:
                headers = {"Authorization": f"Bearer {access_token}"}
                resp = requests.get(f"{homeserver}/_matrix/client/v3/account/whoami", headers=headers, timeout=5)
                if resp.status_code != 200:
                    return False, f"Authentication failed: {resp.status_code}"
            except Exception as e:
                return False, f"Auth check failed: {e}"

        return True, None

    except Exception as e:
        return False, str(e)
|
|
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
|
|
|
|
|
def initialize_matrix_health_check(config: Dict[str, Any], emit_debug: bool = True) -> Tuple[bool, Optional[str]]:
    """Initialize Matrix health check at startup and return (is_available, reason)."""
    global _SERVICE_STATE

    logger.info("[Startup] Starting Matrix health check...")
    is_available, reason = check_matrix_availability(config)

    entry = _SERVICE_STATE["matrix"]
    entry["available"] = is_available
    entry["reason"] = reason
    entry["complete"] = True

    if emit_debug:
        if is_available:
            debug("Matrix: ENABLED - Homeserver reachable", file=sys.stderr)
        elif reason != "Not configured":
            debug(f"Matrix: DISABLED - {reason}", file=sys.stderr)

    return is_available, reason
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
# Unified getter functions for service availability - all use _SERVICE_STATE
|
2025-11-25 20:09:33 -08:00
|
|
|
def is_hydrus_available() -> bool:
    """Check if Hydrus is available (from cached health check)."""
    entry = _SERVICE_STATE["hydrus"]
    return entry["available"] is True
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_hydrus_unavailable_reason() -> Optional[str]:
    """Get the reason why Hydrus is unavailable."""
    if is_hydrus_available():
        return None
    return _SERVICE_STATE["hydrus"]["reason"]
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_hydrus_check_complete() -> bool:
    """Check if the Hydrus health check has been completed."""
    entry = _SERVICE_STATE["hydrus"]
    return entry["complete"]
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def disable_hydrus_features() -> None:
    """Manually disable all Hydrus features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["hydrus"]
    entry["available"] = False
    entry["reason"] = "Manually disabled or lost connection"
    logger.warning("[Hydrus] Features manually disabled")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def enable_hydrus_features() -> None:
    """Manually enable Hydrus features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["hydrus"]
    entry["available"] = True
    entry["reason"] = None
    logger.info("[Hydrus] Features manually enabled")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_debrid_available() -> bool:
    """Check if Debrid is available (from cached health check)."""
    entry = _SERVICE_STATE["debrid"]
    return entry["available"] is True
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_debrid_unavailable_reason() -> Optional[str]:
    """Get the reason why Debrid is unavailable."""
    if is_debrid_available():
        return None
    return _SERVICE_STATE["debrid"]["reason"]
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_debrid_check_complete() -> bool:
    """Check if the Debrid health check has been completed."""
    entry = _SERVICE_STATE["debrid"]
    return entry["complete"]
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def disable_debrid_features() -> None:
    """Manually disable all Debrid features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["debrid"]
    entry["available"] = False
    entry["reason"] = "Manually disabled or lost connection"
    logger.warning("[Debrid] Features manually disabled")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def enable_debrid_features() -> None:
    """Manually enable Debrid features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["debrid"]
    entry["available"] = True
    entry["reason"] = None
    logger.info("[Debrid] Features manually enabled")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_mpv_available() -> bool:
    """Check if MPV is available (from cached health check)."""
    entry = _SERVICE_STATE["mpv"]
    return entry["available"] is True
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
def get_mpv_unavailable_reason() -> Optional[str]:
    """Get the reason why MPV is unavailable."""
    if is_mpv_available():
        return None
    return _SERVICE_STATE["mpv"]["reason"]
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_mpv_check_complete() -> bool:
    """Check if the MPV health check has been completed."""
    entry = _SERVICE_STATE["mpv"]
    return entry["complete"]
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def disable_mpv_features() -> None:
    """Manually disable all MPV features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["mpv"]
    entry["available"] = False
    entry["reason"] = "Manually disabled or lost connection"
    logger.warning("[MPV] Features manually disabled")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def enable_mpv_features() -> None:
    """Manually enable MPV features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["mpv"]
    entry["available"] = True
    entry["reason"] = None
    logger.info("[MPV] Features manually enabled")
|
2025-11-27 10:59:01 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_matrix_available() -> bool:
    """Check if Matrix is available (from cached health check)."""
    entry = _SERVICE_STATE["matrix"]
    return entry["available"] is True
|
2025-11-27 10:59:01 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_matrix_unavailable_reason() -> Optional[str]:
    """Get the reason why Matrix is unavailable."""
    if is_matrix_available():
        return None
    return _SERVICE_STATE["matrix"]["reason"]
|
2025-11-27 10:59:01 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_matrix_check_complete() -> bool:
    """Check if the Matrix health check has been completed."""
    entry = _SERVICE_STATE["matrix"]
    return entry["complete"]
|
2025-11-27 10:59:01 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def disable_matrix_features() -> None:
    """Manually disable all Matrix features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["matrix"]
    entry["available"] = False
    entry["reason"] = "Manually disabled or lost connection"
    logger.warning("[Matrix] Features manually disabled")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def enable_matrix_features() -> None:
    """Manually enable Matrix features (for testing/fallback)."""
    # `global` removed: the dict is mutated in place, never rebound.
    entry = _SERVICE_STATE["matrix"]
    entry["available"] = True
    entry["reason"] = None
    logger.info("[Matrix] Features manually enabled")
|
2025-11-27 18:35:06 -08:00
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
def initialize_local_library_scan(config: Dict[str, Any], emit_debug: bool = True) -> Tuple[bool, str]:
    """Initialize and scan all folder stores at startup.

    Returns a tuple of (success, detail_message).

    Note: Individual store results are stored in _SERVICE_STATE["folder_stores"]
    for the CLI to display as separate table rows.

    This ensures that any new files in configured folder stores are indexed
    and their sidecar files are imported and cleaned up.
    """
    from helper.folder_store import LocalLibraryInitializer
    from helper.store import Folder

    logger.info("[Startup] Starting folder store scans...")

    try:
        # Get all configured folder stores from config
        folder_sources = config.get("store", {}).get("folder", {})
        if not isinstance(folder_sources, dict) or not folder_sources:
            if emit_debug:
                debug("⚠️ Folder stores: SKIPPED - No folder stores configured", file=sys.stderr)
            return False, "No folder stores configured"

        # Aggregate counters across all stores; store_results feeds the CLI table.
        results = []
        total_new_files = 0
        total_sidecars = 0
        failed_stores = []
        store_results = {}

        for store_name, store_config in folder_sources.items():
            # Skip malformed entries (non-dict config or missing path).
            if not isinstance(store_config, dict):
                continue

            store_path = store_config.get("path")
            if not store_path:
                continue

            # Per-store try/except: one failing store must not abort the rest.
            try:
                from pathlib import Path
                storage_path = Path(str(store_path)).expanduser()

                if emit_debug:
                    debug(f"Scanning folder store '{store_name}' at: {storage_path}", file=sys.stderr)

                # Migrate the folder store to hash-based naming (only runs once per location)
                Folder.migrate_location(str(storage_path))

                initializer = LocalLibraryInitializer(storage_path)
                stats = initializer.scan_and_index()

                # Accumulate stats
                new_files = stats.get('files_new', 0)
                sidecars = stats.get('sidecars_imported', 0)
                total_new_files += new_files
                total_sidecars += sidecars

                # Record result for this store
                if new_files > 0 or sidecars > 0:
                    result_detail = f"New: {new_files}, Sidecars: {sidecars}"
                    if emit_debug:
                        debug(f" {store_name}: {result_detail}", file=sys.stderr)
                else:
                    result_detail = "Up to date"
                    if emit_debug:
                        debug(f" {store_name}: {result_detail}", file=sys.stderr)

                results.append(f"{store_name}: {result_detail}")
                store_results[store_name] = {
                    "path": str(storage_path),
                    "detail": result_detail,
                    "ok": True
                }

            except Exception as e:
                logger.error(f"[Startup] Failed to scan folder store '{store_name}': {e}", exc_info=True)
                if emit_debug:
                    debug(f" {store_name}: ERROR - {e}", file=sys.stderr)
                failed_stores.append(store_name)
                store_results[store_name] = {
                    "path": str(store_config.get("path", "?")),
                    "detail": f"ERROR - {e}",
                    "ok": False
                }

        # Store individual results for CLI to display
        _SERVICE_STATE["folder_stores"] = store_results

        # Build detail message
        # NOTE(review): `results` counts only successful stores, so the
        # "Scanned N stores" text excludes failures — confirm intended.
        if failed_stores:
            detail = f"Scanned {len(results)} stores ({len(failed_stores)} failed); Total new: {total_new_files}, Sidecars: {total_sidecars}"
            if emit_debug:
                debug(f"Folder stores scan complete: {detail}", file=sys.stderr)
            # Success only when more stores succeeded than failed scans exist.
            return len(failed_stores) < len(results), detail

        else:
            detail = f"Scanned {len(results)} stores; Total new: {total_new_files}, Sidecars: {total_sidecars}"
            if emit_debug:
                debug(f"Folder stores scan complete: {detail}", file=sys.stderr)
            return True, detail

    except Exception as e:
        logger.error(f"[Startup] Failed to scan folder stores: {e}", exc_info=True)
        if emit_debug:
            debug(f"⚠️ Folder stores: ERROR - Scan failed: {e}", file=sys.stderr)
        return False, f"Scan failed: {e}"
|
2025-12-01 14:42:30 -08:00
|
|
|
|
|
|
|
|
|
2025-12-11 12:47:30 -08:00
|
|
|
def initialize_cookies_check(emit_debug: bool = True) -> Tuple[bool, str]:
    """Check for cookies.txt in the application root directory.

    Side effect: caches the discovered path (or None) in the module-level
    _COOKIES_FILE_PATH so get_cookies_file_path() can serve it later.

    Returns a tuple of (found, detail_message): the file path when found,
    otherwise "Not found".
    """
    global _COOKIES_FILE_PATH

    # Assume this module sits in the application root next to cookies.txt
    # (original note: "Assume CLI.py is in the root") — TODO confirm layout.
    root_dir = Path(__file__).parent
    cookies_path = root_dir / "cookies.txt"

    if cookies_path.exists():
        _COOKIES_FILE_PATH = str(cookies_path)
        if emit_debug:
            # Fixed: was an f-string with no placeholders (ruff F541).
            debug("Cookies: ENABLED - Found cookies.txt", file=sys.stderr)
        return True, str(cookies_path)
    else:
        _COOKIES_FILE_PATH = None
        return False, "Not found"
|
2025-12-01 14:42:30 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_cookies_file_path() -> Optional[str]:
    """Get the path to the cookies.txt file if it exists.

    Returns the value cached by initialize_cookies_check(), or None.
    """
    return _COOKIES_FILE_PATH
|