commit 30eb628aa3 (parent 85750247cc)
Author: nose
Date: 2025-12-13 00:18:30 -08:00

18 changed files with 1056 additions and 407 deletions

.gitignore

@@ -3,7 +3,8 @@
 __pycache__/
 *.py[cod]
 *$py.class
-config.json
+config.conf
+config.d/
 # C extensions
 *.so
 # Distribution / packaging
@@ -81,7 +82,8 @@ target/
 # IPython
 profile_default/
 ipython_config.py
-config.json
+config.conf
+config.d/
 # pyenv
 # For a library or package, you might want to ignore these files since the code is
 # intended to run in multiple environments; otherwise, check them in:
@@ -216,4 +218,5 @@ luac.out
 *.hex
-config.json
+config.conf
+config.d/


@@ -1157,7 +1157,7 @@ def is_available(config: dict[str, Any], use_cache: bool = True) -> tuple[bool,
     url = (get_hydrus_url(config, "home") or "").strip()
     if not url:
-        reason = "Hydrus URL not configured (check config.json HydrusNetwork.home.url)"
+        reason = "Hydrus URL not configured (check config.conf store.hydrusnetwork.home.URL)"
         _HYDRUS_AVAILABLE = False
         _HYDRUS_UNAVAILABLE_REASON = reason
         return False, reason
@@ -1245,7 +1245,7 @@ def get_client(config: dict[str, Any]) -> HydrusNetwork:
     # Use new config helpers
     hydrus_url = (get_hydrus_url(config, "home") or "").strip()
     if not hydrus_url:
-        raise RuntimeError("Hydrus URL is not configured (check config.json HydrusNetwork.home.url)")
+        raise RuntimeError("Hydrus URL is not configured (check config.conf store.hydrusnetwork.home.URL)")
     access_key = get_hydrus_access_key(config, "home") or ""
     timeout_raw = config.get("HydrusNetwork_Request_Timeout")
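
Note: the new error strings point at the `.conf` layout introduced by this commit. A hedged sketch of the block they refer to (the instance name, key, and URL are placeholders; `NAME`/`API`/`URL` are the keys the HydrusNetwork store advertises, and `name=` is folded into the instance key by the parser):

    [store=hydrusnetwork]
    name="home"
    API="0123abcdef"
    URL="http://127.0.0.1:45869"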

CLI.py

@@ -200,7 +200,7 @@ CLI_ROOT = Path(__file__).resolve().parent
 def _load_cli_config() -> Dict[str, Any]:
-    """Load config.json relative to the CLI script location."""
+    """Load config.conf relative to the CLI script location."""
     try:
         return deepcopy(load_config(config_dir=CLI_ROOT))
     except Exception:
@@ -697,7 +697,7 @@ def _create_cmdlet_cli():
     # Initialize cookies check for yt-dlp
     from hydrus_health_check import initialize_cookies_check
-    initialize_cookies_check()
+    initialize_cookies_check(config, emit_debug=False)
 
     # Initialize debug logging if enabled
     if config:
@@ -788,6 +788,22 @@ def _create_cmdlet_cli():
         if detail:
             row.add_column("Detail", detail)
 
+    def _has_store_subtype(cfg: dict, subtype: str) -> bool:
+        store_cfg = cfg.get("store")
+        if not isinstance(store_cfg, dict):
+            return False
+        bucket = store_cfg.get(subtype)
+        if not isinstance(bucket, dict):
+            return False
+        return any(isinstance(v, dict) and bool(v) for v in bucket.values())
+
+    def _has_provider(cfg: dict, name: str) -> bool:
+        provider_cfg = cfg.get("provider")
+        if not isinstance(provider_cfg, dict):
+            return False
+        block = provider_cfg.get(str(name).strip().lower())
+        return isinstance(block, dict) and bool(block)
+
     # Load config and initialize debug logging
     config = {}
     try:
@@ -846,37 +862,36 @@ def _create_cmdlet_cli():
     _run_check("MPV", lambda: initialize_mpv_health_check(emit_debug=False))
 
     if config:
-        _run_check("Hydrus", lambda: initialize_hydrus_health_check(config, emit_debug=False))
-        # Hydrus instances - add individual rows for each instance
-        from hydrus_health_check import _SERVICE_STATE
-        for instance_name, instance_info in _SERVICE_STATE.get("hydrusnetwork_stores", {}).items():
-            status = "ENABLED" if instance_info.get("ok") else "DISABLED"
-            _add_startup_check(f"  {instance_name}", status, f"{instance_info.get('url')} - {instance_info.get('detail')}")
-        _run_check("Matrix", lambda: initialize_matrix_health_check(config, emit_debug=False))
-        # Folder stores - add individual rows for each store
-        ok, detail = initialize_local_library_scan(config, emit_debug=False)
-        if ok or detail != "No folder stores configured":
-            # Add individual store rows
-            from hydrus_health_check import _SERVICE_STATE
-            for store_name, store_info in _SERVICE_STATE.get("folder_stores", {}).items():
-                status = "SCANNED" if store_info.get("ok") else "ERROR"
-                _add_startup_check(f"  {store_name}", status, f"{store_info.get('path')} - {store_info.get('detail')}")
-            if not _SERVICE_STATE.get("folder_stores"):
-                _add_startup_check("Folder Stores", "SCANNED", detail)
-        else:
-            _add_startup_check("Folder Stores", "SKIPPED", detail)
-        _run_check("Debrid", lambda: initialize_debrid_health_check(config, emit_debug=False))
-    else:
-        _add_startup_check("Hydrus", "SKIPPED", "No config loaded")
-        _add_startup_check("Matrix", "SKIPPED", "No config loaded")
-        _add_startup_check("Folder Stores", "SKIPPED", "No config loaded")
-        _add_startup_check("Debrid", "SKIPPED", "No config loaded")
-    _run_check("Cookies", lambda: initialize_cookies_check(emit_debug=False))
+        # Only show checks that are configured in config.conf
+        if _has_store_subtype(config, "hydrusnetwork"):
+            _run_check("Hydrus", lambda: initialize_hydrus_health_check(config, emit_debug=False))
+            # Hydrus instances - add individual rows for each configured instance
+            from hydrus_health_check import _SERVICE_STATE
+            for instance_name, instance_info in _SERVICE_STATE.get("hydrusnetwork_stores", {}).items():
+                status = "ENABLED" if instance_info.get("ok") else "DISABLED"
+                _add_startup_check(f"  {instance_name}", status, f"{instance_info.get('url')} - {instance_info.get('detail')}")
+        if _has_provider(config, "matrix"):
+            _run_check("Matrix", lambda: initialize_matrix_health_check(config, emit_debug=False))
+        if _has_store_subtype(config, "folder"):
+            # Folder stores - add individual rows for each configured store
+            ok, detail = initialize_local_library_scan(config, emit_debug=False)
+            if ok or detail != "No folder stores configured":
+                from hydrus_health_check import _SERVICE_STATE
+                for store_name, store_info in _SERVICE_STATE.get("folder_stores", {}).items():
+                    status = "SCANNED" if store_info.get("ok") else "ERROR"
+                    _add_startup_check(f"  {store_name}", status, f"{store_info.get('path')} - {store_info.get('detail')}")
+                if not _SERVICE_STATE.get("folder_stores"):
+                    _add_startup_check("Folder Stores", "SCANNED", detail)
+            else:
+                _add_startup_check("Folder Stores", "SKIPPED", detail)
+        if _has_store_subtype(config, "debrid"):
+            _run_check("Debrid", lambda: initialize_debrid_health_check(config, emit_debug=False))
+    _run_check("Cookies", lambda: initialize_cookies_check(config, emit_debug=False))
 
     if startup_table is not None and startup_table.rows:
         print()
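
Note: the two `_has_*` helpers gate purely on shape, not reachability; a check runs when the parsed `config.conf` contains at least one non-empty instance dict. A minimal sketch of the parsed shape they test (instance names and values here are illustrative placeholders):

    # Illustrative parsed config; see config.py's parse_conf_text for the real parser.
    config = {
        "store": {
            "hydrusnetwork": {"home": {"NAME": "home", "API": "abc123", "URL": "http://127.0.0.1:45869"}},
        },
        "provider": {
            "matrix": {"homeserver": "https://matrix.example.org", "room_id": "!room:example.org"},
        },
    }

    _has_store_subtype(config, "hydrusnetwork")  # True -> Hydrus check runs
    _has_store_subtype(config, "debrid")         # False -> Debrid row is omitted entirely
    _has_provider(config, "matrix")              # True -> Matrix check runs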


@@ -15,7 +15,7 @@ class Matrix(FileProvider):
     def validate(self) -> bool:
         if not self.config:
             return False
-        matrix_conf = self.config.get("storage", {}).get("matrix", {})
+        matrix_conf = self.config.get("provider", {}).get("matrix", {})
         return bool(
             matrix_conf.get("homeserver")
             and matrix_conf.get("room_id")
@@ -27,7 +27,7 @@ class Matrix(FileProvider):
         if not path.exists():
             raise FileNotFoundError(f"File not found: {file_path}")
-        matrix_conf = self.config.get("storage", {}).get("matrix", {})
+        matrix_conf = self.config.get("provider", {}).get("matrix", {})
         homeserver = matrix_conf.get("homeserver")
         access_token = matrix_conf.get("access_token")
        room_id = matrix_conf.get("room_id")
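
With the move from `storage` to `provider`, this block now comes from a `[provider=matrix]` section. A hedged `config.conf` sketch (all values are placeholders; `homeserver`, `room_id`, and `access_token` are the keys `validate()` and the upload path read):

    [provider=matrix]
    homeserver="https://matrix.example.org"
    room_id="!abc123:example.org"
    access_token="syt_placeholder"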


@@ -34,8 +34,25 @@ class Folder(Store):
     """"""
 
     # Track which locations have already been migrated to avoid repeated migrations
     _migrated_locations = set()
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> "Folder":
+        return super().__new__(cls)
+
+    setattr(__new__, "keys", ("NAME", "PATH"))
+
-    def __init__(self, location: Optional[str] = None, name: Optional[str] = None) -> None:
+    def __init__(
+        self,
+        location: Optional[str] = None,
+        name: Optional[str] = None,
+        *,
+        NAME: Optional[str] = None,
+        PATH: Optional[str] = None,
+    ) -> None:
+        if name is None and NAME is not None:
+            name = str(NAME)
+        if location is None and PATH is not None:
+            location = str(PATH)
         self._location = location
         self._name = name


@@ -17,8 +17,32 @@ class HydrusNetwork(Store):
     Each instance represents a specific Hydrus client connection.
     Maintains its own HydrusClient with session key.
     """
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> "HydrusNetwork":
+        instance = super().__new__(cls)
+        name = kwargs.get("NAME")
+        api = kwargs.get("API")
+        url = kwargs.get("URL")
+        if name is not None:
+            setattr(instance, "NAME", str(name))
+        if api is not None:
+            setattr(instance, "API", str(api))
+        if url is not None:
+            setattr(instance, "URL", str(url))
+        return instance
+
+    setattr(__new__, "keys", ("NAME", "API", "URL"))
+
-    def __init__(self, instance_name: str, api_key: str, url: str) -> None:
+    def __init__(
+        self,
+        instance_name: Optional[str] = None,
+        api_key: Optional[str] = None,
+        url: Optional[str] = None,
+        *,
+        NAME: Optional[str] = None,
+        API: Optional[str] = None,
+        URL: Optional[str] = None,
+    ) -> None:
         """Initialize Hydrus storage backend.
 
         Args:
@@ -27,18 +51,41 @@ class HydrusNetwork(Store):
             url: Hydrus client URL (e.g., 'http://192.168.1.230:45869')
         """
         from API.HydrusNetwork import HydrusNetwork as HydrusClient
 
+        if instance_name is None and NAME is not None:
+            instance_name = str(NAME)
+        if api_key is None and API is not None:
+            api_key = str(API)
+        if url is None and URL is not None:
+            url = str(URL)
+        if not instance_name or not api_key or not url:
+            raise ValueError("HydrusNetwork requires NAME, API, and URL")
+
-        self._instance_name = instance_name
-        self._api_key = api_key
-        self._url = url
+        self.NAME = instance_name
+        self.API = api_key
+        self.URL = url
 
         # Create persistent client with session key for this instance
         self._client = HydrusClient(url=url, access_key=api_key)
+
+        # Self health-check: acquire a session key immediately so broken configs
+        # fail-fast and the registry can skip registering this backend.
+        try:
+            if self._client is not None:
+                self._client.ensure_session_key()
+        except Exception as exc:
+            # Best-effort cleanup so partially constructed objects don't linger.
+            try:
+                self._client = None
+            except Exception:
+                pass
+            raise RuntimeError(f"Hydrus '{self.NAME}' unavailable: {exc}") from exc
 
     def name(self) -> str:
-        return self._instance_name
+        return self.NAME
 
     def get_name(self) -> str:
-        return self._instance_name
+        return self.NAME
 
     def add_file(self, file_path: Path, **kwargs: Any) -> str:
         """Upload file to Hydrus with full metadata support.
@@ -281,7 +328,7 @@ class HydrusNetwork(Store):
                 if has_namespace:
                     # Explicit namespace search - already filtered by Hydrus tag search
                     # Include this result as-is
-                    file_url = f"{self._url.rstrip('/')}/get_files/file?hash={hash_hex}"
+                    file_url = f"{self.URL.rstrip('/')}/get_files/file?hash={hash_hex}"
                     results.append({
                         "hash": hash_hex,
                         "url": file_url,
@@ -289,7 +336,7 @@ class HydrusNetwork(Store):
                         "title": title,
                         "size": size,
                         "size_bytes": size,
-                        "store": self._instance_name,
+                        "store": self.NAME,
                         "tag": all_tags,
                         "file_id": file_id,
                         "mime": mime_type,
@@ -314,7 +361,7 @@ class HydrusNetwork(Store):
                         break
                 if match:
-                    file_url = f"{self._url.rstrip('/')}/get_files/file?hash={hash_hex}"
+                    file_url = f"{self.URL.rstrip('/')}/get_files/file?hash={hash_hex}"
                     results.append({
                         "hash": hash_hex,
                         "url": file_url,
@@ -322,7 +369,7 @@ class HydrusNetwork(Store):
                         "title": title,
                         "size": size,
                         "size_bytes": size,
-                        "store": self._instance_name,
+                        "store": self.NAME,
                         "tag": all_tags,
                         "file_id": file_id,
                         "mime": mime_type,
@@ -345,8 +392,8 @@ class HydrusNetwork(Store):
         debug(f"[HydrusNetwork.get_file] Starting for hash: {file_hash[:12]}...")
         # Build browser URL with access key
-        base_url = self._client.url.rstrip('/')
-        access_key = self._client.access_key
+        base_url = str(self.URL).rstrip('/')
+        access_key = str(self.API)
         browser_url = f"{base_url}/get_files/file?hash={file_hash}&Hydrus-Client-API-Access-Key={access_key}"
         debug(f"[HydrusNetwork.get_file] Opening URL: {browser_url}")


@@ -3,31 +3,107 @@
 Concrete store implementations live in the `Store/` package.
 This module is the single source of truth for store discovery.
 
-Config schema (canonical):
-
-{
-  "store": {
-    "folder": {
-      "default": {"path": "C:/Media"},
-      "test": {"path": "C:/Temp"}
-    },
-    "hydrusnetwork": {
-      "home": {"Hydrus-Client-API-Access-Key": "...", "url": "http://..."}
-    }
-  }
-}
+This registry is config-driven:
+- Each store subtype (e.g. `hydrusnetwork`) maps to a concrete store class.
+- Each store class advertises its required config keys via `StoreClass.__new__.keys`.
+- Instances are created from config using those keys (case-insensitive lookup).
 """
 
 from __future__ import annotations
 
+import importlib
+import inspect
+import pkgutil
 from pathlib import Path
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Iterable, Optional, Type
 
 from SYS.logger import debug
 from Store._base import Store as BaseStore
-from Store.Folder import Folder
-from Store.HydrusNetwork import HydrusNetwork
+
+
+def _normalize_store_type(value: str) -> str:
+    return "".join(ch.lower() for ch in str(value or "") if ch.isalnum())
+
+
+def _normalize_config_key(value: str) -> str:
+    return str(value or "").strip().upper()
+
+
+def _get_case_insensitive(mapping: Dict[str, Any], key: str) -> Any:
+    if key in mapping:
+        return mapping[key]
+    desired = _normalize_config_key(key)
+    for k, v in mapping.items():
+        if _normalize_config_key(k) == desired:
+            return v
+    return None
+
+
+def _discover_store_classes() -> Dict[str, Type[BaseStore]]:
+    """Discover store classes from the Store package.
+
+    Convention:
+    - The store type key is the normalized class name (e.g. HydrusNetwork -> hydrusnetwork).
+    """
+    import Store as store_pkg
+
+    discovered: Dict[str, Type[BaseStore]] = {}
+    for module_info in pkgutil.iter_modules(store_pkg.__path__):
+        module_name = module_info.name
+        if module_name in {"__init__", "_base", "registry"}:
+            continue
+        module = importlib.import_module(f"Store.{module_name}")
+        for _, obj in vars(module).items():
+            if not inspect.isclass(obj):
+                continue
+            if obj is BaseStore:
+                continue
+            if not issubclass(obj, BaseStore):
+                continue
+            discovered[_normalize_store_type(obj.__name__)] = obj
+    return discovered
+
+
+def _required_keys_for(store_cls: Type[BaseStore]) -> list[str]:
+    keys = getattr(store_cls.__new__, "keys", None)
+    if keys is None:
+        return []
+    if isinstance(keys, dict):
+        return [str(k) for k in keys.keys()]
+    if isinstance(keys, (list, tuple, set, frozenset)):
+        return [str(k) for k in keys]
+    if isinstance(keys, str):
+        return [keys]
+    raise TypeError(f"Unsupported __new__.keys type for {store_cls.__name__}: {type(keys)}")
+
+
+def _build_kwargs(store_cls: Type[BaseStore], instance_name: str, instance_config: Any) -> Dict[str, Any]:
+    if isinstance(instance_config, dict):
+        cfg_dict = dict(instance_config)
+    else:
+        cfg_dict = {}
+
+    required = _required_keys_for(store_cls)
+
+    # If NAME is required but not present, allow the instance key to provide it.
+    if any(_normalize_config_key(k) == "NAME" for k in required) and _get_case_insensitive(cfg_dict, "NAME") is None:
+        cfg_dict["NAME"] = str(instance_name)
+
+    kwargs: Dict[str, Any] = {}
+    missing: list[str] = []
+    for key in required:
+        value = _get_case_insensitive(cfg_dict, key)
+        if value is None or value == "":
+            missing.append(str(key))
+            continue
+        kwargs[str(key)] = value
+
+    if missing:
+        raise ValueError(f"Missing required keys for {store_cls.__name__}: {', '.join(missing)}")
+    return kwargs
 
 
 class Store:
@@ -42,43 +118,36 @@ class Store:
         if not isinstance(store_cfg, dict):
             store_cfg = {}
 
-        folder_cfg = store_cfg.get("folder")
-        if isinstance(folder_cfg, dict):
-            for name, value in folder_cfg.items():
-                path_val: Optional[str]
-                if isinstance(value, dict):
-                    path_val = value.get("path")
-                elif isinstance(value, (str, bytes)):
-                    path_val = str(value)
-                else:
-                    path_val = None
-                if not path_val:
-                    continue
-                location = str(Path(str(path_val)).expanduser())
-                self._backends[str(name)] = Folder(location=location, name=str(name))
-
-        hydrus_cfg = store_cfg.get("hydrusnetwork")
-        if isinstance(hydrus_cfg, dict):
-            for instance_name, instance_config in hydrus_cfg.items():
-                if not isinstance(instance_config, dict):
-                    continue
-                api_key = instance_config.get("Hydrus-Client-API-Access-Key")
-                url = instance_config.get("url")
-                if not api_key or not url:
-                    continue
-                try:
-                    self._backends[str(instance_name)] = HydrusNetwork(
-                        instance_name=str(instance_name),
-                        api_key=str(api_key),
-                        url=str(url),
-                    )
-                except Exception as exc:
-                    if not self._suppress_debug:
-                        debug(f"[Store] Failed to register Hydrus instance '{instance_name}': {exc}")
+        classes_by_type = _discover_store_classes()
+        for raw_store_type, instances in store_cfg.items():
+            if not isinstance(instances, dict):
+                continue
+            store_type = _normalize_store_type(str(raw_store_type))
+            store_cls = classes_by_type.get(store_type)
+            if store_cls is None:
+                if not self._suppress_debug:
+                    debug(f"[Store] Unknown store type '{raw_store_type}'")
+                continue
+            for instance_name, instance_config in instances.items():
+                try:
+                    kwargs = _build_kwargs(store_cls, str(instance_name), instance_config)
+                    # Convenience normalization for filesystem-like paths.
+                    for key in list(kwargs.keys()):
+                        if _normalize_config_key(key) in {"PATH", "LOCATION"}:
+                            kwargs[key] = str(Path(str(kwargs[key])).expanduser())
+                    backend = store_cls(**kwargs)
+                    backend_name = str(kwargs.get("NAME") or instance_name)
+                    self._backends[backend_name] = backend
+                except Exception as exc:
+                    if not self._suppress_debug:
+                        debug(
+                            f"[Store] Failed to register {store_cls.__name__} instance '{instance_name}': {exc}"
+                        )
 
     def list_backends(self) -> list[str]:
         return sorted(self._backends.keys())
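
To make the `__new__.keys` convention concrete, a self-contained sketch (the `Dummy` class and `build_kwargs` are simplified stand-ins for `_required_keys_for`/`_build_kwargs` above, not part of the commit):

    from typing import Any, Dict

    class Dummy:
        def __new__(cls, *args: Any, **kwargs: Any) -> "Dummy":
            return super().__new__(cls)
        setattr(__new__, "keys", ("NAME", "PATH"))  # advertised required keys

        def __init__(self, NAME: str, PATH: str) -> None:
            self.NAME, self.PATH = NAME, PATH

    def build_kwargs(store_cls: type, instance_name: str, block: Dict[str, Any]) -> Dict[str, Any]:
        required = [str(k) for k in getattr(store_cls.__new__, "keys", ())]
        merged = {str(k).upper(): v for k, v in dict(block).items()}  # case-insensitive lookup
        merged.setdefault("NAME", instance_name)  # the instance key may supply NAME
        missing = [k for k in required if not merged.get(k.upper())]
        if missing:
            raise ValueError(f"Missing required keys: {', '.join(missing)}")
        return {k: merged[k.upper()] for k in required}

    print(build_kwargs(Dummy, "default", {"path": "C:/Media"}))
    # -> {'NAME': 'default', 'PATH': 'C:/Media'}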


@@ -99,26 +99,18 @@ class ExportModal(ModalScreen):
         return (target_type, format_options)
 
     def _get_library_options(self) -> list:
-        """Get available library options from config.json."""
+        """Get available library options from config.conf."""
         options = [("Local", "local")]
 
         try:
-            # Try to load config
-            config_path = Path(__file__).parent.parent / "config.json"
-            if not config_path.exists():
-                return options
-            with open(config_path, 'r') as f:
-                config = json.load(f)
-            # Check if Hydrus is configured AND available (supports both new and old format)
-            from config import get_hydrus_instance
-            hydrus_instance = get_hydrus_instance(config, "home")
-            if self.hydrus_available and hydrus_instance and hydrus_instance.get("key") and hydrus_instance.get("url"):
+            from config import load_config, get_hydrus_access_key, get_hydrus_url, get_debrid_api_key
+            config = load_config()
+
+            hydrus_url = (get_hydrus_url(config, "home") or "").strip()
+            hydrus_key = (get_hydrus_access_key(config, "home") or "").strip()
+            if self.hydrus_available and hydrus_url and hydrus_key:
                 options.append(("Hydrus Network", "hydrus"))
-            # Check if Debrid is configured AND available (supports both new and old format)
-            from config import get_debrid_api_key
+
             debrid_api_key = get_debrid_api_key(config)
             if self.debrid_available and debrid_api_key:
                 options.append(("Debrid", "debrid"))


@@ -34,6 +34,28 @@ def _extract_title_tag(tags: List[str]) -> Optional[str]:
     return None
 
+def _extract_item_tags(res: Any) -> List[str]:
+    if isinstance(res, models.PipeObject):
+        raw = getattr(res, "tag", None)
+    elif isinstance(res, dict):
+        raw = res.get("tag")
+    else:
+        raw = None
+    if isinstance(raw, list):
+        return [str(t) for t in raw if t is not None]
+    if isinstance(raw, str) and raw.strip():
+        return [raw]
+    return []
+
+
+def _set_item_tags(res: Any, tags: List[str]) -> None:
+    if isinstance(res, models.PipeObject):
+        res.tag = tags
+    elif isinstance(res, dict):
+        res["tag"] = tags
+
+
 def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
     """Update result object/dict title fields and columns in-place."""
     if not title_value:
@@ -332,6 +354,8 @@ class Add_Tag(Cmdlet):
         store_override = parsed.get("store")
 
+        store_registry = Store(config)
+
         for res in results:
             store_name: Optional[str]
             raw_hash: Optional[str]
@@ -350,8 +374,90 @@ class Add_Tag(Cmdlet):
                 continue
 
             if not store_name:
-                log("[add_tag] Error: Missing -store and item has no store field", file=sys.stderr)
-                return 1
+                store_name = None
+
+            # If the item isn't in a configured store backend yet (e.g., store=PATH) but has a local file,
+            # treat add-tag as a pipeline mutation (carry tags forward for add-file) instead of a store write.
+            if not store_override:
+                store_name_str = str(store_name) if store_name is not None else ""
+                local_mode_requested = (not store_name_str) or (store_name_str.upper() == "PATH") or (store_name_str.lower() == "local")
+                is_known_backend = bool(store_name_str) and store_registry.is_available(store_name_str)
+                if local_mode_requested and raw_path:
+                    try:
+                        if Path(str(raw_path)).expanduser().exists():
+                            existing_tag_list = _extract_item_tags(res)
+                            existing_lower = {t.lower() for t in existing_tag_list if isinstance(t, str)}
+                            item_tag_to_add = list(tag_to_add)
+                            item_tag_to_add = collapse_namespace_tag(item_tag_to_add, "title", prefer="last")
+                            if duplicate_arg:
+                                parts = str(duplicate_arg).split(':')
+                                source_ns = ""
+                                targets: list[str] = []
+                                if len(parts) > 1:
+                                    source_ns = parts[0]
+                                    targets = [t.strip() for t in parts[1].split(',') if t.strip()]
+                                else:
+                                    parts2 = str(duplicate_arg).split(',')
+                                    if len(parts2) > 1:
+                                        source_ns = parts2[0]
+                                        targets = [t.strip() for t in parts2[1:] if t.strip()]
+                                if source_ns and targets:
+                                    source_prefix = source_ns.lower() + ":"
+                                    for t in existing_tag_list:
+                                        if not t.lower().startswith(source_prefix):
+                                            continue
+                                        value = t.split(":", 1)[1]
+                                        for target_ns in targets:
+                                            new_tag = f"{target_ns}:{value}"
+                                            if new_tag.lower() not in existing_lower:
+                                                item_tag_to_add.append(new_tag)
+                            removed_namespace_tag: list[str] = []
+                            for new_tag in item_tag_to_add:
+                                if not isinstance(new_tag, str) or ":" not in new_tag:
+                                    continue
+                                ns = new_tag.split(":", 1)[0].strip()
+                                if not ns:
+                                    continue
+                                ns_prefix = ns.lower() + ":"
+                                for t in existing_tag_list:
+                                    if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
+                                        removed_namespace_tag.append(t)
+                            removed_namespace_tag = sorted({t for t in removed_namespace_tag})
+                            actual_tag_to_add = [
+                                t
+                                for t in item_tag_to_add
+                                if isinstance(t, str) and t.lower() not in existing_lower
+                            ]
+                            updated_tag_list = [t for t in existing_tag_list if t not in removed_namespace_tag]
+                            updated_tag_list.extend(actual_tag_to_add)
+                            _set_item_tags(res, updated_tag_list)
+                            final_title = _extract_title_tag(updated_tag_list)
+                            _apply_title_to_result(res, final_title)
+                            total_added += len(actual_tag_to_add)
+                            total_modified += 1 if (removed_namespace_tag or actual_tag_to_add) else 0
+                            ctx.emit(res)
+                            continue
+                    except Exception:
+                        pass
+                if local_mode_requested:
+                    log("[add_tag] Error: Missing usable local path for tagging (or provide -store)", file=sys.stderr)
+                    return 1
+                if store_name_str and not is_known_backend:
+                    log(f"[add_tag] Error: Unknown store '{store_name_str}'. Available: {store_registry.list_backends()}", file=sys.stderr)
+                    return 1
 
             resolved_hash = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
             if not resolved_hash and raw_path:
@@ -371,7 +477,7 @@ class Add_Tag(Cmdlet):
                 continue
 
             try:
-                backend = Store(config)[str(store_name)]
+                backend = store_registry[str(store_name)]
             except Exception as exc:
                 log(f"[add_tag] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
                 return 1
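
A worked example of the `-duplicate` syntax handled above: `title:album,artist` copies each existing `title:` value into the `album:` and `artist:` namespaces. Simplified sketch (it skips the case-folded dedup against existing tags that the cmdlet performs):

    duplicate_arg = "title:album,artist"
    existing = ["title:Dune", "creator:Frank Herbert"]

    source_ns, _, targets_raw = duplicate_arg.partition(":")
    targets = [t.strip() for t in targets_raw.split(",") if t.strip()]

    prefix = source_ns.lower() + ":"
    copied = [
        f"{ns}:{tag.split(':', 1)[1]}"
        for tag in existing
        if tag.lower().startswith(prefix)
        for ns in targets
    ]
    print(copied)  # ['album:Dune', 'artist:Dune']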


@@ -35,6 +35,28 @@ def _extract_title_tag(tags: List[str]) -> Optional[str]:
     return None
 
+def _extract_item_tags(res: Any) -> List[str]:
+    if isinstance(res, models.PipeObject):
+        raw = getattr(res, "tag", None)
+    elif isinstance(res, dict):
+        raw = res.get("tag")
+    else:
+        raw = None
+    if isinstance(raw, list):
+        return [str(t) for t in raw if t is not None]
+    if isinstance(raw, str) and raw.strip():
+        return [raw]
+    return []
+
+
+def _set_item_tags(res: Any, tags: List[str]) -> None:
+    if isinstance(res, models.PipeObject):
+        res.tag = tags
+    elif isinstance(res, dict):
+        res["tag"] = tags
+
+
 def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
     """Update result object/dict title fields and columns in-place."""
     if not title_value:
@@ -304,6 +326,8 @@ class Add_Tag(Cmdlet):
         store_override = parsed.get("store")
 
+        store_registry = Store(config)
+
        for res in results:
             store_name: Optional[str]
             raw_hash: Optional[str]
@@ -322,8 +346,90 @@ class Add_Tag(Cmdlet):
                 continue
 
             if not store_name:
-                log("[add_tags] Error: Missing -store and item has no store field", file=sys.stderr)
-                return 1
+                store_name = None
+
+            # If the item isn't in a configured store backend yet (e.g., store=PATH) but has a local file,
+            # treat add-tags as a pipeline mutation (carry tags forward for add-file) instead of a store write.
+            if not store_override:
+                store_name_str = str(store_name) if store_name is not None else ""
+                local_mode_requested = (not store_name_str) or (store_name_str.upper() == "PATH") or (store_name_str.lower() == "local")
+                is_known_backend = bool(store_name_str) and store_registry.is_available(store_name_str)
+                if local_mode_requested and raw_path:
+                    try:
+                        if Path(str(raw_path)).expanduser().exists():
+                            existing_tags_list = _extract_item_tags(res)
+                            existing_lower = {t.lower() for t in existing_tags_list if isinstance(t, str)}
+                            item_tags_to_add = list(tags_to_add)
+                            item_tags_to_add = collapse_namespace_tags(item_tags_to_add, "title", prefer="last")
+                            if duplicate_arg:
+                                parts = str(duplicate_arg).split(':')
+                                source_ns = ""
+                                targets: list[str] = []
+                                if len(parts) > 1:
+                                    source_ns = parts[0]
+                                    targets = [t.strip() for t in parts[1].split(',') if t.strip()]
+                                else:
+                                    parts2 = str(duplicate_arg).split(',')
+                                    if len(parts2) > 1:
+                                        source_ns = parts2[0]
+                                        targets = [t.strip() for t in parts2[1:] if t.strip()]
+                                if source_ns and targets:
+                                    source_prefix = source_ns.lower() + ":"
+                                    for t in existing_tags_list:
+                                        if not t.lower().startswith(source_prefix):
+                                            continue
+                                        value = t.split(":", 1)[1]
+                                        for target_ns in targets:
+                                            new_tag = f"{target_ns}:{value}"
+                                            if new_tag.lower() not in existing_lower:
+                                                item_tags_to_add.append(new_tag)
+                            removed_namespace_tags: list[str] = []
+                            for new_tag in item_tags_to_add:
+                                if not isinstance(new_tag, str) or ":" not in new_tag:
+                                    continue
+                                ns = new_tag.split(":", 1)[0].strip()
+                                if not ns:
+                                    continue
+                                ns_prefix = ns.lower() + ":"
+                                for t in existing_tags_list:
+                                    if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
+                                        removed_namespace_tags.append(t)
+                            removed_namespace_tags = sorted({t for t in removed_namespace_tags})
+                            actual_tags_to_add = [
+                                t
+                                for t in item_tags_to_add
+                                if isinstance(t, str) and t.lower() not in existing_lower
+                            ]
+                            updated_tags_list = [t for t in existing_tags_list if t not in removed_namespace_tags]
+                            updated_tags_list.extend(actual_tags_to_add)
+                            _set_item_tags(res, updated_tags_list)
+                            final_title = _extract_title_tag(updated_tags_list)
+                            _apply_title_to_result(res, final_title)
+                            total_added += len(actual_tags_to_add)
+                            total_modified += 1 if (removed_namespace_tags or actual_tags_to_add) else 0
+                            ctx.emit(res)
+                            continue
+                    except Exception:
+                        pass
+                if local_mode_requested:
+                    log("[add_tags] Error: Missing usable local path for tagging (or provide -store)", file=sys.stderr)
+                    return 1
+                if store_name_str and not is_known_backend:
+                    log(f"[add_tags] Error: Unknown store '{store_name_str}'. Available: {store_registry.list_backends()}", file=sys.stderr)
+                    return 1
 
             resolved_hash = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
             if not resolved_hash and raw_path:
@@ -343,7 +449,7 @@ class Add_Tag(Cmdlet):
                 continue
 
             try:
-                backend = Store(config)[str(store_name)]
+                backend = store_registry[str(store_name)]
             except Exception as exc:
                 log(f"[add_tags] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
                 return 1


@@ -267,15 +267,13 @@ class Download_File(Cmdlet):
             log(f"Invalid storage location: {e}", file=sys.stderr)
             return None
 
-        # Priority 2: Config outfile
-        if config and config.get("outfile"):
-            try:
-                return Path(config["outfile"]).expanduser()
-            except Exception:
-                pass
-
-        # Priority 3: Default (home/Downloads)
-        final_output_dir = Path.home() / "Downloads"
+        # Priority 2: Config default output/temp directory
+        try:
+            from config import resolve_output_dir
+            final_output_dir = resolve_output_dir(config)
+        except Exception:
+            final_output_dir = Path.home() / "Downloads"
         debug(f"Using default directory: {final_output_dir}")
 
         # Ensure directory exists


@@ -338,6 +338,30 @@ def _resolve_entry_and_path(info: Dict[str, Any], output_dir: Path) -> tuple[Dic
     raise FileNotFoundError("yt-dlp did not report a downloaded media file")
 
+def _resolve_entries_and_paths(info: Dict[str, Any], output_dir: Path) -> List[tuple[Dict[str, Any], Path]]:
+    resolved: List[tuple[Dict[str, Any], Path]] = []
+    seen: set[str] = set()
+    for entry in _iter_download_entries(info):
+        chosen: Optional[Path] = None
+        for candidate in _candidate_paths(entry, output_dir):
+            if candidate.is_file():
+                chosen = candidate
+                break
+            if not candidate.is_absolute():
+                maybe = output_dir / candidate
+                if maybe.is_file():
+                    chosen = maybe
+                    break
+        if chosen is None:
+            continue
+        key = str(chosen.resolve())
+        if key in seen:
+            continue
+        seen.add(key)
+        resolved.append((entry, chosen))
+    return resolved
+
+
 def _extract_sha256(info: Dict[str, Any]) -> Optional[str]:
     for payload in [info] + info.get("entries", []):
         if not isinstance(payload, dict):
@@ -679,7 +703,7 @@ def download_media(
     opts: DownloadOptions,
     *,
     debug_logger: Optional[DebugLogger] = None,
-) -> DownloadMediaResult:
+) -> Any:
     """Download media from URL using yt-dlp or direct HTTP download.
 
     Args:
@@ -935,6 +959,48 @@ def download_media(
         },
     )
 
+    # Playlist/album handling: resolve ALL downloaded entries and return multiple results.
+    # The cmdlet will emit one PipeObject per downloaded file.
+    if info_dict.get("entries") and not opts.no_playlist:
+        resolved = _resolve_entries_and_paths(info_dict, opts.output_dir)
+        if resolved:
+            results: List[DownloadMediaResult] = []
+            for entry, media_path in resolved:
+                hash_value = _extract_sha256(entry) or _extract_sha256(info_dict)
+                if not hash_value:
+                    try:
+                        hash_value = sha256_file(media_path)
+                    except OSError:
+                        hash_value = None
+                tags: List[str] = []
+                if extract_ytdlp_tags:
+                    try:
+                        tags = extract_ytdlp_tags(entry)
+                    except Exception as e:
+                        log(f"Error extracting tags: {e}", file=sys.stderr)
+                source_url = (
+                    entry.get("webpage_url")
+                    or entry.get("original_url")
+                    or entry.get("url")
+                    or opts.url
+                )
+                results.append(
+                    DownloadMediaResult(
+                        path=media_path,
+                        info=entry,
+                        tag=tags,
+                        source_url=source_url,
+                        hash_value=hash_value,
+                    )
+                )
+            if not opts.quiet:
+                debug(f"✓ Downloaded playlist items: {len(results)}")
+            return results
+
     try:
         entry, media_path = _resolve_entry_and_path(info_dict, opts.output_dir)
     except FileNotFoundError as exc:
@@ -1009,7 +1075,7 @@ def _download_with_timeout(opts: DownloadOptions, timeout_seconds: int = 300) ->
         timeout_seconds: Max seconds to wait (default 300s = 5 min)
 
     Returns:
-        DownloadMediaResult
+        DownloadMediaResult or List[DownloadMediaResult]
 
     Raises:
         DownloadError: If timeout exceeded
@@ -1333,16 +1399,20 @@ class Download_Media(Cmdlet):
                     debug(f"Starting download with 5-minute timeout...")
                     result_obj = _download_with_timeout(opts, timeout_seconds=300)
                     debug(f"Download completed, building pipe object...")
-                    pipe_obj_dict = self._build_pipe_object(result_obj, url, opts)
-                    debug(f"Emitting result to pipeline...")
-                    pipeline_context.emit(pipe_obj_dict)
-
-                    # Automatically register url with local library
-                    if pipe_obj_dict.get("url"):
-                        pipe_obj = coerce_to_pipe_object(pipe_obj_dict)
-                        register_url_with_local_library(pipe_obj, config)
-
-                    downloaded_count += 1
+                    # Emit one PipeObject per downloaded file (playlists/albums return a list)
+                    results_to_emit = result_obj if isinstance(result_obj, list) else [result_obj]
+                    debug(f"Emitting {len(results_to_emit)} result(s) to pipeline...")
+                    for downloaded in results_to_emit:
+                        pipe_obj_dict = self._build_pipe_object(downloaded, url, opts)
+                        pipeline_context.emit(pipe_obj_dict)
+
+                        # Automatically register url with local library
+                        if pipe_obj_dict.get("url"):
+                            pipe_obj = coerce_to_pipe_object(pipe_obj_dict)
+                            register_url_with_local_library(pipe_obj, config)
+
+                    downloaded_count += len(results_to_emit)
                     debug("✓ Downloaded and emitted")
 
                 except DownloadError as e:
@@ -1373,18 +1443,15 @@ class Download_Media(Cmdlet):
             log(f"Invalid storage location: {e}", file=sys.stderr)
             return None
 
-        # Priority 2: Config outfile
-        if config and config.get("outfile"):
-            try:
-                return Path(config["outfile"]).expanduser()
-            except Exception:
-                pass
-
-        # Priority 3: Default (home/Videos)
-        final_output_dir = Path.home() / "Videos"
+        # Priority 2: Config default output/temp directory
+        try:
+            from config import resolve_output_dir
+            final_output_dir = resolve_output_dir(config)
+        except Exception:
+            final_output_dir = Path.home() / "Videos"
         debug(f"Using default directory: {final_output_dir}")
 
-        # Ensure directory exists
         try:
             final_output_dir.mkdir(parents=True, exist_ok=True)
         except Exception as e:
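
Since `download_media` may now return either a single `DownloadMediaResult` or a list of them, any other caller needs the same list-or-single normalization this cmdlet applies; a minimal sketch:

    result_obj = download_media(opts)  # DownloadMediaResult or List[DownloadMediaResult]
    for downloaded in (result_obj if isinstance(result_obj, list) else [result_obj]):
        print(downloaded.path)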


@@ -8,6 +8,9 @@ import sys
 from SYS.logger import log
 import subprocess as _subprocess
 import shutil as _shutil
+import re as _re
+
+from config import resolve_output_dir
 
 from ._shared import (
     Cmdlet,
@@ -34,9 +37,7 @@
 try:
     from metadata import (
         read_tags_from_file,
-        write_tags_to_file,
         dedup_tags_by_namespace,
-        write_metadata
     )
     HAS_METADATA_API = True
 except ImportError:
@@ -105,8 +106,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     source_hashes: List[str] = []
     source_url: List[str] = []
     source_tags: List[str] = []  # NEW: collect tags from source files
-    source_relationships: List[str] = []  # NEW: collect relationships from source files
 
     for item in files_to_merge:
         raw_path = get_pipe_object_path(item)
         target_path = None
@@ -191,7 +190,11 @@
         output_path = output_override
     else:
         first_file = source_files[0]
-        output_path = first_file.parent / f"{first_file.stem} (merged).{_ext_for_format(output_format)}"
+        try:
+            base_dir = resolve_output_dir(config)
+        except Exception:
+            base_dir = first_file.parent
+        output_path = Path(base_dir) / f"{first_file.stem} (merged).{_ext_for_format(output_format)}"
 
     # Ensure output directory exists
     output_path.parent.mkdir(parents=True, exist_ok=True)
@@ -217,61 +220,14 @@
     merged_tags: List[str] = [f"title:{output_path.stem}"]
 
-    # Create .tag sidecar file for the merged output using unified API
-    tags_path = output_path.with_suffix(output_path.suffix + '.tag')
-    try:
-        # Merge tags from source files using metadata API
-        if source_tags and HAS_METADATA_API:
-            # Use dedup function to normalize and deduplicate
-            merged_source_tags = dedup_tags_by_namespace(source_tags)
-            merged_tags.extend(merged_source_tags)
-            log(f"Merged {len(merged_source_tags)} unique tags from source files", file=sys.stderr)
-        elif source_tags:
-            # Fallback: simple deduplication if metadata API unavailable
-            merged_tags.extend(list(dict.fromkeys(source_tags)))  # Preserve order, remove duplicates
-
-        # Write merged tags to sidecar file
-        if HAS_METADATA_API and write_tags_to_file:
-            # Use unified API for file writing
-            source_hashes_list = source_hashes if source_hashes else None
-            source_url_list = source_url if source_url else None
-            write_tags_to_file(tags_path, merged_tags, source_hashes_list, source_url_list)
-        else:
-            # Fallback: manual file writing
-            tags_lines = []
-            # Add hash first (if available)
-            if source_hashes:
-                tags_lines.append(f"hash:{source_hashes[0]}")
-            # Add regular tags
-            tags_lines.extend(merged_tags)
-            # Add known url
-            if source_url:
-                for url in source_url:
-                    tags_lines.append(f"url:{url}")
-            # Add relationships (if available)
-            if source_relationships:
-                for rel in source_relationships:
-                    tags_lines.append(f"relationship:{rel}")
-            with open(tags_path, 'w', encoding='utf-8') as f:
-                f.write('\n'.join(tags_lines) + '\n')
-        log(f"Created sidecar: {tags_path.name}", file=sys.stderr)
-
-        # Also create .metadata file using centralized function
-        try:
-            if HAS_METADATA_API and write_metadata:
-                write_metadata(output_path, source_hashes[0] if source_hashes else None, source_url, source_relationships)
-                log(f"Created metadata: {output_path.name}.metadata", file=sys.stderr)
-        except Exception as e:
-            log(f"Warning: Could not create metadata file: {e}", file=sys.stderr)
-    except Exception as e:
-        log(f"Warning: Could not create sidecar: {e}", file=sys.stderr)
+    # Merge tags from source files into the emitted PipeObject only.
+    # Sidecar files (.tag/.metadata) are written only during explicit filesystem export (add-file to a path).
+    if source_tags and HAS_METADATA_API:
+        merged_source_tags = dedup_tags_by_namespace(source_tags)
+        merged_tags.extend(merged_source_tags)
+        log(f"Merged {len(merged_source_tags)} unique tags from source files", file=sys.stderr)
+    elif source_tags:
+        merged_tags.extend(list(dict.fromkeys(source_tags)))  # Preserve order, remove duplicates
 
     # Emit a PipeObject-compatible dict so the merged file can be piped to next command
     try:
@@ -287,6 +243,7 @@
             tag=merged_tags,
             url=source_url,
             media_kind=file_kind,
+            store="PATH",
         )
         # Clear previous results to ensure only the merged file is passed down
         ctx.clear_last_result()
@@ -424,6 +381,33 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
             logger.info(f"[merge-file] Chapter: {title} @ {chapters[-1]['time_str']} (duration: {duration_sec:.2f}s)")
             current_time_ms += int(duration_sec * 1000)
 
+        # If these came from a playlist/album, titles often look like:
+        #   "Book Name - Chapter"
+        # If *all* titles share the same "Book Name" prefix, strip it.
+        if len(chapters) >= 2:
+            split_re = _re.compile(r"^(?P<prefix>.+?)\s+-\s+(?P<chapter>.+)$")
+            prefixes: List[str] = []
+            stripped_titles: List[str] = []
+            all_match = True
+            for ch in chapters:
+                raw_title = str(ch.get('title') or '').strip()
+                m = split_re.match(raw_title)
+                if not m:
+                    all_match = False
+                    break
+                prefix = m.group('prefix').strip()
+                chapter_title = m.group('chapter').strip()
+                if not prefix or not chapter_title:
+                    all_match = False
+                    break
+                prefixes.append(prefix.casefold())
+                stripped_titles.append(chapter_title)
+            if all_match and prefixes and len(set(prefixes)) == 1:
+                for idx, ch in enumerate(chapters):
+                    ch['title'] = stripped_titles[idx]
+                logger.info(f"[merge-file] Stripped common title prefix for chapters: {prefixes[0]}")
+
     # Step 2: Create concat demuxer file
     concat_file = output.parent / f".concat_{output.stem}.txt"
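
To illustrate the prefix-stripping rule: it fires only when every chapter title parses as `Prefix - Chapter` and the (case-folded) prefixes are all identical. Standalone sketch with made-up titles:

    import re

    titles = ["Dune - Chapter 1", "Dune - Chapter 2", "DUNE - Epilogue"]
    split_re = re.compile(r"^(?P<prefix>.+?)\s+-\s+(?P<chapter>.+)$")

    matches = [split_re.match(t) for t in titles]
    if all(matches) and len({m.group("prefix").strip().casefold() for m in matches}) == 1:
        titles = [m.group("chapter").strip() for m in matches]

    print(titles)  # ['Chapter 1', 'Chapter 2', 'Epilogue']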

config.py

@@ -1,19 +1,271 @@
-"""Unified configuration helpers for downlow."""
+"""Unified configuration helpers.
+
+Configuration is defined exclusively via the modular `.conf` format.
+- Required: `temp`
+- Optional: stores, providers, and other settings
+- Modular: optional fragments in `config.d/*.conf` are merged in lexicographic order
+"""
 
 from __future__ import annotations
 
-import json
+import re
 from pathlib import Path
 from typing import Any, Dict, Optional
-from pathlib import Path
 
 from SYS.logger import log
 
-DEFAULT_CONFIG_FILENAME = "config.json"
+DEFAULT_CONFIG_FILENAME = "config.conf"
 SCRIPT_DIR = Path(__file__).resolve().parent
 _CONFIG_CACHE: Dict[str, Dict[str, Any]] = {}
 
+
+def _strip_inline_comment(line: str) -> str:
+    # Keep it simple: only strip full-line comments (lines starting with '#' or ';').
+    # Users can always quote values that contain '#' or ';'.
+    stripped = line.strip()
+    if not stripped:
+        return ""
+    if stripped.startswith("#") or stripped.startswith(";"):
+        return ""
+    return line
def _parse_scalar(value: str) -> Any:
v = value.strip()
if not v:
return ""
if (v.startswith('"') and v.endswith('"')) or (v.startswith("'") and v.endswith("'")):
return v[1:-1]
low = v.lower()
if low in {"true", "yes", "on", "1"}:
return True
if low in {"false", "no", "off", "0"}:
return False
if re.fullmatch(r"-?\d+", v):
try:
return int(v)
except Exception:
return v
if re.fullmatch(r"-?\d+\.\d+", v):
try:
return float(v)
except Exception:
return v
return v
def _set_nested(d: Dict[str, Any], dotted_key: str, value: Any) -> None:
parts = [p for p in dotted_key.split(".") if p]
if not parts:
return
cur: Dict[str, Any] = d
for p in parts[:-1]:
nxt = cur.get(p)
if not isinstance(nxt, dict):
nxt = {}
cur[p] = nxt
cur = nxt
cur[parts[-1]] = value
def _merge_dict_inplace(base: Dict[str, Any], patch: Dict[str, Any]) -> Dict[str, Any]:
for k, v in patch.items():
if isinstance(v, dict) and isinstance(base.get(k), dict):
_merge_dict_inplace(base[k], v) # type: ignore[index]
else:
base[k] = v
return base
def _apply_conf_block(config: Dict[str, Any], kind: str, subtype: str, block: Dict[str, Any]) -> None:
kind_l = str(kind).strip().lower()
subtype_l = str(subtype).strip().lower()
if kind_l == "store":
# Store instances are keyed by NAME (preferred). If a block uses `name=...`,
# normalize it into NAME to keep a single canonical key.
name = block.get("NAME")
if not name:
name = block.get("name")
if name:
block = dict(block)
block.pop("name", None)
block["NAME"] = name
if not name:
return
name_l = str(name).strip().lower()
payload = dict(block)
store = config.setdefault("store", {})
if not isinstance(store, dict):
config["store"] = {}
store = config["store"]
bucket = store.setdefault(subtype_l, {})
if not isinstance(bucket, dict):
store[subtype_l] = {}
bucket = store[subtype_l]
existing = bucket.get(name_l)
if isinstance(existing, dict):
_merge_dict_inplace(existing, payload)
else:
bucket[name_l] = payload
return
if kind_l == "provider":
provider_name = str(subtype).strip().lower()
provider = config.setdefault("provider", {})
if not isinstance(provider, dict):
config["provider"] = {}
provider = config["provider"]
existing = provider.get(provider_name)
if isinstance(existing, dict):
_merge_dict_inplace(existing, block)
else:
provider[provider_name] = dict(block)
return
def parse_conf_text(text: str, *, base: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""Parse a lightweight .conf format into the app's config dict.
Supported patterns:
- Top-level key/value: temp="C:\\Users\\Me\\Downloads"
- Sections: [store=folder] + name/path lines
- Sections: [store=hydrusnetwork] + name/access key/url lines
- Sections: [provider=OpenLibrary] + email/password lines
- Dotted keys: store.folder.default.path="C:\\Media" (optional)
"""
config: Dict[str, Any] = dict(base or {})
current_kind: Optional[str] = None
current_subtype: Optional[str] = None
current_block: Dict[str, Any] = {}
def flush() -> None:
nonlocal current_kind, current_subtype, current_block
if current_kind and current_subtype and current_block:
_apply_conf_block(config, current_kind, current_subtype, current_block)
current_kind = None
current_subtype = None
current_block = {}
for raw_line in text.splitlines():
line = _strip_inline_comment(raw_line)
if not line.strip():
continue
stripped = line.strip()
if stripped.startswith("[") and stripped.endswith("]"):
flush()
header = stripped[1:-1].strip()
if "=" in header:
k, v = header.split("=", 1)
current_kind = k.strip()
current_subtype = v.strip()
else:
# Unknown header style; ignore block
current_kind = None
current_subtype = None
continue
if "=" not in stripped:
continue
key, value = stripped.split("=", 1)
key = key.strip()
parsed_val = _parse_scalar(value)
if current_kind and current_subtype:
current_block[key] = parsed_val
else:
if "." in key:
_set_nested(config, key, parsed_val)
else:
config[key] = parsed_val
flush()
return config
def _load_conf_config(base_dir: Path, config_path: Path) -> Dict[str, Any]:
config: Dict[str, Any] = {}
raw = config_path.read_text(encoding="utf-8")
config = parse_conf_text(raw, base=config)
conf_dir = base_dir / "config.d"
if conf_dir.exists() and conf_dir.is_dir():
for frag in sorted(conf_dir.glob("*.conf")):
try:
frag_raw = frag.read_text(encoding="utf-8")
config = parse_conf_text(frag_raw, base=config)
except OSError as exc:
log(f"Failed to read {frag}: {exc}")
return config
def _format_conf_value(val: Any) -> str:
if isinstance(val, bool):
return "true" if val else "false"
if isinstance(val, (int, float)):
return str(val)
if val is None:
return '""'
s = str(val)
s = s.replace('"', '\\"')
return f'"{s}"'
def _serialize_conf(config: Dict[str, Any]) -> str:
lines: list[str] = []
# Top-level scalars first
for key in sorted(config.keys()):
if key in {"store", "provider"}:
continue
value = config.get(key)
if isinstance(value, dict):
continue
lines.append(f"{key}={_format_conf_value(value)}")
# Store blocks
store = config.get("store")
if isinstance(store, dict):
for subtype in sorted(store.keys()):
bucket = store.get(subtype)
if not isinstance(bucket, dict):
continue
for name in sorted(bucket.keys()):
block = bucket.get(name)
if not isinstance(block, dict):
continue
lines.append("")
lines.append(f"[store={subtype}]")
lines.append(f"name={_format_conf_value(name)}")
for k in sorted(block.keys()):
lines.append(f"{k}={_format_conf_value(block.get(k))}")
# Provider blocks
provider = config.get("provider")
if isinstance(provider, dict):
for prov in sorted(provider.keys()):
block = provider.get(prov)
if not isinstance(block, dict):
continue
lines.append("")
lines.append(f"[provider={prov}]")
for k in sorted(block.keys()):
lines.append(f"{k}={_format_conf_value(block.get(k))}")
return "\n".join(lines).rstrip() + "\n"
def _make_cache_key(config_dir: Optional[Path], filename: str, actual_path: Optional[Path]) -> str: def _make_cache_key(config_dir: Optional[Path], filename: str, actual_path: Optional[Path]) -> str:
if actual_path: if actual_path:
return str(actual_path.resolve()) return str(actual_path.resolve())
@@ -37,7 +289,7 @@ def get_hydrus_instance(config: Dict[str, Any], instance_name: str = "home") ->
Returns: Returns:
Dict with access key and URL, or None if not found Dict with access key and URL, or None if not found
""" """
# Try current format first: config["store"]["hydrusnetwork"]["home"] # Canonical: config["store"]["hydrusnetwork"]["home"]
store = config.get("store", {}) store = config.get("store", {})
if isinstance(store, dict): if isinstance(store, dict):
hydrusnetwork = store.get("hydrusnetwork", {}) hydrusnetwork = store.get("hydrusnetwork", {})
@@ -45,35 +297,14 @@ def get_hydrus_instance(config: Dict[str, Any], instance_name: str = "home") ->
instance = hydrusnetwork.get(instance_name) instance = hydrusnetwork.get(instance_name)
if isinstance(instance, dict): if isinstance(instance, dict):
return instance return instance
# Try legacy format: config["storage"]["hydrus"]
storage = config.get("storage", {})
if isinstance(storage, dict):
hydrus_config = storage.get("hydrus", {})
if isinstance(hydrus_config, dict):
instance = hydrus_config.get(instance_name)
if isinstance(instance, dict):
return instance
# Fall back to old format: config["HydrusNetwork"]
hydrus_network = config.get("HydrusNetwork")
if not isinstance(hydrus_network, dict):
return None
instance = hydrus_network.get(instance_name)
if isinstance(instance, dict):
return instance
return None return None
def get_hydrus_access_key(config: Dict[str, Any], instance_name: str = "home") -> Optional[str]: def get_hydrus_access_key(config: Dict[str, Any], instance_name: str = "home") -> Optional[str]:
"""Get Hydrus access key for an instance. """Get Hydrus access key for an instance.
Supports multiple formats: Config format:
- Current: config["store"]["hydrusnetwork"][name]["Hydrus-Client-API-Access-Key"] - config["store"]["hydrusnetwork"][name]["API"]
- Legacy: config["storage"]["hydrus"][name]["key"]
- Old: config["HydrusNetwork_Access_Key"]
Args: Args:
config: Configuration dict config: Configuration dict
@@ -84,26 +315,17 @@ def get_hydrus_access_key(config: Dict[str, Any], instance_name: str = "home") -
""" """
instance = get_hydrus_instance(config, instance_name) instance = get_hydrus_instance(config, instance_name)
if instance: if instance:
# Try current format key name key = instance.get("API")
key = instance.get("Hydrus-Client-API-Access-Key") return str(key).strip() if key else None
if key:
return str(key).strip() return None
# Try legacy key name
key = instance.get("key")
if key:
return str(key).strip()
# Fall back to old flat format
key = config.get("HydrusNetwork_Access_Key")
return str(key).strip() if key else None
def get_hydrus_url(config: Dict[str, Any], instance_name: str = "home") -> Optional[str]: def get_hydrus_url(config: Dict[str, Any], instance_name: str = "home") -> Optional[str]:
"""Get Hydrus URL for an instance. """Get Hydrus URL for an instance.
Supports both old flat format and new nested format: Config format:
- Old: config["HydrusNetwork_URL"] or constructed from IP/Port/HTTPS - config["store"]["hydrusnetwork"][name]["URL"]
- New: config["HydrusNetwork"][instance_name]["url"]
Args: Args:
config: Configuration dict config: Configuration dict
@@ -113,15 +335,8 @@ def get_hydrus_url(config: Dict[str, Any], instance_name: str = "home") -> Optio
URL string, or None if not found URL string, or None if not found
""" """
instance = get_hydrus_instance(config, instance_name) instance = get_hydrus_instance(config, instance_name)
url = instance.get("url") if instance else config.get("HydrusNetwork_URL") url = instance.get("URL") if instance else None
if url: # Check if not None and not empty return str(url).strip() if url else None
return str(url).strip()
# Build from IP/Port/HTTPS if not found
host = str(config.get("HydrusNetwork_IP") or "localhost").strip() or "localhost"
port = str(config.get("HydrusNetwork_Port") or "45869").strip()
scheme = "https" if str(config.get("HydrusNetwork_Use_HTTPS") or "").strip().lower() in {"1", "true", "yes", "on"} else "http"
authority = host if not (":" in host and not host.startswith("[")) else f"[{host}]"
return f"{scheme}://{authority}:{port}"
@@ -205,10 +420,10 @@ def get_local_storage_path(config: Dict[str, Any]) -> Optional[Path]:
def get_debrid_api_key(config: Dict[str, Any], service: str = "All-debrid") -> Optional[str]: def get_debrid_api_key(config: Dict[str, Any], service: str = "All-debrid") -> Optional[str]:
"""Get Debrid API key from config. """Get Debrid API key from config.
Supports both formats: Config format:
- New: config["storage"]["debrid"]["All-debrid"] - config["store"]["debrid"][<name>]["api_key"]
- Old: config["Debrid"]["All-debrid"] where <name> is the store name (e.g. "all-debrid")
Args: Args:
config: Configuration dict config: Configuration dict
@@ -217,21 +432,23 @@ def get_debrid_api_key(config: Dict[str, Any], service: str = "All-debrid") -> O
Returns: Returns:
API key string if found, None otherwise API key string if found, None otherwise
""" """
# Try new format first store = config.get("store", {})
storage = config.get("storage", {}) if not isinstance(store, dict):
if isinstance(storage, dict): return None
debrid_config = storage.get("debrid", {})
if isinstance(debrid_config, dict): debrid_config = store.get("debrid", {})
api_key = debrid_config.get(service) if not isinstance(debrid_config, dict):
if api_key: # Check if not None and not empty return None
return str(api_key).strip() if api_key else None
service_key = str(service).strip().lower()
# Fall back to old format entry = debrid_config.get(service_key)
debrid_config = config.get("Debrid", {})
if isinstance(debrid_config, dict): if isinstance(entry, dict):
api_key = debrid_config.get(service) api_key = entry.get("api_key")
if api_key: # Check if not None and not empty return str(api_key).strip() if api_key else None
return str(api_key).strip() if api_key else None
if isinstance(entry, str):
return entry.strip() or None
return None return None
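The lookup tolerates two entry shapes under `store.debrid`: a dict carrying `api_key`, or a bare string. A small sketch of both (store names illustrative):

```python
config = {
    "store": {
        "debrid": {
            "all-debrid": {"api_key": "KEY-1"},   # dict entry
            "real-debrid": "KEY-2",               # bare-string entry
        }
    }
}

# Service names are normalised to lowercase before the lookup,
# so the default "All-debrid" still finds the "all-debrid" entry.
assert get_debrid_api_key(config) == "KEY-1"
assert get_debrid_api_key(config, "Real-Debrid") == "KEY-2"
assert get_debrid_api_key(config, "premiumize") is None
```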
@@ -273,7 +490,7 @@ def get_provider_credentials(config: Dict[str, Any], provider: str) -> Optional[
def resolve_cookies_path(config: Dict[str, Any], script_dir: Optional[Path] = None) -> Optional[Path]: def resolve_cookies_path(config: Dict[str, Any], script_dir: Optional[Path] = None) -> Optional[Path]:
value = config.get("cookies") or config.get("Cookies_Path") value = config.get("cookies")
if value: if value:
candidate = Path(str(value)).expanduser() candidate = Path(str(value)).expanduser()
if candidate.is_file(): if candidate.is_file():
@@ -300,43 +517,18 @@ def load_config(config_dir: Optional[Path] = None, filename: str = DEFAULT_CONFI
if cache_key in _CONFIG_CACHE: if cache_key in _CONFIG_CACHE:
return _CONFIG_CACHE[cache_key] return _CONFIG_CACHE[cache_key]
try: if config_path.suffix.lower() != ".conf":
raw = config_path.read_text(encoding="utf-8") log(f"Unsupported config format: {config_path.name} (only .conf is supported)")
except FileNotFoundError:
# Try alternate filename if default not found
if filename == DEFAULT_CONFIG_FILENAME:
alt_path = base_dir / "downlow.json"
try:
raw = alt_path.read_text(encoding="utf-8")
config_path = alt_path
cache_key = _make_cache_key(config_dir, filename, alt_path)
except FileNotFoundError:
_CONFIG_CACHE[cache_key] = {}
return {}
except OSError as exc:
log(f"Failed to read {alt_path}: {exc}")
_CONFIG_CACHE[cache_key] = {}
return {}
else:
_CONFIG_CACHE[cache_key] = {}
return {}
except OSError as exc:
log(f"Failed to read {config_path}: {exc}")
_CONFIG_CACHE[cache_key] = {} _CONFIG_CACHE[cache_key] = {}
return {} return {}
raw = raw.strip()
if not raw:
_CONFIG_CACHE[cache_key] = {}
return {}
try: try:
data = json.loads(raw) data = _load_conf_config(base_dir, config_path)
except json.JSONDecodeError as exc: except FileNotFoundError:
log(f"Invalid JSON in {config_path}: {exc}")
_CONFIG_CACHE[cache_key] = {} _CONFIG_CACHE[cache_key] = {}
return {} return {}
if not isinstance(data, dict): except OSError as exc:
log(f"Expected object in {config_path}, got {type(data).__name__}") log(f"Failed to read {config_path}: {exc}")
_CONFIG_CACHE[cache_key] = {} _CONFIG_CACHE[cache_key] = {}
return {} return {}
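`_load_conf_config` itself sits outside this hunk. As a rough reconstruction — hypothetical, not the committed implementation — a loader for the documented format has to parse `key="value"` pairs, group `[store=...]`/`[provider=...]` sections (stores nesting one level deeper by their `name` key), and overlay any `config.d/*.conf` fragments:

```python
import re
from pathlib import Path
from typing import Any, Dict, Optional

_SECTION = re.compile(r"^\[(\w+)=([^\]]+)\]$")          # e.g. [store=folder]
_PAIR = re.compile(r'^([\w\-]+)\s*=\s*"?(.*?)"?\s*$')   # e.g. name="default"

def _parse_conf(text: str) -> Dict[str, Any]:
    root: Dict[str, Any] = {}
    sections: list = []          # (kind, subtype, pairs) in file order
    current: Optional[Dict[str, str]] = None
    for raw in text.splitlines():
        line = raw.strip()
        if not line or line.startswith("#"):
            continue
        if (m := _SECTION.match(line)):
            current = {}
            sections.append((m.group(1), m.group(2).strip().lower(), current))
        elif (m := _PAIR.match(line)):
            if current is None:
                root[m.group(1)] = m.group(2)   # top-level key, e.g. temp=...
            else:
                current[m.group(1)] = m.group(2)
    for kind, subtype, pairs in sections:
        name = pairs.pop("name", None)
        bucket = root.setdefault(kind, {}).setdefault(subtype, {})
        if name:
            bucket[name] = pairs                # stores nest by instance name
        else:
            bucket.update(pairs)                # providers keep flat keys
    return root

def _load_conf_config(base_dir: Path, config_path: Path) -> Dict[str, Any]:
    data = _parse_conf(config_path.read_text(encoding="utf-8"))
    # Shallow overlay of fragments; the real loader may merge more deeply.
    for fragment in sorted((base_dir / "config.d").glob("*.conf")):
        data.update(_parse_conf(fragment.read_text(encoding="utf-8")))
    return data
```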
@@ -360,25 +552,12 @@ def save_config(
) -> None: ) -> None:
base_dir = config_dir or SCRIPT_DIR base_dir = config_dir or SCRIPT_DIR
config_path = base_dir / filename config_path = base_dir / filename
# Load existing config to preserve keys that aren't being changed if config_path.suffix.lower() != ".conf":
raise RuntimeError(f"Unsupported config format: {config_path.name} (only .conf is supported)")
try: try:
existing_raw = config_path.read_text(encoding="utf-8") config_path.write_text(_serialize_conf(config), encoding="utf-8")
existing_data = json.loads(existing_raw.strip())
if isinstance(existing_data, dict):
# Merge: existing config as base, then overlay with new config
merged = existing_data.copy()
merged.update(config)
config = merged
except (FileNotFoundError, OSError, json.JSONDecodeError):
# File doesn't exist or is invalid, use provided config as-is
pass
try:
config_path.write_text(
json.dumps(config, ensure_ascii=False, indent=2, sort_keys=True) + "\n",
encoding="utf-8",
)
except OSError as exc: except OSError as exc:
raise RuntimeError(f"Failed to write config to {config_path}: {exc}") from exc raise RuntimeError(f"Failed to write config to {config_path}: {exc}") from exc

View File

@@ -26,67 +26,98 @@ _COOKIES_FILE_PATH: Optional[str] = None
def check_hydrus_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]: def check_hydrus_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
"""Check if Hydrus API is available by pinging it. """Check Hydrus availability by instantiating configured HydrusNetwork stores.
Args: HydrusNetwork now self-checks in its __init__ (it acquires a session key).
config: Application configuration dictionary If construction fails, the store is treated as unavailable and the error
message becomes the failure reason.
Returns:
Tuple of (is_available: bool, reason: Optional[str])
- (True, None) if Hydrus is available
- (False, reason) if Hydrus is unavailable with reason
""" """
try: store_config = config.get("store", {})
from API.HydrusNetwork import is_available as _is_hydrus_available hydrusnetwork = store_config.get("hydrusnetwork", {}) if isinstance(store_config, dict) else {}
logger.info("[Hydrus Health Check] Pinging Hydrus API...") if not isinstance(hydrusnetwork, dict) or not hydrusnetwork:
is_available, reason = _is_hydrus_available(config, use_cache=False) return False, "Not configured"
if is_available: from Store.HydrusNetwork import HydrusNetwork
logger.info("[Hydrus Health Check] Hydrus API is AVAILABLE")
return True, None any_ok = False
else: last_reason: Optional[str] = None
reason_str = f": {reason}" if reason else "" for instance_name, instance_config in hydrusnetwork.items():
logger.warning(f"[Hydrus Health Check] Hydrus API is UNAVAILABLE{reason_str}") if not isinstance(instance_config, dict):
return False, reason continue
url = instance_config.get("URL")
except Exception as e: access_key = instance_config.get("API")
error_msg = str(e) if not url or not access_key:
logger.error(f"[Hydrus Health Check] Error checking Hydrus availability: {error_msg}") last_reason = "Missing credentials"
return False, error_msg continue
try:
HydrusNetwork(NAME=str(instance_name), API=str(access_key), URL=str(url))
any_ok = True
except Exception as exc:
last_reason = str(exc)
if any_ok:
return True, None
return False, last_reason or "No reachable Hydrus instances"
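Callers only see the reduced tuple; e.g. (a sketch, assuming `load_config` from `config`):

```python
from config import load_config

ok, reason = check_hydrus_availability(load_config())
if not ok:
    print(f"Hydrus disabled: {reason}")   # e.g. "Not configured"
```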
def initialize_hydrus_health_check(config: Dict[str, Any], emit_debug: bool = True) -> Tuple[bool, Optional[str]]: def initialize_hydrus_health_check(config: Dict[str, Any], emit_debug: bool = True) -> Tuple[bool, Optional[str]]:
"""Initialize Hydrus health check at startup.""" """Initialize Hydrus health check at startup."""
global _SERVICE_STATE global _SERVICE_STATE
logger.info("[Startup] Starting Hydrus health check...") logger.info("[Startup] Starting Hydrus health check...")
is_available, reason = check_hydrus_availability(config)
_SERVICE_STATE["hydrus"]["available"] = is_available is_available = False
_SERVICE_STATE["hydrus"]["reason"] = reason reason: Optional[str] = None
_SERVICE_STATE["hydrus"]["complete"] = True
# Track individual Hydrus instances # Track individual Hydrus instances (per-instance construction to capture reasons)
_SERVICE_STATE["hydrusnetwork_stores"] = {}
try: try:
store_config = config.get("store", {}) store_config = config.get("store", {})
hydrusnetwork = store_config.get("hydrusnetwork", {}) hydrusnetwork = store_config.get("hydrusnetwork", {}) if isinstance(store_config, dict) else {}
for instance_name, instance_config in hydrusnetwork.items(): if isinstance(hydrusnetwork, dict):
if isinstance(instance_config, dict): from Store.HydrusNetwork import HydrusNetwork
url = instance_config.get("url") first_error: Optional[str] = None
access_key = instance_config.get("Hydrus-Client-API-Access-Key") for instance_name, instance_config in hydrusnetwork.items():
if url and access_key: if not isinstance(instance_config, dict):
_SERVICE_STATE["hydrusnetwork_stores"][instance_name] = { continue
"ok": is_available, url = instance_config.get("URL")
"url": url, access_key = instance_config.get("API")
"detail": reason if not is_available else "Connected"
} if not url or not access_key:
else:
_SERVICE_STATE["hydrusnetwork_stores"][instance_name] = { _SERVICE_STATE["hydrusnetwork_stores"][instance_name] = {
"ok": False, "ok": False,
"url": url or "Not configured", "url": url or "Not configured",
"detail": "Missing credentials" "detail": "Missing credentials",
} }
continue
try:
HydrusNetwork(NAME=str(instance_name), API=str(access_key), URL=str(url))
is_available = True
_SERVICE_STATE["hydrusnetwork_stores"][instance_name] = {
"ok": True,
"url": str(url),
"detail": "Connected",
}
except Exception as exc:
if first_error is None:
first_error = str(exc)
_SERVICE_STATE["hydrusnetwork_stores"][instance_name] = {
"ok": False,
"url": str(url),
"detail": str(exc),
}
if not is_available:
reason = first_error or "No reachable Hydrus instances"
except Exception as e: except Exception as e:
logger.debug(f"Could not enumerate Hydrus instances: {e}") logger.debug(f"Could not enumerate Hydrus instances: {e}")
is_available = False
reason = str(e)
_SERVICE_STATE["hydrus"]["available"] = is_available
_SERVICE_STATE["hydrus"]["reason"] = reason
_SERVICE_STATE["hydrus"]["complete"] = True
if emit_debug: if emit_debug:
status = 'ENABLED' if is_available else f'DISABLED - {reason or "Connection failed"}' status = 'ENABLED' if is_available else f'DISABLED - {reason or "Connection failed"}'
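After this runs, `_SERVICE_STATE` holds both the aggregate flag and the per-instance breakdown, shaped roughly like (values illustrative):

```python
_SERVICE_STATE = {
    "hydrus": {"available": True, "reason": None, "complete": True},
    "hydrusnetwork_stores": {
        "home": {"ok": True, "url": "http://localhost:45869", "detail": "Connected"},
        "backup": {"ok": False, "url": "Not configured", "detail": "Missing credentials"},
    },
}
```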
@@ -97,6 +128,14 @@ def initialize_hydrus_health_check(config: Dict[str, Any], emit_debug: bool = Tr
def check_debrid_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]: def check_debrid_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
"""Check if Debrid API is available.""" """Check if Debrid API is available."""
try: try:
try:
from config import get_debrid_api_key
# Require at least one configured key to consider Debrid configured.
if not get_debrid_api_key(config):
return False, "Not configured"
except Exception:
return False, "Not configured"
from API.HTTP import HTTPClient from API.HTTP import HTTPClient
logger.info("[Debrid Health Check] Pinging Debrid API...") logger.info("[Debrid Health Check] Pinging Debrid API...")
with HTTPClient(timeout=10.0, verify_ssl=True) as client: with HTTPClient(timeout=10.0, verify_ssl=True) as client:
@@ -198,7 +237,7 @@ def check_matrix_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[st
""" """
try: try:
import requests import requests
matrix_conf = config.get('storage', {}).get('matrix', {}) matrix_conf = config.get('provider', {}).get('matrix', {})
homeserver = matrix_conf.get('homeserver') homeserver = matrix_conf.get('homeserver')
access_token = matrix_conf.get('access_token') access_token = matrix_conf.get('access_token')
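So the Matrix credentials now live under the `provider` tree; the equivalent config shape, as read by the lines above (values illustrative):

```python
config = {
    "provider": {
        "matrix": {
            "homeserver": "https://matrix.example.org",  # read via matrix_conf.get('homeserver')
            "access_token": "<token>",                   # read via matrix_conf.get('access_token')
        }
    }
}
```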
@@ -483,21 +522,23 @@ def initialize_local_library_scan(config: Dict[str, Any], emit_debug: bool = Tru
return False, f"Scan failed: {e}" return False, f"Scan failed: {e}"
def initialize_cookies_check(emit_debug: bool = True) -> Tuple[bool, str]: def initialize_cookies_check(config: Optional[Dict[str, Any]] = None, emit_debug: bool = True) -> Tuple[bool, str]:
"""Check for cookies.txt in the application root directory. """Resolve cookies file path from config, falling back to cookies.txt in app root.
Returns a tuple of (found, detail_message). Returns a tuple of (found, detail_message).
""" """
global _COOKIES_FILE_PATH global _COOKIES_FILE_PATH
# Assume CLI.py is in the root try:
root_dir = Path(__file__).parent from config import resolve_cookies_path
cookies_path = root_dir / "cookies.txt" cookies_path = resolve_cookies_path(config or {}, script_dir=Path(__file__).parent)
except Exception:
if cookies_path.exists(): cookies_path = None
if cookies_path and cookies_path.exists():
_COOKIES_FILE_PATH = str(cookies_path) _COOKIES_FILE_PATH = str(cookies_path)
if emit_debug: if emit_debug:
debug(f"Cookies: ENABLED - Found cookies.txt", file=sys.stderr) debug(f"Cookies: ENABLED - Found cookies file", file=sys.stderr)
return True, str(cookies_path) return True, str(cookies_path)
else: else:
_COOKIES_FILE_PATH = None _COOKIES_FILE_PATH = None
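In practice an explicit `cookies` path in config now wins, with the old root-level `cookies.txt` kept as the fallback; a quick sketch (path illustrative):

```python
# Explicit path from config takes precedence over the app-root cookies.txt ...
found, detail = initialize_cookies_check({"cookies": r"C:\data\cookies.txt"})

# ... while calling it with no config keeps the old behaviour.
found, detail = initialize_cookies_check()
```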

View File

@@ -97,7 +97,12 @@ class PipeObject:
if file_path_display != "N/A" and len(file_path_display) > 50: if file_path_display != "N/A" and len(file_path_display) > 50:
file_path_display = "..." + file_path_display[-47:] file_path_display = "..." + file_path_display[-47:]
url_display = self.url or "N/A" url_display: Any = self.url or "N/A"
if isinstance(url_display, (list, tuple, set)):
parts = [str(x) for x in url_display if x]
url_display = ", ".join(parts) if parts else "N/A"
elif url_display != "N/A":
url_display = str(url_display)
if url_display != "N/A" and len(url_display) > 48: if url_display != "N/A" and len(url_display) > 48:
url_display = url_display[:45] + "..." url_display = url_display[:45] + "..."
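Standalone, the same collapsing logic behaves like this (a reproduction for illustration, not the class itself):

```python
url = ["https://a.example/x", "https://b.example/y"]   # multi-valued url field
url_display = ", ".join(str(x) for x in url if x) or "N/A"
if url_display != "N/A" and len(url_display) > 48:
    url_display = url_display[:45] + "..."
print(url_display)   # https://a.example/x, https://b.example/y
```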

View File

@@ -4,6 +4,33 @@
- Image - Image
- Text - Text
### Configuration
The default configuration file is `config.conf`.
- Required: `temp` (where temporary/intermediate files are written)
- Optional: any number of stores and provider credentials
- Modular: extra `.conf` fragments in `config.d/*.conf` are merged on top
Example `config.conf`:
```ini
temp="C:\\Users\\Admin\\Downloads"
[store=folder]
name="default"
path="C:\\Media Machina"
[store=hydrusnetwork]
name="home"
API="..."
URL="http://localhost:45869"
[provider=OpenLibrary]
email="user@example.com"
password="..."
```
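Once loaded, those sections surface as one nested dict; for example the Hydrus instance above is read back as (a sketch, assuming `load_config` from `config.py`):

```python
from config import load_config

config = load_config()
url = config["store"]["hydrusnetwork"]["home"]["URL"]   # "http://localhost:45869"
temp = config["temp"]                                   # "C:\\Users\\Admin\\Downloads"
```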
### File Store ### File Store
- HydrusNetwork https://github.com/hydrusnetwork/hydrus - HydrusNetwork https://github.com/hydrusnetwork/hydrus
- Local drive (C://mylibrary/path) - Local drive (C://mylibrary/path)

View File

@@ -19,17 +19,12 @@ server and uses it as a remote storage backend through the RemoteStorageBackend.
### On PC: ### On PC:
1. Install requests: pip install requests 1. Install requests: pip install requests
2. Add to config.json: 2. Add to config.conf:
{ [store=remote]
"remote_storages": [ name="phone"
{ url="http://192.168.1.100:5000"
"name": "phone", api_key="mysecretkey"
"url": "http://192.168.1.100:5000", timeout=30
"api_key": "mysecretkey",
"timeout": 30
}
]
}
Note: API key is optional. Works on WiFi or cellular data. Note: API key is optional. Works on WiFi or cellular data.
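To verify the pairing from the PC, hitting the server's health endpoint is enough (sketch; the `X-API-Key` header name is an assumption — check the server script for the actual auth scheme):

```python
import requests

resp = requests.get(
    "http://192.168.1.100:5000/health",
    headers={"X-API-Key": "mysecretkey"},  # header name assumed; omit if auth is disabled
    timeout=30,
)
print(resp.status_code, resp.text)
```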
## USAGE ## USAGE
@@ -497,15 +492,13 @@ def main():
print(f"Health URL: http://{local_ip}:{args.port}/health") print(f"Health URL: http://{local_ip}:{args.port}/health")
print(f"API Key: {'Enabled - ' + ('***' + args.api_key[-4:]) if args.api_key else 'Disabled (no auth)'}") print(f"API Key: {'Enabled - ' + ('***' + args.api_key[-4:]) if args.api_key else 'Disabled (no auth)'}")
print(f"Debug Mode: {args.debug}") print(f"Debug Mode: {args.debug}")
print(f"\n📋 Config for config.json:") print("\n📋 Config for config.conf:")
config_entry = { print("[store=remote]")
"name": "phone", print("name=\"phone\"")
"url": f"http://{local_ip}:{args.port}", print(f"url=\"http://{local_ip}:{args.port}\"")
"timeout": 30
}
if args.api_key: if args.api_key:
config_entry["api_key"] = args.api_key print(f"api_key=\"{args.api_key}\"")
print(json.dumps(config_entry, indent=2)) print("timeout=30")
print(f"\n{'='*70}\n") print(f"\n{'='*70}\n")
try: try: