This commit is contained in:
2026-02-11 18:16:07 -08:00
parent cc715e1fef
commit 1d0de1118b
27 changed files with 1167 additions and 1075 deletions

1
.gitignore vendored
View File

@@ -224,6 +224,7 @@ MPV/ffmpeg/*
Log/
Log/medeia_macina/telegram.session
mpv_logs_with_db.txt
*.session
example.py
test*

View File

@@ -32,122 +32,8 @@ except Exception: # pragma: no cover - optional dependency
logger = logging.getLogger(__name__)
def _resolve_verify_value(verify_ssl: bool) -> Union[bool, str]:
"""Return the httpx verify argument, preferring system-aware bundles.
Order of precedence:
1. If verify_ssl is not True (False or path), return it.
2. Respect existing SSL_CERT_FILE env var if present.
3. Prefer `pip_system_certs` if present and it exposes a bundle path.
4. Prefer `certifi_win32`/similar helpers by invoking them and reading certifi.where().
5. Fall back to `certifi.where()` if available.
6. Otherwise, return True to let httpx use system defaults.
"""
if verify_ssl is not True:
return verify_ssl
env_cert = os.environ.get("SSL_CERT_FILE")
if env_cert:
return env_cert
def _try_module_bundle(mod_name: str) -> Optional[str]:
# Prefer checking sys.modules first (helps test injection / monkeypatching)
mod = sys.modules.get(mod_name)
if mod is None:
# Avoid raising ModuleNotFoundError so debuggers and callers aren't interrupted.
# Check for module availability before attempting to import it.
try:
import importlib.util
spec = importlib.util.find_spec(mod_name)
if spec is None:
return None
import importlib
mod = importlib.import_module(mod_name)
except Exception:
# Treat any import/initialization failure as module not available.
return None
# Common APIs that return a bundle path
for attr in ("where", "get_ca_bundle", "bundle_path", "get_bundle_path", "get_bundle"):
fn = getattr(mod, attr, None)
if callable(fn):
try:
res = fn()
if res:
return res
except Exception:
continue
elif isinstance(fn, str) and fn:
return fn
# Some helpers (e.g., certifi_win32) expose an action to merge system certs
for call_attr in ("add_windows_store_certs", "add_system_certs", "merge_system_certs"):
fn = getattr(mod, call_attr, None)
if callable(fn):
try:
fn()
try:
import certifi as _certifi
res = _certifi.where()
if res:
return res
except Exception:
logger.exception("Failed while probing certifi helper inner block")
except Exception:
logger.exception("Failed while invoking cert helper function")
return None
# Prefer helpful modules if available (use safe checks to avoid first-chance import errors)
for mod_name in ("pip_system_certs", "certifi_win32"):
path = _try_module_bundle(mod_name)
if path:
try:
os.environ["SSL_CERT_FILE"] = path
except Exception:
logger.exception("Failed to set SSL_CERT_FILE environment variable")
logger.info(f"SSL_CERT_FILE not set; using bundle from {mod_name}: {path}")
return path
# Fallback to certifi
try:
import certifi # type: ignore
path = certifi.where()
if path:
try:
os.environ["SSL_CERT_FILE"] = path
except Exception:
logger.exception("Failed to set SSL_CERT_FILE environment variable during certifi fallback")
logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}")
return path
except Exception:
logger.exception("Failed to probe certifi for trust bundle")
# Fallback to certifi
try:
import certifi # type: ignore
path = certifi.where()
if path:
try:
os.environ["SSL_CERT_FILE"] = path
except Exception:
logger.exception("Failed to set SSL_CERT_FILE environment variable during certifi fallback")
logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}")
return path
except Exception:
logger.exception("Failed to probe certifi for trust bundle")
return True
def get_requests_verify_value(verify_ssl: bool = True) -> Union[bool, str]:
"""Expose the verified value for reuse outside of HTTPClient (requests sessions)."""
return _resolve_verify_value(verify_ssl)
from API.ssl_certs import resolve_verify_value as _resolve_verify_value
from API.ssl_certs import get_requests_verify_value
# Default configuration
DEFAULT_TIMEOUT = 30.0
@@ -444,13 +330,16 @@ class HTTPClient:
"HTTPClient must be used with context manager (with statement)"
)
# Merge headers
if "headers" in kwargs and kwargs["headers"]:
headers = self._get_headers()
headers.update(kwargs["headers"])
kwargs["headers"] = headers
else:
kwargs["headers"] = self._get_headers()
# Merge headers once per call (do not rebuild for every retry attempt).
merged_headers = self._get_headers()
extra_headers = kwargs.get("headers")
if extra_headers:
try:
merged_headers.update(extra_headers)
except Exception:
# If headers is not a mapping, keep it as-is and let httpx raise.
merged_headers = extra_headers
kwargs["headers"] = merged_headers
last_exception: Exception | None = None

View File

@@ -37,7 +37,7 @@
"(rapidgator\\.net/file/[0-9]{7,8})"
],
"regexp": "((rapidgator\\.net|rg\\.to|rapidgator\\.asia)/file/([0-9a-zA-Z]{32}))|((rapidgator\\.net/file/[0-9]{7,8}))",
"status": false
"status": true
},
"turbobit": {
"name": "turbobit",
@@ -222,20 +222,6 @@
],
"regexp": "(dailyuploads\\.net/[0-9a-zA-Z]{12})"
},
"ddl": {
"name": "ddl",
"type": "premium",
"domains": [
"ddl.to",
"ddownload.com"
],
"regexps": [
"(ddownload\\.com/[0-9a-zA-Z]{12})",
"ddl\\.to/([0-9a-zA-Z]{12})"
],
"regexp": "((ddownload\\.com/[0-9a-zA-Z]{12}))|(ddl\\.to/([0-9a-zA-Z]{12}))",
"status": false
},
"dropapk": {
"name": "dropapk",
"type": "premium",

58
API/httpx_shared.py Normal file
View File

@@ -0,0 +1,58 @@
"""Shared `httpx.Client` helper.
Creating short-lived httpx clients disables connection pooling and costs extra CPU.
This module provides a small singleton client for callers that just need basic
GETs without the full HTTPClient wrapper.
"""
from __future__ import annotations
import threading
from typing import Dict, Optional
import httpx
from API.ssl_certs import resolve_verify_value
_DEFAULT_USER_AGENT = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0 Safari/537.36"
)
_lock = threading.Lock()
_shared_client: Optional[httpx.Client] = None
def get_shared_httpx_client(
    *,
    timeout: float = 30.0,
    verify_ssl: bool = True,
    headers: Optional[Dict[str, str]] = None,
) -> httpx.Client:
    """Return the process-wide shared synchronous ``httpx.Client``.

    The client is built lazily on first use, with double-checked locking so
    concurrent first callers construct it only once.

    NOTE(review): ``timeout``, ``verify_ssl`` and ``headers`` only take effect
    on the call that actually creates the client; every later caller receives
    the already-built instance unchanged.
    """
    global _shared_client
    existing = _shared_client
    if existing is not None:
        return existing
    with _lock:
        if _shared_client is None:
            merged_headers: Dict[str, str] = {"User-Agent": _DEFAULT_USER_AGENT}
            for key, value in (headers or {}).items():
                merged_headers[str(key)] = str(value)
            _shared_client = httpx.Client(
                timeout=timeout,
                verify=resolve_verify_value(verify_ssl),
                headers=merged_headers,
            )
        return _shared_client
def close_shared_httpx_client() -> None:
    """Close the shared client (if any) and reset the singleton slot.

    Safe to call repeatedly; close errors are deliberately swallowed because
    this typically runs during process shutdown.
    """
    global _shared_client
    stale, _shared_client = _shared_client, None
    if stale is None:
        return
    try:
        stale.close()
    except Exception:
        pass

68
API/requests_client.py Normal file
View File

@@ -0,0 +1,68 @@
"""Shared `requests` session helper.
Many providers still use `requests` directly. Reusing a Session provides:
- Connection pooling (fewer TCP/TLS handshakes)
- Lower CPU overhead per request
This module intentionally avoids importing the heavy httpx-based stack.
"""
from __future__ import annotations
import threading
from typing import Any, Dict, Optional
import requests
from requests.adapters import HTTPAdapter
from API.ssl_certs import resolve_verify_value
_DEFAULT_USER_AGENT = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0 Safari/537.36"
)
_local = threading.local()
def get_requests_session(
    *,
    user_agent: str = _DEFAULT_USER_AGENT,
    verify_ssl: bool = True,
    pool_connections: int = 16,
    pool_maxsize: int = 16,
) -> requests.Session:
    """Return a thread-local shared ``requests.Session`` with pooling.

    One Session is cached per thread. The keyword arguments are applied only
    when that thread's Session is first built; later calls on the same thread
    return the cached instance regardless of the arguments passed.
    """
    cached: Optional[requests.Session] = getattr(_local, "session", None)
    if cached is not None:
        return cached
    fresh = requests.Session()
    fresh.headers.update({"User-Agent": str(user_agent or _DEFAULT_USER_AGENT)})
    # Widen the connection pool; max_retries stays 0 so retry semantics are
    # identical to a plain one-off requests call.
    pooled_adapter = HTTPAdapter(
        pool_connections=pool_connections,
        pool_maxsize=pool_maxsize,
        max_retries=0,
    )
    for scheme in ("http://", "https://"):
        fresh.mount(scheme, pooled_adapter)
    # Resolve the CA bundle once; requests honours Session.verify thereafter.
    fresh.verify = resolve_verify_value(verify_ssl)
    _local.session = fresh
    return fresh
def request(
    method: str,
    url: str,
    *,
    params: Optional[Dict[str, Any]] = None,
    headers: Optional[Dict[str, str]] = None,
    timeout: Optional[float] = None,
    **kwargs: Any,
) -> requests.Response:
    """Issue *method* against *url* through this thread's shared Session.

    Remaining keyword arguments are forwarded verbatim to
    ``requests.Session.request``.
    """
    return get_requests_session().request(
        method,
        url,
        params=params,
        headers=headers,
        timeout=timeout,
        **kwargs,
    )

109
API/ssl_certs.py Normal file
View File

@@ -0,0 +1,109 @@
"""SSL certificate bundle resolution helpers.
This module is intentionally lightweight (no httpx import) so it can be used by
providers that still rely on `requests` without paying the import cost of the
full HTTP client stack.
"""
from __future__ import annotations
import logging
import os
import sys
from typing import Optional, Union
logger = logging.getLogger(__name__)
def resolve_verify_value(verify_ssl: Union[bool, str]) -> Union[bool, str]:
    """Return the value suitable for `requests`/`httpx` verify parameters.

    Order of precedence:
    1. If ``verify_ssl`` is not True (False, or a CA-bundle path string),
       it is returned unchanged.
    2. An existing ``SSL_CERT_FILE`` env var is respected.
    3. Optional helpers (``pip_system_certs``, ``certifi_win32``) are probed
       for a bundle path.
    4. Falls back to ``certifi.where()``.
    5. Otherwise returns True (let the HTTP library use its defaults).

    Side effect: when a bundle path is discovered, it is exported via
    ``SSL_CERT_FILE`` so later callers and child processes agree on it.
    """
    # Annotation fixed to Union[bool, str]: the contract (documented above)
    # has always accepted a path string, not just booleans.
    if verify_ssl is not True:
        return verify_ssl
    env_cert = os.environ.get("SSL_CERT_FILE")
    if env_cert:
        return env_cert

    def _try_module_bundle(mod_name: str) -> Optional[str]:
        """Best-effort: extract a CA-bundle path from an optional helper module."""
        # Check sys.modules first (helps test injection / monkeypatching).
        mod = sys.modules.get(mod_name)
        if mod is None:
            try:
                import importlib
                import importlib.util

                # find_spec avoids raising ModuleNotFoundError outright,
                # which keeps debugger first-chance noise down.
                spec = importlib.util.find_spec(mod_name)
                if spec is None:
                    return None
                mod = importlib.import_module(mod_name)
            except Exception:
                # Any import/initialization failure means "not available".
                return None
        # Common APIs that return a bundle path directly.
        for attr in ("where", "get_ca_bundle", "bundle_path", "get_bundle_path", "get_bundle"):
            fn = getattr(mod, attr, None)
            if callable(fn):
                try:
                    res = fn()
                    if res:
                        return str(res)
                except Exception:
                    continue
            elif isinstance(fn, str) and fn:
                return fn
        # Some helpers (e.g. certifi_win32) merge system certs into certifi;
        # invoke them, then read the merged bundle via certifi.where().
        for call_attr in ("add_windows_store_certs", "add_system_certs", "merge_system_certs"):
            fn = getattr(mod, call_attr, None)
            if callable(fn):
                try:
                    fn()
                    try:
                        import certifi as _certifi  # type: ignore

                        res = _certifi.where()
                        if res:
                            return str(res)
                    except Exception:
                        logger.exception("Failed while probing certifi helper inner block")
                except Exception:
                    logger.exception("Failed while invoking cert helper function")
        return None

    for mod_name in ("pip_system_certs", "certifi_win32"):
        path = _try_module_bundle(mod_name)
        if path:
            try:
                os.environ["SSL_CERT_FILE"] = path
            except Exception:
                logger.exception("Failed to set SSL_CERT_FILE environment variable")
            logger.info(f"SSL_CERT_FILE not set; using bundle from {mod_name}: {path}")
            return path
    # Final fallback: certifi's bundled CA store, if installed.
    try:
        import certifi  # type: ignore

        path = certifi.where()
        if path:
            try:
                os.environ["SSL_CERT_FILE"] = path
            except Exception:
                logger.exception("Failed to set SSL_CERT_FILE environment variable during certifi fallback")
            logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}")
            return str(path)
    except Exception:
        logger.exception("Failed to probe certifi for trust bundle")
    return True
def get_requests_verify_value(verify_ssl: bool = True) -> Union[bool, str]:
    """Backwards-friendly alias for call sites that only care about requests.

    Simply delegates to :func:`resolve_verify_value`.
    """
    resolved = resolve_verify_value(verify_ssl)
    return resolved

View File

@@ -17,7 +17,8 @@ from API.Tidal import (
stringify,
)
from ProviderCore.base import Provider, SearchResult, parse_inline_query_arguments
from cmdlet._shared import get_field
from SYS.field_access import get_field
from Provider.tidal_manifest import resolve_tidal_manifest_path
from SYS import pipeline as pipeline_context
from SYS.logger import debug, log
@@ -1178,11 +1179,6 @@ class HIFI(Provider):
except Exception:
pass
try:
from cmdlet._shared import resolve_tidal_manifest_path
except Exception:
return None
resolved = resolve_tidal_manifest_path({"full_metadata": md, "path": raw_path, "title": getattr(result, "title", "")})
if not resolved:
return None
@@ -1223,9 +1219,11 @@ class HIFI(Provider):
# As a fallback, try downloading the URL directly if it looks like a file.
try:
import httpx
from API.httpx_shared import get_shared_httpx_client
resp = httpx.get(resolved_text, timeout=float(getattr(self, "api_timeout", 10.0)))
timeout_val = float(getattr(self, "api_timeout", 10.0))
client = get_shared_httpx_client(timeout=timeout_val)
resp = client.get(resolved_text, timeout=timeout_val)
resp.raise_for_status()
content = resp.content
direct_path = output_dir / f"{stem}.bin"

View File

@@ -17,7 +17,8 @@ from API.Tidal import (
stringify,
)
from ProviderCore.base import Provider, SearchResult
from cmdlet._shared import get_field
from SYS.field_access import get_field
from Provider.tidal_manifest import resolve_tidal_manifest_path
from SYS import pipeline as pipeline_context
from SYS.logger import debug, log
@@ -144,6 +145,62 @@ class Tidal(Provider):
meta["view"] = self._get_view(query, filters)
return meta
def postprocess_search_results(
    self,
    *,
    query: str,
    results: List[SearchResult],
    filters: Optional[Dict[str, Any]] = None,
    limit: int = 50,
    table_type: str = "",
    table_meta: Optional[Dict[str, Any]] = None,
) -> Tuple[List[SearchResult], Optional[str], Optional[Dict[str, Any]]]:
    """Auto-expand a single-artist search result into that artist's albums.

    Provider-specific UX hook: when an "artist" view search yields exactly
    one artist row, replace the results with the artist's album list
    (preserves historical cmdlet behavior). Everything else passes through
    untouched as ``(results, None, None)``.
    """
    _ = (query, filters, table_type)
    passthrough = (results, None, None)
    # Only the "artist" view participates in auto-expansion.
    try:
        if not isinstance(table_meta, dict):
            return passthrough
        view_name = str((table_meta or {}).get("view") or "").strip().lower()
        if view_name != "artist":
            return passthrough
    except Exception:
        return passthrough
    if not isinstance(results, list) or len(results) != 1:
        return passthrough
    sole = results[0]
    name = str(getattr(sole, "title", "") or "").strip()
    md = getattr(sole, "full_metadata", None)
    resolved_id: Optional[int] = None
    if isinstance(md, dict):
        raw_id = md.get("artistId") or md.get("id")
        if raw_id is not None:
            try:
                resolved_id = int(raw_id)
            except Exception:
                resolved_id = None
    # Floor of 200 keeps the expanded album list useful.
    requested = max(int(limit or 0), 200)
    try:
        albums = self._albums_for_artist(
            artist_id=resolved_id,
            artist_name=name,
            limit=requested,
        )
    except Exception:
        albums = []
    if not albums:
        return passthrough
    merged_meta: Dict[str, Any] = dict(table_meta or {}) if isinstance(table_meta, dict) else {}
    merged_meta["view"] = "album"
    return albums, "tidal.album", merged_meta
def __init__(self, config: Optional[Dict[str, Any]] = None) -> None:
super().__init__(config)
self.api_urls = self._resolve_api_urls()
@@ -1304,11 +1361,6 @@ class Tidal(Provider):
except Exception:
pass
try:
from cmdlet._shared import resolve_tidal_manifest_path
except Exception:
return None
resolved = resolve_tidal_manifest_path({"full_metadata": md, "path": raw_path, "title": getattr(result, "title", "")})
if not resolved:
return None
@@ -1349,9 +1401,11 @@ class Tidal(Provider):
# As a fallback, try downloading the URL directly if it looks like a file.
try:
import httpx
from API.httpx_shared import get_shared_httpx_client
resp = httpx.get(resolved_text, timeout=float(getattr(self, "api_timeout", 10.0)))
timeout_val = float(getattr(self, "api_timeout", 10.0))
client = get_shared_httpx_client(timeout=timeout_val)
resp = client.get(resolved_text, timeout=timeout_val)
resp.raise_for_status()
content = resp.content
direct_path = output_dir / f"{stem}.bin"

View File

@@ -514,7 +514,7 @@ class InternetArchive(Provider):
quiet_mode: bool,
) -> Optional[int]:
"""Generic hook for download-file to show a selection table for IA items."""
from cmdlet._shared import get_field as sh_get_field
from SYS.field_access import get_field as sh_get_field
return maybe_show_formats_table(
raw_urls=[url] if url else [],
piped_items=[item] if item else [],

View File

@@ -4,6 +4,8 @@ import html as html_std
import logging
import re
import requests
from API.requests_client import get_requests_session
import sys
import time
from pathlib import Path
@@ -294,7 +296,7 @@ def _enrich_book_tags_from_isbn(isbn: str,
# 1) OpenLibrary API lookup by ISBN (short timeout, silent failure).
try:
url = f"https://openlibrary.org/api/books?bibkeys=ISBN:{isbn_clean}&jscmd=data&format=json"
resp = requests.get(url, timeout=4)
resp = get_requests_session().get(url, timeout=4)
resp.raise_for_status()
data = resp.json()
if isinstance(data, dict) and data:
@@ -407,14 +409,11 @@ def _fetch_libgen_details_html(
try:
if timeout is None:
timeout = (DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT)
session = requests.Session()
session.headers.update(
{
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0 Safari/537.36",
}
)
with session.get(str(url), stream=True, timeout=timeout) as resp:
session = get_requests_session()
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0 Safari/537.36",
}
with session.get(str(url), stream=True, timeout=timeout, headers=headers) as resp:
resp.raise_for_status()
ct = str(resp.headers.get("Content-Type", "")).lower()
if "text/html" not in ct:
@@ -1111,13 +1110,15 @@ class LibgenSearch:
"""Robust LibGen searcher."""
def __init__(self, session: Optional[requests.Session] = None):
self.session = session or requests.Session()
self.session.headers.update(
{
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
}
)
self.session = session or get_requests_session()
# Ensure a modern browser UA is present without clobbering existing one.
if not any(k.lower() == "user-agent" for k in (self.session.headers or {})):
self.session.headers.update(
{
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
}
)
def _search_libgen_json(
self,
@@ -1901,7 +1902,7 @@ def download_from_mirror(
) -> Tuple[bool,
Optional[Path]]:
"""Download file from a LibGen mirror URL with optional progress tracking."""
session = session or requests.Session()
session = session or get_requests_session()
# Ensure a modern browser User-Agent is used for downloads to avoid mirror blocks.
if not any(
k.lower() == "user-agent"

View File

@@ -8,7 +8,7 @@ from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Tuple
from urllib.parse import quote
import requests
from API.requests_client import get_requests_session
from ProviderCore.base import Provider, SearchResult
from SYS.provider_helpers import TableProviderMixin
@@ -189,7 +189,7 @@ def _matrix_health_check(*,
if not base:
return False, "Matrix homeserver missing"
resp = requests.get(f"{base}/_matrix/client/versions", timeout=5)
resp = get_requests_session().get(f"{base}/_matrix/client/versions", timeout=5)
if resp.status_code != 200:
return False, f"Homeserver returned {resp.status_code}"
@@ -197,7 +197,7 @@ def _matrix_health_check(*,
headers = {
"Authorization": f"Bearer {access_token}"
}
resp = requests.get(
resp = get_requests_session().get(
f"{base}/_matrix/client/v3/account/whoami",
headers=headers,
timeout=5
@@ -234,6 +234,8 @@ class Matrix(TableProviderMixin, Provider):
4. Selection triggers upload of pending files to selected rooms
"""
EXPOSE_AS_FILE_PROVIDER = False
@classmethod
def config_schema(cls) -> List[Dict[str, Any]]:
return [
@@ -388,7 +390,7 @@ class Matrix(TableProviderMixin, Provider):
headers = {
"Authorization": f"Bearer {token}"
}
resp = requests.get(
resp = get_requests_session().get(
f"{base}/_matrix/client/v3/joined_rooms",
headers=headers,
timeout=10
@@ -442,7 +444,7 @@ class Matrix(TableProviderMixin, Provider):
# Best-effort room name lookup (safe to fail).
try:
encoded = quote(room_id, safe="")
name_resp = requests.get(
name_resp = get_requests_session().get(
f"{base}/_matrix/client/v3/rooms/{encoded}/state/m.room.name",
headers=headers,
timeout=5,
@@ -491,7 +493,7 @@ class Matrix(TableProviderMixin, Provider):
total_bytes=int(path.stat().st_size),
label="upload"
)
resp = requests.post(
resp = get_requests_session().post(
upload_url,
headers=headers,
data=wrapped,
@@ -539,7 +541,7 @@ class Matrix(TableProviderMixin, Provider):
send_headers = {
"Authorization": f"Bearer {token}"
}
send_resp = requests.put(send_url, headers=send_headers, json=payload)
send_resp = get_requests_session().put(send_url, headers=send_headers, json=payload)
if send_resp.status_code != 200:
raise Exception(f"Matrix send message failed: {send_resp.text}")
@@ -588,7 +590,7 @@ class Matrix(TableProviderMixin, Provider):
"msgtype": "m.text",
"body": message
}
send_resp = requests.put(send_url, headers=send_headers, json=payload)
send_resp = get_requests_session().put(send_url, headers=send_headers, json=payload)
if send_resp.status_code != 200:
raise Exception(f"Matrix send text failed: {send_resp.text}")

View File

@@ -4,12 +4,12 @@ from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, cast
import html as html_std
import re
import requests
import sys
import json
import subprocess
from API.HTTP import HTTPClient
from API.requests_client import get_requests_session
from ProviderCore.base import SearchResult
try:
from Provider.Tidal import Tidal
@@ -86,7 +86,7 @@ class ITunesProvider(MetadataProvider):
"limit": limit
}
try:
resp = requests.get(
resp = get_requests_session().get(
"https://itunes.apple.com/search",
params=params,
timeout=10
@@ -137,7 +137,7 @@ class OpenLibraryMetadataProvider(MetadataProvider):
else:
q = query_clean
resp = requests.get(
resp = get_requests_session().get(
"https://openlibrary.org/search.json",
params={
"q": q,
@@ -243,7 +243,7 @@ class GoogleBooksMetadataProvider(MetadataProvider):
q = query_clean
try:
resp = requests.get(
resp = get_requests_session().get(
"https://www.googleapis.com/books/v1/volumes",
params={
"q": q,
@@ -369,7 +369,7 @@ class ISBNsearchMetadataProvider(MetadataProvider):
url = f"https://isbnsearch.org/isbn/{isbn}"
try:
resp = requests.get(url, timeout=10)
resp = get_requests_session().get(url, timeout=10)
resp.raise_for_status()
html = str(resp.text or "")
if not html:
@@ -1059,7 +1059,10 @@ def fetch_archive_item_metadata(archive_id: str,
ident = str(archive_id or "").strip()
if not ident:
return {}
resp = requests.get(f"https://archive.org/metadata/{ident}", timeout=int(timeout))
resp = get_requests_session().get(
f"https://archive.org/metadata/{ident}",
timeout=int(timeout),
)
resp.raise_for_status()
data = resp.json() if resp is not None else {}
if not isinstance(data, dict):

View File

@@ -16,7 +16,8 @@ from urllib.parse import urlparse
import requests
from API.HTTP import HTTPClient, get_requests_verify_value
from API.HTTP import HTTPClient
from API.requests_client import get_requests_session
from ProviderCore.base import Provider, SearchResult
from SYS.utils import sanitize_filename
from SYS.cli_syntax import get_field, get_free_text, parse_query
@@ -27,8 +28,6 @@ from Provider.metadata_provider import (
)
from SYS.utils import unique_path
# Resolve lazily to avoid import-time module checks (prevents debugger first-chance noise)
_ARCHIVE_VERIFY_VALUE = None # will be resolved on first session creation
_DEFAULT_ARCHIVE_SCALE = 4
_QUALITY_TO_ARCHIVE_SCALE = {
"high": 2,
@@ -38,12 +37,7 @@ _QUALITY_TO_ARCHIVE_SCALE = {
def _create_archive_session() -> requests.Session:
session = requests.Session()
global _ARCHIVE_VERIFY_VALUE
if _ARCHIVE_VERIFY_VALUE is None:
_ARCHIVE_VERIFY_VALUE = get_requests_verify_value()
session.verify = _ARCHIVE_VERIFY_VALUE
return session
return get_requests_session()
try:
from Crypto.Cipher import AES # type: ignore
@@ -590,10 +584,9 @@ class OpenLibrary(Provider):
if not ident:
return False, "no-archive-id"
try:
resp = requests.get(
resp = get_requests_session().get(
f"https://archive.org/metadata/{ident}",
timeout=8,
verify=_ARCHIVE_VERIFY_VALUE,
)
resp.raise_for_status()
data = resp.json() if resp is not None else {}
@@ -804,10 +797,9 @@ class OpenLibrary(Provider):
"""Check for a directly downloadable original PDF in Archive.org metadata."""
try:
metadata_url = f"https://archive.org/metadata/{book_id}"
response = requests.get(
response = get_requests_session().get(
metadata_url,
timeout=6,
verify=_ARCHIVE_VERIFY_VALUE,
)
response.raise_for_status()
metadata = response.json()
@@ -822,11 +814,10 @@ class OpenLibrary(Provider):
pdf_url = (
f"https://archive.org/download/{book_id}/{filename.replace(' ', '%20')}"
)
check_response = requests.head(
check_response = get_requests_session().head(
pdf_url,
timeout=4,
allow_redirects=True,
verify=_ARCHIVE_VERIFY_VALUE,
)
if check_response.status_code == 200:
return True, pdf_url

284
Provider/tidal_manifest.py Normal file
View File

@@ -0,0 +1,284 @@
"""Tidal/HIFI manifest helpers.
This module intentionally lives with the provider code (not cmdlets).
It contains best-effort helpers for turning proxy-provided Tidal "manifest"
values into a playable input reference:
- A local MPD file path (persisted to temp)
- Or a direct URL (when the manifest is JSON with `urls`)
Callers may pass either a SearchResult-like object (with `.full_metadata`) or
pipeline dicts.
"""
from __future__ import annotations
import base64
import hashlib
import json
import re
import sys
import tempfile
from pathlib import Path
from typing import Any, Dict, Optional
from API.httpx_shared import get_shared_httpx_client
from SYS.logger import log
def resolve_tidal_manifest_path(item: Any) -> Optional[str]:
    """Persist the Tidal manifest (MPD) and return a local path or URL.

    Resolution order:
    1) `_tidal_manifest_path` (existing local file)
    2) `_tidal_manifest_url` (existing remote URL)
    3) decode `manifest` and:
       - if JSON with `urls`: return the first URL
       - if MPD XML: persist under `%TEMP%/medeia/tidal/` and return path

    If `manifest` is missing but a track id exists, the function will attempt a
    best-effort fetch from the public proxy endpoints to populate `manifest`.
    """
    # Accept either pipeline dicts or SearchResult-like objects.
    metadata: Any = None
    if isinstance(item, dict):
        metadata = item.get("full_metadata") or item.get("metadata")
    else:
        metadata = getattr(item, "full_metadata", None) or getattr(item, "metadata", None)
    if not isinstance(metadata, dict):
        return None
    # 1) A previously persisted local MPD file wins, if it still exists.
    existing_path = metadata.get("_tidal_manifest_path")
    if existing_path:
        try:
            resolved = Path(str(existing_path))
            if resolved.is_file():
                return str(resolved)
        except Exception:
            pass
    # 2) A cached direct URL is returned as-is.
    existing_url = metadata.get("_tidal_manifest_url")
    if existing_url and isinstance(existing_url, str):
        candidate = existing_url.strip()
        if candidate:
            return candidate
    # 3) Decode the raw manifest; fetch from the proxy first if it's absent.
    raw_manifest = metadata.get("manifest")
    if not raw_manifest:
        _maybe_fetch_track_manifest(item, metadata)
        raw_manifest = metadata.get("manifest")
    if not raw_manifest:
        return None
    # Strip ALL whitespace (base64 payloads are commonly line-wrapped).
    manifest_str = "".join(str(raw_manifest or "").split())
    if not manifest_str:
        return None
    manifest_bytes: bytes
    # Decode attempts, strictest first: validated base64, lenient base64,
    # then treat the value as raw UTF-8 text.
    try:
        manifest_bytes = base64.b64decode(manifest_str, validate=True)
    except Exception:
        try:
            manifest_bytes = base64.b64decode(manifest_str, validate=False)
        except Exception:
            try:
                manifest_bytes = manifest_str.encode("utf-8")
            except Exception:
                return None
    if not manifest_bytes:
        return None
    # Sniff the payload type from the first KiB.
    head = (manifest_bytes[:1024] or b"").lstrip()
    if head.startswith((b"{", b"[")):
        # JSON-style manifest: extract a direct URL from its `urls` list.
        return _resolve_json_manifest_urls(metadata, manifest_bytes)
    looks_like_mpd = head.startswith((b"<?xml", b"<MPD")) or (b"<MPD" in head)
    if not looks_like_mpd:
        # Unknown payload: record a diagnostic on the metadata and bail.
        manifest_mime = str(metadata.get("manifestMimeType") or "").strip().lower()
        try:
            metadata["_tidal_manifest_error"] = (
                f"Decoded manifest is not an MPD XML (mime: {manifest_mime or 'unknown'})"
            )
        except Exception:
            pass
        try:
            log(
                f"[tidal] Decoded manifest is not an MPD XML for track {metadata.get('trackId') or metadata.get('id')} (mime {manifest_mime or 'unknown'})",
                file=sys.stderr,
            )
        except Exception:
            pass
        return None
    # MPD XML: persist to a temp file and return its path.
    return _persist_mpd_bytes(item, metadata, manifest_bytes)
def _maybe_fetch_track_manifest(item: Any, metadata: Dict[str, Any]) -> None:
    """If we only have a track id, fetch details from the proxy to populate `manifest`."""
    # Skip if a previous fetch already ran against this metadata dict.
    try:
        already = bool(metadata.get("_tidal_track_details_fetched"))
    except Exception:
        already = False
    track_id = metadata.get("trackId") or metadata.get("id")
    if track_id is None:
        # Fall back to parsing the id out of a tidal:/hifi: track path or URL.
        try:
            if isinstance(item, dict):
                candidate_path = item.get("path") or item.get("url")
            else:
                candidate_path = getattr(item, "path", None) or getattr(item, "url", None)
        except Exception:
            candidate_path = None
        if candidate_path:
            m = re.search(
                r"(tidal|hifi):(?://)?track[\\/](\d+)",
                str(candidate_path),
                flags=re.IGNORECASE,
            )
            if m:
                # Group 2 is the numeric track id (group 1 is the scheme).
                track_id = m.group(2)
    if already or track_id is None:
        return
    # Only positive integer ids are fetchable from the proxy.
    try:
        track_int = int(track_id)
    except Exception:
        track_int = None
    if not track_int or track_int <= 0:
        return
    try:
        client = get_shared_httpx_client()
        # Primary endpoint: full track details (expected to include `manifest`).
        resp = client.get(
            "https://tidal-api.binimum.org/track/",
            params={"id": str(track_int)},
            timeout=10.0,
        )
        resp.raise_for_status()
        payload = resp.json()
        data = payload.get("data") if isinstance(payload, dict) else None
        if isinstance(data, dict) and data:
            try:
                metadata.update(data)
            except Exception:
                pass
        # Mark as fetched even if `data` was empty, so we don't re-hit the proxy.
        try:
            metadata["_tidal_track_details_fetched"] = True
        except Exception:
            pass
        if not metadata.get("url"):
            # Secondary endpoint: /info/ may carry a direct `url` field.
            try:
                resp_info = client.get(
                    "https://tidal-api.binimum.org/info/",
                    params={"id": str(track_int)},
                    timeout=10.0,
                )
                resp_info.raise_for_status()
                info_payload = resp_info.json()
                info_data = info_payload.get("data") if isinstance(info_payload, dict) else None
                if isinstance(info_data, dict) and info_data:
                    try:
                        # Only fill keys the /track/ fetch did not provide.
                        for k, v in info_data.items():
                            if k not in metadata:
                                metadata[k] = v
                    except Exception:
                        pass
                    try:
                        # `url` is intentionally allowed to overwrite here.
                        if info_data.get("url"):
                            metadata["url"] = info_data.get("url")
                    except Exception:
                        pass
            except Exception:
                pass
    except Exception:
        # Network/parse failures are best-effort: caller just sees no manifest.
        return
def _resolve_json_manifest_urls(metadata: Dict[str, Any], manifest_bytes: bytes) -> Optional[str]:
try:
text = manifest_bytes.decode("utf-8", errors="ignore")
payload = json.loads(text)
urls = payload.get("urls") or []
selected_url = None
for candidate in urls:
if isinstance(candidate, str):
candidate = candidate.strip()
if candidate:
selected_url = candidate
break
if selected_url:
try:
metadata["_tidal_manifest_url"] = selected_url
except Exception:
pass
return selected_url
try:
metadata["_tidal_manifest_error"] = "JSON manifest contained no urls"
except Exception:
pass
log(
f"[tidal] JSON manifest for track {metadata.get('trackId') or metadata.get('id')} had no playable urls",
file=sys.stderr,
)
except Exception as exc:
try:
metadata["_tidal_manifest_error"] = f"Failed to parse JSON manifest: {exc}"
except Exception:
pass
log(
f"[tidal] Failed to parse JSON manifest for track {metadata.get('trackId') or metadata.get('id')}: {exc}",
file=sys.stderr,
)
return None
def _persist_mpd_bytes(item: Any, metadata: Dict[str, Any], manifest_bytes: bytes) -> Optional[str]:
manifest_hash = str(metadata.get("manifestHash") or "").strip()
track_id = metadata.get("trackId") or metadata.get("id")
identifier = manifest_hash or hashlib.sha256(manifest_bytes).hexdigest()
identifier_safe = re.sub(r"[^A-Za-z0-9_-]+", "_", identifier)[:64]
if not identifier_safe:
identifier_safe = hashlib.sha256(manifest_bytes).hexdigest()[:12]
track_safe = "tidal"
if track_id is not None:
track_safe = re.sub(r"[^A-Za-z0-9_-]+", "_", str(track_id))[:32] or "tidal"
manifest_dir = Path(tempfile.gettempdir()) / "medeia" / "tidal"
try:
manifest_dir.mkdir(parents=True, exist_ok=True)
except Exception:
pass
filename = f"tidal-{track_safe}-{identifier_safe[:24]}.mpd"
target_path = manifest_dir / filename
try:
with open(target_path, "wb") as fh:
fh.write(manifest_bytes)
metadata["_tidal_manifest_path"] = str(target_path)
# Best-effort: propagate back into the caller object/dict.
if isinstance(item, dict):
if item.get("full_metadata") is metadata:
item["full_metadata"] = metadata
elif item.get("metadata") is metadata:
item["metadata"] = metadata
else:
extra = getattr(item, "extra", None)
if isinstance(extra, dict):
extra["_tidal_manifest_path"] = str(target_path)
return str(target_path)
except Exception:
return None

View File

@@ -6,6 +6,8 @@ from dataclasses import dataclass
from typing import Any, Dict, List, Optional
import requests
from API.requests_client import get_requests_session
from ProviderCore.base import Provider, SearchResult
from SYS.logger import debug, log
try: # Preferred HTML parser
@@ -66,7 +68,7 @@ class Scraper:
def _get_page(self, page: int) -> List[TorrentInfo]:
url, payload = self._request_data(page)
try:
resp = requests.get(
resp = get_requests_session().get(
url,
params=payload,
headers=self.headers,
@@ -86,7 +88,7 @@ class Scraper:
def _parse_detail(self, url: str) -> Optional[str]: # optional override
try:
resp = requests.get(url, headers=self.headers, timeout=self.timeout)
resp = get_requests_session().get(url, headers=self.headers, timeout=self.timeout)
resp.raise_for_status()
return self._parse_detail_response(resp)
except Exception:

View File

@@ -141,6 +141,10 @@ class Provider(ABC):
# Used for dynamically generating config panels (e.g., missing credentials).
REQUIRED_CONFIG_KEYS: Sequence[str] = ()
# Some providers implement `upload()` but are not intended to be used as
# generic "file host" providers via `add-file -provider ...`.
EXPOSE_AS_FILE_PROVIDER: bool = True
def __init__(self, config: Optional[Dict[str, Any]] = None):
self.config = config or {}
# Prioritize explicit NAME property for the instance name
@@ -233,6 +237,35 @@ class Provider(ABC):
normalized = str(query or "").strip()
return normalized, {}
def postprocess_search_results(
    self,
    *,
    query: str,
    results: List[SearchResult],
    filters: Optional[Dict[str, Any]] = None,
    limit: int = 50,
    table_type: str = "",
    table_meta: Optional[Dict[str, Any]] = None,
) -> Tuple[List[SearchResult], Optional[str], Optional[Dict[str, Any]]]:
    """Optional hook for provider-specific result transforms.

    Cmdlets should avoid hardcoding provider quirks. Providers can override
    this to:
      - expand/replace result sets (e.g., artist -> albums)
      - override the table type
      - override table metadata

    Args:
        query: Normalized search query that produced *results*.
        results: Result list returned by the provider's search.
        filters: Parsed search filters, if any.
        limit: Maximum result count requested by the caller.
        table_type: Current result-table type identifier.
        table_meta: Current result-table metadata, if any.

    Returns:
        (results, table_type_override, table_meta_override); the two
        overrides are ``None`` when the caller's defaults should be kept.
    """
    # Base implementation performs no transform; the assignments below
    # mark the keyword-only parameters as intentionally unused.
    _ = query
    _ = filters
    _ = limit
    _ = table_type
    _ = table_meta
    return results, None, None
# Standard lifecycle/auth hook.
def login(self, **_kwargs: Any) -> bool:
return True

View File

@@ -7,6 +7,7 @@ for those plugins.
from __future__ import annotations
from functools import lru_cache
import importlib
import pkgutil
import sys
@@ -18,7 +19,18 @@ from urllib.parse import urlparse
from SYS.logger import log, debug
from ProviderCore.base import FileProvider, Provider, SearchProvider, SearchResult
from Provider.soulseek import download_soulseek_file
def download_soulseek_file(*args: Any, **kwargs: Any) -> Any:
    """Lazily forward to ``Provider.soulseek.download_soulseek_file``.

    Importing the provider modules can be expensive; deferring the import
    to first call avoids paying that cost at registry import time.
    """
    from Provider.soulseek import download_soulseek_file as _impl
    return _impl(*args, **kwargs)
@dataclass(frozen=True)
@@ -36,7 +48,11 @@ class ProviderInfo:
@property
def supports_upload(self) -> bool:
return self.provider_class.upload is not Provider.upload
try:
exposed = bool(getattr(self.provider_class, "EXPOSE_AS_FILE_PROVIDER", True))
except Exception:
exposed = True
return exposed and (self.provider_class.upload is not Provider.upload)
class ProviderRegistry:
@@ -136,8 +152,8 @@ class ProviderRegistry:
return
self._modules.add(module_name)
for attr in dir(module):
candidate = getattr(module, attr)
# Iterate module dict directly (faster than dir()+getattr()).
for candidate in vars(module).values():
if not isinstance(candidate, type):
continue
if not issubclass(candidate, Provider):
@@ -182,11 +198,64 @@ class ProviderRegistry:
continue
self._register_module(module)
# Pick up any Provider subclasses loaded via other mechanisms.
self._sync_subclasses()
def _try_import_for_name(self, normalized_name: str) -> None:
    """Best-effort import for a single provider module.

    This avoids importing every provider module when the caller only needs
    one provider (common for CLI usage).

    Args:
        normalized_name: Provider name (may still contain ``-`` or ``.``
            separators); lowercased and stripped before use.
    """
    name = str(normalized_name or "").strip().lower()
    if not name or not self.package_name:
        return
    # Keep behavior consistent with full discovery (which skips hifi).
    if name == "hifi":
        return
    # Candidate module names: the raw name, a dash->underscore variant,
    # and (for dotted names) the leading segment.
    candidates: List[str] = [name]
    if "-" in name:
        candidates.append(name.replace("-", "_"))
    if "." in name:
        candidates.append(name.split(".", 1)[0])
    for mod_name in candidates:
        if not mod_name:
            continue
        module_path = f"{self.package_name}.{mod_name}"
        # Skip modules already registered by a previous pass.
        if module_path in self._modules:
            continue
        try:
            module = importlib.import_module(module_path)
        except Exception:
            # Import failures are non-fatal; try the next candidate.
            continue
        self._register_module(module)
        # Pick up subclasses in case the module registers indirectly.
        self._sync_subclasses()
        return
def get(self, name: str) -> Optional[ProviderInfo]:
self.discover()
if not name:
return None
return self._lookup.get(self._normalize(name))
normalized = self._normalize(name)
info = self._lookup.get(normalized)
if info is not None:
return info
# If we haven't done a full discovery yet, try importing just the
# module that matches the requested name.
if not self._discovered:
self._try_import_for_name(normalized)
info = self._lookup.get(normalized)
if info is not None:
return info
# Fall back to full package scan.
self.discover()
return self._lookup.get(normalized)
def iter_providers(self) -> Iterable[ProviderInfo]:
self.discover()
@@ -210,8 +279,14 @@ class ProviderRegistry:
_walk(Provider)
REGISTRY = ProviderRegistry("Provider")
REGISTRY.discover()
REGISTRY._sync_subclasses()
@lru_cache(maxsize=512)
def _provider_url_patterns(provider_class: Type[Provider]) -> Sequence[str]:
try:
return list(provider_class.url_patterns())
except Exception:
return []
def register_provider(
@@ -268,14 +343,67 @@ def _supports_search(provider: Provider) -> bool:
def _supports_upload(provider: Provider) -> bool:
return provider.__class__.upload is not Provider.upload
def _provider_url_patterns(provider_class: Type[Provider]) -> Sequence[str]:
try:
return list(provider_class.url_patterns())
exposed = bool(getattr(provider.__class__, "EXPOSE_AS_FILE_PROVIDER", True))
except Exception:
return []
exposed = True
return exposed and (provider.__class__.upload is not Provider.upload)
def _normalize_choice_entry(entry: Any) -> Optional[Dict[str, Any]]:
if entry is None:
return None
if isinstance(entry, dict):
value = entry.get("value")
text = entry.get("text") or entry.get("label") or value
aliases = entry.get("alias") or entry.get("aliases") or []
value_str = str(value) if value is not None else (str(text) if text is not None else None)
text_str = str(text) if text is not None else value_str
if not value_str or not text_str:
return None
alias_list = [str(a) for a in aliases if a is not None]
return {"value": value_str, "text": text_str, "aliases": alias_list}
return {"value": str(entry), "text": str(entry), "aliases": []}
def _collect_inline_choice_mapping(provider: Provider) -> Dict[str, List[Dict[str, Any]]]:
    """Gather inline-query choice lists declared by *provider*.

    Sources, in order: ``QUERY_ARG_CHOICES`` (preferred), then
    ``INLINE_QUERY_FIELD_CHOICES``, then the optional
    ``inline_query_field_choices()`` callable. Field keys are lowercased;
    fields with no usable choices are omitted.
    """
    collected: Dict[str, List[Dict[str, Any]]] = {}

    def _normalize_seq(raw: Any) -> List[Dict[str, Any]]:
        # A choice source may be a callable, a wrapper dict, or a sequence.
        candidate = raw
        try:
            if callable(candidate):
                candidate = candidate()
        except Exception:
            candidate = raw
        if isinstance(candidate, dict):
            candidate = candidate.get("choices") or candidate.get("values") or candidate
        if not isinstance(candidate, (list, tuple, set)):
            return []
        entries: List[Dict[str, Any]] = []
        for item in candidate:
            normalized = _normalize_choice_entry(item)
            if normalized:
                entries.append(normalized)
        return entries

    def _absorb(source: Any) -> None:
        if not isinstance(source, dict):
            return
        for field, raw in source.items():
            choices = _normalize_seq(raw)
            if choices:
                collected[str(field).strip().lower()] = choices

    declared = getattr(provider, "QUERY_ARG_CHOICES", None)
    if not isinstance(declared, dict):
        declared = getattr(provider, "INLINE_QUERY_FIELD_CHOICES", None)
    _absorb(declared)
    try:
        hook = getattr(provider, "inline_query_field_choices", None)
        if callable(hook):
            _absorb(hook())
    except Exception:
        pass
    return collected
def get_provider(name: str, config: Optional[Dict[str, Any]] = None) -> Optional[Provider]:
@@ -422,7 +550,6 @@ def provider_inline_query_choices(
Providers can expose a mapping via ``QUERY_ARG_CHOICES`` (preferred) or
``INLINE_QUERY_FIELD_CHOICES`` / ``inline_query_field_choices()``. The helper
keeps completion logic simple and reusable.
This helper keeps completion logic simple and reusable.
"""
pname = str(provider_name or "").strip().lower()
@@ -436,73 +563,8 @@ def provider_inline_query_choices(
if provider is None:
return []
def _normalize_choice_entry(entry: Any) -> Optional[Dict[str, Any]]:
if entry is None:
return None
if isinstance(entry, dict):
value = entry.get("value")
text = entry.get("text") or entry.get("label") or value
aliases = entry.get("alias") or entry.get("aliases") or []
value_str = str(value) if value is not None else (str(text) if text is not None else None)
text_str = str(text) if text is not None else value_str
if not value_str or not text_str:
return None
alias_list = [str(a) for a in aliases if a is not None]
return {"value": value_str, "text": text_str, "aliases": alias_list}
# string/other primitives
return {"value": str(entry), "text": str(entry), "aliases": []}
def _collect_mapping(p) -> Dict[str, List[Dict[str, Any]]]:
mapping: Dict[str, List[Dict[str, Any]]] = {}
base = getattr(p, "QUERY_ARG_CHOICES", None)
if not isinstance(base, dict):
base = getattr(p, "INLINE_QUERY_FIELD_CHOICES", None)
if isinstance(base, dict):
for k, v in base.items():
normalized: List[Dict[str, Any]] = []
seq = v
try:
if callable(seq):
seq = seq()
except Exception:
seq = v
if isinstance(seq, dict):
seq = seq.get("choices") or seq.get("values") or seq
if isinstance(seq, (list, tuple, set)):
for entry in seq:
n = _normalize_choice_entry(entry)
if n:
normalized.append(n)
if normalized:
mapping[str(k).strip().lower()] = normalized
try:
fn = getattr(p, "inline_query_field_choices", None)
if callable(fn):
extra = fn()
if isinstance(extra, dict):
for k, v in extra.items():
normalized: List[Dict[str, Any]] = []
seq = v
try:
if callable(seq):
seq = seq()
except Exception:
seq = v
if isinstance(seq, dict):
seq = seq.get("choices") or seq.get("values") or seq
if isinstance(seq, (list, tuple, set)):
for entry in seq:
n = _normalize_choice_entry(entry)
if n:
normalized.append(n)
if normalized:
mapping[str(k).strip().lower()] = normalized
except Exception:
pass
return mapping
try:
mapping = _collect_mapping(provider)
mapping = _collect_inline_choice_mapping(provider)
if not mapping:
return []
@@ -556,7 +618,7 @@ def resolve_inline_filters(
if not inline_args:
return filters
mapping = _collect_mapping(provider)
mapping = _collect_inline_choice_mapping(provider)
transforms = field_transforms or {}
for raw_key, raw_val in inline_args.items():

37
SYS/field_access.py Normal file
View File

@@ -0,0 +1,37 @@
"""Lightweight helpers for accessing fields on mixed pipeline objects.
This intentionally avoids importing cmdlet modules so it can be used from
providers and core pipeline code without pulling in the full cmdlet stack.
"""
from __future__ import annotations
from typing import Any, Optional
def get_field(obj: Any, field: str, default: Optional[Any] = None) -> Any:
    """Extract *field* from a dict or attribute-bearing object.

    Lookup order:
      1. A list is unwrapped to its first element (empty list -> *default*).
      2. Dicts use ``dict.get``.
      3. Objects use ``getattr``; a non-None attribute wins.
      4. A PipeObject-style ``.extra`` dict is consulted last.
    """
    target = obj
    if isinstance(target, list):
        if not target:
            return default
        target = target[0]
    if isinstance(target, dict):
        return target.get(field, default)
    attr = getattr(target, field, None)
    if attr is not None:
        return attr
    extras = getattr(target, "extra", None)
    return extras.get(field, default) if isinstance(extras, dict) else default

265
SYS/pipe_object.py Normal file
View File

@@ -0,0 +1,265 @@
from __future__ import annotations
from typing import Any, Dict, Optional
from SYS import models
def coerce_to_pipe_object(
    value: Any,
    default_path: Optional[str] = None,
) -> models.PipeObject:
    """Normalize any incoming result to a PipeObject for single-source-of-truth state.

    Uses hash+store canonical pattern.

    Accepts a PipeObject (returned unchanged), a dict, an object-like
    result (converted via ``to_dict()`` or attribute scraping), or a bare
    string (treated as a URL or a local path). ``default_path`` seeds the
    fallback path when *value* carries none.
    """
    # Debug: Print ResultItem details if coming from search_file.py
    try:
        from SYS.logger import is_debug_enabled, debug
        if (
            is_debug_enabled()
            and hasattr(value, "__class__")
            and value.__class__.__name__ == "ResultItem"
        ):
            debug("[ResultItem -> PipeObject conversion]")
            debug(f" title={getattr(value, 'title', None)}")
            debug(f" target={getattr(value, 'target', None)}")
            debug(f" hash={getattr(value, 'hash', None)}")
            debug(f" media_kind={getattr(value, 'media_kind', None)}")
            debug(f" tag={getattr(value, 'tag', None)}")
            debug(f" tag_summary={getattr(value, 'tag_summary', None)}")
            debug(f" size_bytes={getattr(value, 'size_bytes', None)}")
            debug(f" duration_seconds={getattr(value, 'duration_seconds', None)}")
            debug(f" relationships={getattr(value, 'relationships', None)}")
            debug(f" url={getattr(value, 'url', None)}")
            debug(
                f" full_metadata keys={list(getattr(value, 'full_metadata', {}).keys()) if hasattr(value, 'full_metadata') and value.full_metadata else []}"
            )
    except Exception:
        pass
    if isinstance(value, models.PipeObject):
        return value
    # Keys consumed directly by PipeObject; anything else lands in `extra`.
    known_keys = {
        "hash",
        "store",
        "tag",
        "title",
        "url",
        "source_url",
        "duration",
        "metadata",
        "warnings",
        "path",
        "relationships",
        "is_temp",
        "action",
        "parent_hash",
    }
    # Convert common object-like results into a dict so we can preserve fields like
    # hash/store/url when they come from result tables (e.g., get-url emits UrlItem).
    #
    # Priority:
    # 1) explicit to_dict()
    # 2) best-effort attribute extraction for known PipeObject-ish fields
    if hasattr(value, "to_dict"):
        value = value.to_dict()
    elif not isinstance(value, dict):
        try:
            obj_map: Dict[str, Any] = {}
            for k in (
                "hash",
                "store",
                "provider",
                "prov",
                "tag",
                "title",
                "url",
                "source_url",
                "duration",
                "duration_seconds",
                "metadata",
                "full_metadata",
                "warnings",
                "path",
                "target",
                "relationships",
                "is_temp",
                "action",
                "parent_hash",
                "extra",
                "media_kind",
            ):
                if hasattr(value, k):
                    obj_map[k] = getattr(value, k)
            if obj_map:
                value = obj_map
        except Exception:
            pass
    if isinstance(value, dict):
        # Extract hash and store (canonical identifiers)
        hash_val = value.get("hash")
        store_val = value.get("store") or "PATH"
        if not store_val or store_val == "PATH":
            try:
                extra_store = value.get("extra", {}).get("store")
            except Exception:
                extra_store = None
            if extra_store:
                store_val = extra_store
        # If no hash, try to compute from path or use placeholder
        if not hash_val:
            path_val = value.get("path")
            if path_val:
                try:
                    from pathlib import Path
                    from SYS.utils import sha256_file
                    hash_val = sha256_file(Path(path_val))
                except Exception:
                    hash_val = "unknown"
            else:
                hash_val = "unknown"
        # Extract title from filename if not provided
        title_val = value.get("title")
        if not title_val:
            path_val = value.get("path")
            if path_val:
                try:
                    from pathlib import Path
                    title_val = Path(path_val).stem
                except Exception:
                    pass
        extra = {k: v for k, v in value.items() if k not in known_keys}
        # Extract URL: prefer direct url field, then url list
        from SYS.metadata import normalize_urls
        url_list = normalize_urls(value.get("url"))
        url_val = url_list[0] if url_list else None
        if len(url_list) > 1:
            extra["url"] = url_list
        # Extract relationships
        rels = value.get("relationships") or {}
        # Canonical tag: accept list or single string
        tag_val: list[str] = []
        if "tag" in value:
            raw_tag = value["tag"]
            if isinstance(raw_tag, list):
                tag_val = [str(t) for t in raw_tag if t is not None]
            elif isinstance(raw_tag, str):
                tag_val = [raw_tag]
        # Consolidate path: prefer explicit path key, but NOT target if it's a URL
        path_val = value.get("path")
        # Only use target as path if it's not a URL (url should stay in url field)
        if not path_val and "target" in value:
            target = value["target"]
            if target and not (
                isinstance(target, str)
                and (target.startswith("http://") or target.startswith("https://"))
            ):
                path_val = target
        # If the path value is actually a URL, move it to url_val and clear path_val
        try:
            if isinstance(path_val, str) and (
                path_val.startswith("http://") or path_val.startswith("https://")
            ):
                # Prefer existing url_val if present, otherwise move path_val into url_val
                if not url_val:
                    url_val = path_val
                path_val = None
        except Exception:
            pass
        # Extract media_kind if available
        if "media_kind" in value:
            extra["media_kind"] = value["media_kind"]
        pipe_obj = models.PipeObject(
            hash=hash_val,
            store=store_val,
            provider=str(
                value.get("provider")
                or value.get("prov")
                or value.get("source")
                or extra.get("provider")
                or extra.get("source")
                or ""
            ).strip()
            or None,
            tag=tag_val,
            title=title_val,
            url=url_val,
            source_url=value.get("source_url"),
            duration=value.get("duration") or value.get("duration_seconds"),
            metadata=value.get("metadata") or value.get("full_metadata") or {},
            warnings=list(value.get("warnings") or []),
            path=path_val,
            relationships=rels,
            is_temp=bool(value.get("is_temp", False)),
            action=value.get("action"),
            parent_hash=value.get("parent_hash"),
            extra=extra,
        )
        return pipe_obj
    # Fallback: build from path argument or bare value
    hash_val = "unknown"
    path_val = default_path or getattr(value, "path", None)
    url_val: Optional[str] = None
    title_val = None
    # If the raw value is a string, treat it as either a URL or a file path.
    # This is important for @-selection results that are plain URL strings.
    if isinstance(value, str):
        s = value.strip()
        if s.lower().startswith(("http://", "https://")):
            url_val = s
            path_val = None
        else:
            path_val = s
    if path_val and path_val != "unknown":
        try:
            from pathlib import Path
            from SYS.utils import sha256_file
            path_obj = Path(path_val)
            hash_val = sha256_file(path_obj)
            # Extract title from filename (without extension)
            title_val = path_obj.stem
        except Exception:
            pass
    # When coming from a raw URL string, mark it explicitly as URL.
    # Otherwise treat it as a local path.
    store_val = "URL" if url_val else "PATH"
    pipe_obj = models.PipeObject(
        hash=hash_val,
        store=store_val,
        provider=None,
        path=str(path_val) if path_val and path_val != "unknown" else None,
        title=title_val,
        url=url_val,
        source_url=url_val,
        tag=[],
        extra={},
    )
    return pipe_obj

View File

@@ -1607,7 +1607,7 @@ class PipelineExecutor:
item = selected_items[0]
url = None
try:
from cmdlet._shared import get_field
from SYS.field_access import get_field
url = get_field(item, "url")
except Exception:
@@ -2043,7 +2043,7 @@ class PipelineExecutor:
return False, None
debug(f"@N: _maybe_run_class_selector returned False, continuing")
from cmdlet._shared import coerce_to_pipe_object
from SYS.pipe_object import coerce_to_pipe_object
filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
piped_result = (
@@ -2467,7 +2467,7 @@ class PipelineExecutor:
last_items = None
if last_items:
from cmdlet._shared import coerce_to_pipe_object
from SYS.pipe_object import coerce_to_pipe_object
try:
pipe_items = [
@@ -2706,7 +2706,7 @@ class PipelineExecutor:
if (next_cmd in {"delete-tag",
"delete_tag"} and len(filtered) > 1
and all(_is_tag_row(x) for x in filtered)):
from cmdlet._shared import get_field
from SYS.field_access import get_field
tags: List[str] = []
first_hash = None
@@ -2739,7 +2739,7 @@ class PipelineExecutor:
piped_result = grouped
continue
from cmdlet._shared import coerce_to_pipe_object
from SYS.pipe_object import coerce_to_pipe_object
filtered_pipe_objs = [
coerce_to_pipe_object(item) for item in filtered

View File

@@ -8,7 +8,7 @@ import logging
from pathlib import Path
from typing import Optional, Dict, Any, List, Callable, Tuple
from datetime import datetime
from threading import Thread, Lock
from threading import Thread, Lock, Event
import time
from SYS.logger import log
@@ -273,6 +273,7 @@ class WorkerManager:
self.refresh_callbacks: List[Callable] = []
self.refresh_thread: Optional[Thread] = None
self._stop_refresh = False
self._refresh_stop_event = Event()
self._lock = Lock()
self.worker_handlers: Dict[str, WorkerLoggingHandler] = {}
self._worker_last_step: Dict[str, str] = {}
@@ -658,6 +659,7 @@ class WorkerManager:
f"[WorkerManager] Starting auto-refresh with {self.auto_refresh_interval}s interval"
)
self._stop_refresh = False
self._refresh_stop_event.clear()
self.refresh_thread = Thread(target=self._auto_refresh_loop, daemon=True)
self.refresh_thread.start()
@@ -665,6 +667,7 @@ class WorkerManager:
"""Stop the auto-refresh thread."""
logger.info("[WorkerManager] Stopping auto-refresh")
self._stop_refresh = True
self._refresh_stop_event.set()
if self.refresh_thread:
self.refresh_thread.join(timeout=5)
self.refresh_thread = None
@@ -679,7 +682,8 @@ class WorkerManager:
"""Main auto-refresh loop that periodically queries and notifies."""
try:
while not self._stop_refresh:
time.sleep(self.auto_refresh_interval)
if self._refresh_stop_event.wait(self.auto_refresh_interval):
break
# Check if there are active workers
active = self.get_active_workers()

View File

@@ -1354,7 +1354,7 @@ class DownloadModal(ModalScreen):
try:
from pathlib import Path
import requests
from API.requests_client import get_requests_session
from SYS.config import resolve_output_dir
# Create temporary list of playlist items for selection parsing
@@ -1385,7 +1385,7 @@ class DownloadModal(ModalScreen):
try:
logger.info(f"Downloading PDF {idx}/{len(selected_url)}: {url}")
response = requests.get(url, timeout=30)
response = get_requests_session().get(url, timeout=30)
response.raise_for_status()
# Generate filename from URL

View File

@@ -2753,243 +2753,14 @@ def register_url_with_local_library(
return False
def resolve_tidal_manifest_path(item: Any) -> Optional[str]:
"""Persist the Tidal manifest from search results and return a local path."""
metadata = None
if isinstance(item, dict):
metadata = item.get("full_metadata") or item.get("metadata")
else:
metadata = getattr(item, "full_metadata", None) or getattr(item, "metadata", None)
if not isinstance(metadata, dict):
try:
# Provider-specific implementation lives with the provider code.
from Provider.tidal_manifest import resolve_tidal_manifest_path
except Exception: # pragma: no cover
def resolve_tidal_manifest_path(item: Any) -> Optional[str]:
_ = item
return None
existing_path = metadata.get("_tidal_manifest_path")
if existing_path:
try:
resolved = Path(str(existing_path))
if resolved.is_file():
return str(resolved)
except Exception:
pass
existing_url = metadata.get("_tidal_manifest_url")
if existing_url and isinstance(existing_url, str):
candidate = existing_url.strip()
if candidate:
return candidate
raw_manifest = metadata.get("manifest")
if not raw_manifest:
# When piping directly from the Tidal search table, we may only have a track id.
# Fetch track details from the proxy so downstream stages can decode the manifest.
try:
already = bool(metadata.get("_tidal_track_details_fetched"))
except Exception:
already = False
track_id = metadata.get("trackId") or metadata.get("id")
if track_id is None:
try:
if isinstance(item, dict):
candidate_path = item.get("path") or item.get("url")
else:
candidate_path = getattr(item, "path", None) or getattr(item, "url", None)
except Exception:
candidate_path = None
if candidate_path:
m = re.search(
r"(tidal|hifi):(?://)?track[\\/](\d+)",
str(candidate_path),
flags=re.IGNORECASE,
)
if m:
track_id = m.group(2)
if (not already) and track_id is not None:
try:
track_int = int(track_id)
except Exception:
track_int = None
if track_int and track_int > 0:
try:
import httpx
resp = httpx.get(
"https://tidal-api.binimum.org/track/",
params={"id": str(track_int)},
timeout=10.0,
)
resp.raise_for_status()
payload = resp.json()
data = payload.get("data") if isinstance(payload, dict) else None
if isinstance(data, dict) and data:
try:
metadata.update(data)
except Exception:
pass
try:
metadata["_tidal_track_details_fetched"] = True
except Exception:
pass
if not metadata.get("url"):
try:
resp_info = httpx.get(
"https://tidal-api.binimum.org/info/",
params={"id": str(track_int)},
timeout=10.0,
)
resp_info.raise_for_status()
info_payload = resp_info.json()
info_data = info_payload.get("data") if isinstance(info_payload, dict) else None
if isinstance(info_data, dict) and info_data:
try:
for k, v in info_data.items():
if k not in metadata:
metadata[k] = v
except Exception:
pass
try:
if info_data.get("url"):
metadata["url"] = info_data.get("url")
except Exception:
pass
except Exception:
pass
except Exception:
pass
raw_manifest = metadata.get("manifest")
if not raw_manifest:
return None
manifest_str = "".join(str(raw_manifest or "").split())
if not manifest_str:
return None
manifest_bytes: bytes
try:
manifest_bytes = base64.b64decode(manifest_str, validate=True)
except Exception:
try:
manifest_bytes = base64.b64decode(manifest_str, validate=False)
except Exception:
try:
manifest_bytes = manifest_str.encode("utf-8")
except Exception:
return None
if not manifest_bytes:
return None
head = (manifest_bytes[:1024] or b"").lstrip()
if head.startswith((b"{", b"[")):
try:
text = manifest_bytes.decode("utf-8", errors="ignore")
payload = json.loads(text)
urls = payload.get("urls") or []
selected_url = None
for candidate in urls:
if isinstance(candidate, str):
candidate = candidate.strip()
if candidate:
selected_url = candidate
break
if selected_url:
try:
metadata["_tidal_manifest_url"] = selected_url
except Exception:
pass
return selected_url
try:
metadata["_tidal_manifest_error"] = "JSON manifest contained no urls"
except Exception:
pass
log(
f"[tidal] JSON manifest for track {metadata.get('trackId') or metadata.get('id')} had no playable urls",
file=sys.stderr,
)
except Exception as exc:
try:
metadata["_tidal_manifest_error"] = (
f"Failed to parse JSON manifest: {exc}"
)
except Exception:
pass
log(
f"[tidal] Failed to parse JSON manifest for track {metadata.get('trackId') or metadata.get('id')}: {exc}",
file=sys.stderr,
)
return None
looks_like_mpd = (
head.startswith(b"<?xml")
or head.startswith(b"<MPD")
or b"<MPD" in head
)
if not looks_like_mpd:
manifest_mime = str(metadata.get("manifestMimeType") or "").strip().lower()
try:
metadata["_tidal_manifest_error"] = (
f"Decoded manifest is not an MPD XML (mime: {manifest_mime or 'unknown'})"
)
except Exception:
pass
try:
log(
f"[tidal] Decoded manifest is not an MPD XML for track {metadata.get('trackId') or metadata.get('id')} (mime {manifest_mime or 'unknown'})",
file=sys.stderr,
)
except Exception:
pass
return None
manifest_hash = str(metadata.get("manifestHash") or "").strip()
track_id = metadata.get("trackId") or metadata.get("id")
identifier = manifest_hash or hashlib.sha256(manifest_bytes).hexdigest()
identifier_safe = re.sub(r"[^A-Za-z0-9_-]+", "_", identifier)[:64]
if not identifier_safe:
identifier_safe = hashlib.sha256(manifest_bytes).hexdigest()[:12]
track_safe = "tidal"
if track_id is not None:
track_safe = re.sub(r"[^A-Za-z0-9_-]+", "_", str(track_id))[:32]
if not track_safe:
track_safe = "tidal"
# Persist as .mpd for DASH manifests.
ext = "mpd"
manifest_dir = Path(tempfile.gettempdir()) / "medeia" / "tidal"
try:
manifest_dir.mkdir(parents=True, exist_ok=True)
except Exception:
pass
filename = f"tidal-{track_safe}-{identifier_safe[:24]}.{ext}"
target_path = manifest_dir / filename
try:
with open(target_path, "wb") as fh:
fh.write(manifest_bytes)
metadata["_tidal_manifest_path"] = str(target_path)
if isinstance(item, dict):
if item.get("full_metadata") is metadata:
item["full_metadata"] = metadata
elif item.get("metadata") is metadata:
item["metadata"] = metadata
else:
extra = getattr(item, "extra", None)
if isinstance(extra, dict):
extra["_tidal_manifest_path"] = str(target_path)
except Exception:
return None
return str(target_path)
def check_url_exists_in_storage(
urls: Sequence[str],
storage: Any,

View File

@@ -568,14 +568,6 @@ class Add_File(Cmdlet):
progress.step("ingesting file")
if provider_name:
if str(provider_name).strip().lower() == "matrix":
log(
"Matrix uploads are handled by .matrix (not add-file).",
file=sys.stderr,
)
failures += 1
continue
code = self._handle_provider_upload(
media_path,
provider_name,

View File

@@ -287,48 +287,29 @@ class search_file(Cmdlet):
results = provider.search(query, limit=limit, filters=search_filters or None)
debug(f"[search-file] {provider_name} -> {len(results or [])} result(s)")
# Tidal artist UX: if there is exactly one artist match, auto-expand
# directly to albums without requiring an explicit @1 selection.
if (
provider_lower == "tidal"
and table_meta.get("view") == "artist"
and isinstance(results, list)
and len(results) == 1
):
try:
artist_res = results[0]
artist_name = str(getattr(artist_res, "title", "") or "").strip()
artist_md = getattr(artist_res, "full_metadata", None)
artist_id = None
if isinstance(artist_md, dict):
raw_id = artist_md.get("artistId") or artist_md.get("id")
try:
artist_id = int(raw_id) if raw_id is not None else None
except Exception:
artist_id = None
album_results = []
if hasattr(provider, "_albums_for_artist") and callable(getattr(provider, "_albums_for_artist")):
try:
album_results = provider._albums_for_artist( # type: ignore[attr-defined]
artist_id=artist_id,
artist_name=artist_name,
limit=max(int(limit or 0), 200),
)
except Exception:
album_results = []
if album_results:
results = album_results
table_type = "tidal.album"
# Allow providers to apply provider-specific UX transforms (e.g. auto-expansion)
try:
post = getattr(provider, "postprocess_search_results", None)
if callable(post) and isinstance(results, list):
results, table_type_override, table_meta_override = post(
query=query,
results=results,
filters=search_filters or None,
limit=int(limit or 0),
table_type=str(table_type or ""),
table_meta=dict(table_meta) if isinstance(table_meta, dict) else None,
)
if table_type_override:
table_type = str(table_type_override)
table.set_table(table_type)
table_meta["view"] = "album"
if isinstance(table_meta_override, dict) and table_meta_override:
table_meta = dict(table_meta_override)
try:
table.set_table_metadata(table_meta)
except Exception:
pass
except Exception:
pass
except Exception:
pass
if not results:
log(f"No results found for query: {query}", file=sys.stderr)

View File

@@ -8,7 +8,8 @@ import re
from datetime import datetime
from urllib.parse import urlparse, parse_qs
from pathlib import Path
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args, resolve_tidal_manifest_path
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
from Provider.tidal_manifest import resolve_tidal_manifest_path
from SYS.logger import debug, get_thread_stream, is_debug_enabled, set_debug, set_thread_stream
from SYS.result_table import Table
from MPV.mpv_ipc import MPV

View File

@@ -1,500 +0,0 @@
Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
DEBUG: MPV log file: C:\Forgejo\Medios-Macina\Log\medeia-mpv.log
DEBUG: Named pipe not available yet: \\.\pipe\mpv-medios-macina
DEBUG: Named pipe not available yet: \\.\pipe\mpv-medios-macina
MPV log file: C:\Forgejo\Medios-Macina\Log\medeia-mpv.log
DEBUG: Named pipe not available yet: \\.\pipe\mpv-medios-macina
DEBUG: Named pipe not available yet: \\.\pipe\mpv-medios-macina
MPV log (tail):
[ 34.120][d][ao/wasapi] Fixing format
[ 34.120][d][ao/wasapi] IAudioClient::GetDevicePeriod
[ 34.121][v][ao/wasapi] Device period: default 10000 us, minimum 3000 us
[ 34.121][d][ao/wasapi] IAudioClient::Initialize
[ 34.134][d][ao/wasapi] IAudioClient::Initialize pRenderClient
[ 34.134][d][ao/wasapi] IAudioClient::Initialize IAudioClient_SetEventHandle
[ 34.134][d][ao/wasapi] IAudioClient::Initialize IAudioClient_GetBufferSize
[ 34.134][v][ao/wasapi] Buffer frame count: 1056 (22000 us)
[ 34.134][v][ao/wasapi] IAudioClock::GetFrequency gave a frequency of 384000.
[ 34.136][d][ao/wasapi] IAudioClient::Initialize pAudioVolume
[ 34.136][d][ao/wasapi] Entering dispatch loop
[ 34.136][d][ao/wasapi] Init wasapi done
[ 34.136][v][ao/wasapi] device buffer: 1056 samples.
[ 34.136][v][ao/wasapi] using soft-buffer of 9600 samples.
[ 34.136][i][cplayer] AO: [wasapi] 48000Hz stereo 2ch float
[ 34.136][v][cplayer] AO: Description: Windows WASAPI audio output (event mode)
[ 34.136][v][autoconvert] inserting resampler
[ 34.136][v][swresample] format change, reinitializing resampler
[ 34.136][v][swresample] 44100Hz stereo floatp -> 48000Hz stereo float
[ 34.137][v][af] [out] 48000Hz stereo 2ch float
[ 34.139][v][cplayer] audio ready
[ 34.139][v][cplayer] starting audio playback
[ 34.139][d][ao/wasapi] Thread Resume
[ 34.139][d][ao/wasapi] Thread Reset
[ 34.139][d][ao/wasapi] Thread Pause
[ 34.139][d][ao/wasapi] Thread Unpause
[ 34.139][v][cplayer] playback restart complete @ 0.000000, audio=playing, video=eof
[ 34.140][v][cplayer] Set property: user-data/medeia-pipeline-response="" -> 1
[ 34.141][v][cplayer] Set property: user-data/medeia-pipeline-request="{\"data\":{\"url\":\"https://www.youtube.com/watch?v=dQw4w9WgXcQ\"},\"op\":\"ytdlp-formats\",\"id\":\"34082-728967\"}" -> 1
[ 34.148][d][timeline] stream 0: resize index to 256
[ 34.173][v][lavf] EOF reached.
[ 34.173][v][timeline] EOF reached.
[ 35.393][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 35.393][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 35.393][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 35.399][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 35.399][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 35.399][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 35.484][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 35.484][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 35.484][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 35.490][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 35.490][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 35.490][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 35.962][v][ipc_33] Client connected
[ 35.963][v][cplayer] Set property: options/log-file="C:\\Forgejo\\Medios-Macina\\Log\\medeia-mpv.log" -> 1
[ 35.965][v][ipc_33] Client disconnected
[ 35.965][d][ipc_33] Destroying client handle...
[ 35.966][v][ipc_34] Client connected
[ 35.967][v][cplayer] Set property: options/msg-level="all=v" -> 1
[ 35.969][v][ipc_34] Client disconnected
[ 35.969][d][ipc_34] Destroying client handle...
[ 35.970][v][ipc_35] Client connected
[ 35.971][i][cplayer] medeia: log enabled -> C:\Forgejo\Medios-Macina\Log\medeia-mpv.log
[ 35.972][v][ipc_35] Client disconnected
[ 35.972][d][ipc_35] Destroying client handle...
[ 35.973][v][ipc_36] Client connected
[ 35.973][v][ipc_36] Client disconnected
[ 35.973][v][ipc_37] Client connected
[ 35.973][d][ipc_36] Destroying client handle...
[ 35.974][v][cplayer] Set property: options/log-file="C:\\Forgejo\\Medios-Macina\\Log\\medeia-mpv.log" -> 1
[ 35.975][v][ipc_37] Client disconnected
[ 35.975][d][ipc_37] Destroying client handle...
[ 35.975][v][ipc_38] Client connected
[ 35.977][v][cplayer] Set property: options/msg-level="all=v" -> 1
[ 35.978][v][ipc_38] Client disconnected
[ 35.979][d][ipc_38] Destroying client handle...
[ 35.980][v][ipc_39] Client connected
[ 35.981][v][cplayer] Set property: options/log-file="C:\\Forgejo\\Medios-Macina\\Log\\medeia-mpv.log" -> 1
[ 35.983][v][ipc_39] Client disconnected
[ 35.983][d][ipc_39] Destroying client handle...
[ 35.984][v][ipc_40] Client connected
[ 35.985][v][cplayer] Set property: options/msg-level="all=v" -> 1
[ 35.986][v][ipc_40] Client disconnected
[ 35.986][d][ipc_40] Destroying client handle...
[ 35.986][v][ipc_41] Client connected
[ 35.987][i][cplayer] medeia: log enabled -> C:\Forgejo\Medios-Macina\Log\medeia-mpv.log
[ 35.989][v][ipc_41] Client disconnected
[ 35.989][d][ipc_41] Destroying client handle...
[ 36.180][d][cplayer] Run command: cycle, flags=73, args=[name="pause", value="1.000000"]
[ 36.180][v][cplayer] Set property: pause -> 1
[ 36.180][d][ao/wasapi] Thread Pause
[ 37.104][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 37.104][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 37.104][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 37.576][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 37.583][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 37.583][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 38.287][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 38.287][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 38.287][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 38.693][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 38.693][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 38.693][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 38.965][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 38.965][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 38.965][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 38.984][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 38.990][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 38.990][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 39.098][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 39.098][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 39.105][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 39.112][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 39.112][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 39.112][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 41.106][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 41.106][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 41.106][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 41.115][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 41.122][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 41.122][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 42.198][d][cplayer] Run command: begin-vo-dragging, flags=73, args=[]
[ 42.814][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 42.814][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 42.828][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 42.828][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 42.835][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 42.835][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_right", flags="allow-hide-cursor+allow-vo-dragging"]
[ 42.835][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 42.835][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 42.856][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 42.856][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_right"]
[ 42.856][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 42.856][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 44.274][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 44.274][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_right", flags="allow-hide-cursor+allow-vo-dragging"]
[ 44.274][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 44.274][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 44.283][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 44.283][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_right"]
[ 44.283][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 44.283][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 44.297][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 44.297][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 44.297][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 44.305][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 44.305][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 44.305][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 45.258][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 45.258][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 45.258][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 45.264][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 45.270][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 45.271][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 46.370][d][cplayer] Run command: begin-vo-dragging, flags=73, args=[]
[ 48.743][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 48.744][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 50.556][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 50.556][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 50.682][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 50.682][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 50.696][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 50.696][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 50.710][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 50.710][d][cplayer] Run command: enable-section, flags=64, args=[name="wheel", flags="allow-hide-cursor+allow-vo-dragging"]
[ 50.710][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 50.786][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left_dbl"]
[ 50.786][d][cplayer] Run command: disable-section, flags=64, args=[name="wheel"]
[ 50.786][d][cplayer] Run command: disable-section, flags=64, args=[name="mbtn_left"]
[ 52.125][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left_dbl", flags=""]
[ 52.125][d][cplayer] Run command: enable-section, flags=64, args=[name="mbtn_left", flags=""]
[ 52.788][d][cplayer] Run command: script-binding, flags=73, args=[name="uosc/__keybinding1", arg=""]
[ 52.789][d][cplayer] Run command: quit, flags=73, args=[code="0"]
[ 52.789][v][cplayer] EOF code: 5
[ 52.789][d][ad] Uninit decoder.
[ 52.789][d][ao/wasapi] Thread Reset
[ 52.789][d][ao/wasapi] Thread Pause
[ 52.790][d][cplayer] Terminating demuxers...
[ 52.791][d][ffmpeg] AVIOContext: Statistics: 1300631 bytes read, 0 seeks
[ 52.791][d][cplayer] Done terminating demuxers.
[ 52.791][v][cplayer] finished playback, success (reason 3)
[ 52.791][v][cplayer] Running hook: ytdl_hook/on_after_end_file
[ 52.792][v][auto_profiles] Re-evaluating auto profile manga
[ 52.793][v][auto_profiles] Re-evaluating auto profile loop-short
[ 52.793][d][cplayer] Run command: del, flags=64, args=[name="user-data/mpv/ytdl/json-subprocess-result"]
[ 52.793][i][cplayer] Exiting... (Quit)
[ 52.794][d][cplayer] Run command: del, flags=64, args=[name="user-data/mpv/console"]
[ 52.794][d][ipc_8] Destroying client handle...
[ 52.794][d][positioning] Destroying client handle...
[ 52.794][d][commands] Destroying client handle...
[ 52.794][d][select] Destroying client handle...
[ 52.794][d][ipc_3] Destroying client handle...
[ 52.794][d][ipc_32] Destroying client handle...
[ 52.794][d][cplayer] Run command: keybind, flags=64, args=[name="q", cmd="quit", comment=""]
[ 52.794][d][console] Destroying client handle...
[ 52.794][d][auto_profiles] Destroying client handle...
[ 52.795][d][stats] Destroying client handle...
[ 52.796][d][main] Destroying client handle...
[ 52.796][d][ytdl_hook] Destroying client handle...
[ 52.798][d][uosc] Destroying client handle...
[ 52.799][d][SystemMediaTransportControls] Destroying client handle...
[ 52.805][d][ao/wasapi] Uninit wasapi
[ 52.805][d][ao/wasapi] Thread Reset
[ 52.805][d][ao/wasapi] Thread Pause
[ 52.805][d][ao/wasapi] Thread shutdown
[ 52.805][d][ao/wasapi] Thread uninit done
[ 52.805][d][ao/wasapi] Thread return
[ 52.806][d][ao/wasapi] Uninit wasapi done
[ 52.851][v][vo/gpu-next/win32] uninit
Helper logs from database (mpv module, most recent first):
[ERROR] Load URL pipeline failed: timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[WARN] Load URL still processing after 5 seconds
[INFO] [mpv error] ytdl_hook ERROR: Unsupported URL: https://example.com/
[INFO] [mpv error] ytdl_hook youtube-dl failed: unexpected error occurred
[INFO] [mpv error] cplayer Failed to recognize file format.
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [mpv error] vo/gpu-next/libplacebo Failed creating FBO texture! Disabling advanced rendering..
[INFO] [mpv error] vo/gpu-next/libplacebo Failed dispatching scaler.. disabling
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [mpv] (previous line repeated 4x)
[INFO] [mpv warn] input No key binding found for key ';'.
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [mpv] (previous line repeated 2x)
[INFO] [mpv] (previous line repeated 4x)
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [mpv] (previous line repeated 4x)
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [mpv] (previous line repeated 2x)
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [mpv] (previous line repeated 3x)
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [mpv] (previous line repeated 3x)
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [mpv] (previous line repeated 4x)
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [mpv warn] input No key binding found for key 'MBTN_RIGHT_DBL'.
[INFO] [helper] version=2025-12-19 started ipc=\\.\pipe\mpv-medios-macina
[INFO] [helper] file=C:\Forgejo\Medios-Macina\MPV\pipeline_helper.py cwd=C:\Forgejo\Medios-Macina
[INFO] [helper] config_root=C:\Forgejo\Medios-Macina exists=False
[INFO] [helper] requested mpv log messages level=warn
[INFO] [helper] ready heartbeat armed prop=user-data/medeia-pipeline-ready
[INFO] [py] Loaded config from medios.db: providers=4 (alldebrid, soulseek, matrix,
[INFO] [py] telegram), stores=2 (hydrusnetwork, debrid), mtime=2026-02-02T02:46:41.638481Z
[INFO] [py] DEBUG: config_dir=C:\Forgejo\Medios-Macina choices=2
[INFO] [helper] startup store-choices count=2 items=local, rpi
[INFO] [helper] published store-choices to user-data/medeia-store-choices-cached
[INFO] [helper] published config temp to user-data/medeia-config-temp=C:\Users\Admin\AppData\Local\Temp
[INFO] [helper] connected to ipc=\\.\pipe\mpv-medios-macina
[INFO] [mpv] (previous line repeated 3x)
Helper log file: C:\Forgejo\Medios-Macina\Log\medeia-mpv-helper.log
Helper log (tail):
[lua] [2026-02-04 16:56:11] medeia lua loaded version=2025-12-24 script=main
[lua] [2026-02-04 16:56:11] medeia-lua loaded version=2025-12-24
[lua] [2026-02-04 16:56:11] stores: cache_read cached_json=nil len=0
[lua] [2026-02-04 16:56:11] stores: cache_empty cached_json=nil
[lua] [2026-02-04 16:56:11] stores: requesting store-choices via helper (fallback)
[lua] [2026-02-04 16:56:11] stores: failed to load store choices via helper; success=false choices_type=nil stderr= error=helper not running
[lua] [2026-02-04 16:56:12] [KEY] attempting to re-register mbtn_right after UOSC loaded
[lua] [2026-02-04 16:56:26] [input.conf] medios-show-menu called
[lua] [2026-02-04 16:56:26] [MENU] M.show_menu called
[lua] [2026-02-04 16:56:26] [MENU] Built 6 menu items
[lua] [2026-02-04 16:56:26] [MENU] ensure_uosc_loaded returned: true
[lua] [2026-02-04 16:56:26] [MENU] Sending menu JSON to uosc: {"title":"Medios Macina","items":[{"title":"Load URL","value":"script-message medios-load-url"},{"value":"script-binding medios-info","title":"Get Metadata","hint":"Ctrl+i"},{"value":"script-binding m...
[lua] [2026-02-04 16:56:26] [MENU] Menu command sent successfully
[lua] [2026-02-04 16:56:27] medios-load-url handler called
[lua] [2026-02-04 16:56:27] medios-load-url: closing main menu before opening Load URL prompt
[lua] [2026-02-04 16:56:27] open_load_url_prompt called
[lua] [2026-02-04 16:56:27] open_load_url_prompt: sending menu to uosc
[lua] [2026-02-04 16:56:33] [LOAD-URL] Event handler called with: {"type":"search","query":"https://www.youtube.com/watch?v=3IpPonmYx3g","menu_id":"{root}"}
[lua] [2026-02-04 16:56:33] [LOAD-URL] Parsed event: type=search, query=https://www.youtube.com/watch?v=3IpPonmYx3g
[lua] [2026-02-04 16:56:33] [LOAD-URL] Trimmed URL: "https://www.youtube.com/watch?v=3IpPonmYx3g"
[lua] [2026-02-04 16:56:33] [INFO] Load URL started: https://www.youtube.com/watch?v=3IpPonmYx3g
[lua] [2026-02-04 16:56:33] [LOAD-URL] Starting to load: https://www.youtube.com/watch?v=3IpPonmYx3g
[lua] [2026-02-04 16:56:33] [LOAD-URL] Checking if URL can be loaded directly: false
[lua] [2026-02-04 16:56:33] [LOAD-URL] URL requires pipeline helper for processing
[lua] [2026-02-04 16:56:33] [LOAD-URL] Pipeline helper ready: true
[lua] [2026-02-04 16:56:33] [LOAD-URL] Sending to pipeline: .mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play
[lua] [2026-02-04 16:56:33] [LOAD-URL] Pipeline helper ready: true
[lua] [2026-02-04 16:56:33] M.run_pipeline called with cmd: .mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play
[lua] [2026-02-04 16:56:34] ipc-async: send request id=22550-814785 cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play
[lua] [2026-02-04 16:56:35] [LOAD-URL] Event handler called with: {"type":"close"}
[lua] [2026-02-04 16:56:35] [LOAD-URL] Parsed event: type=close, query=nil
[lua] [2026-02-04 16:56:35] [LOAD-URL] Event type is not search: close
[lua] [2026-02-04 16:56:35] [LOAD-URL] Closing menu due to type mismatch
[lua] [2026-02-04 16:56:38] [WARN] Load URL still processing after 5 seconds
[lua] [2026-02-04 16:56:38] [LOAD-URL] Timeout message shown (helper still processing)
[lua] [2026-02-04 16:56:44] M.run_pipeline callback fired: resp=nil, err=timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[lua] [2026-02-04 16:56:44] pipeline failed cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play err=timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[lua] [2026-02-04 16:56:44] [LOAD-URL] Pipeline callback received: resp=nil, err=timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[lua] [2026-02-04 16:56:44] [LOAD-URL] Pipeline error: timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[lua] [2026-02-04 16:56:44] [ERROR] Load URL pipeline failed: timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[lua] [2026-02-04 16:56:44] [LOAD-URL] Closing menu
[lua] [2026-02-04 16:56:44] [LOAD-URL] Sending close-menu command to UOSC
[lua] [2026-02-04 16:56:45] ipc-async: send request id=34082-728967 op=ytdlp-formats
Lua log file: C:\Forgejo\Medios-Macina\Log\medeia-mpv-lua.log
Lua log (tail):
[2026-02-04 16:56:11] medeia lua loaded version=2025-12-24 script=main
[2026-02-04 16:56:11] medeia-lua loaded version=2025-12-24
[2026-02-04 16:56:11] stores: cache_read cached_json=nil len=0
[2026-02-04 16:56:11] stores: cache_empty cached_json=nil
[2026-02-04 16:56:11] stores: requesting store-choices via helper (fallback)
[2026-02-04 16:56:11] stores: failed to load store choices via helper; success=false choices_type=nil stderr= error=helper not running
[2026-02-04 16:56:12] [KEY] attempting to re-register mbtn_right after UOSC loaded
[2026-02-04 16:56:26] [input.conf] medios-show-menu called
[2026-02-04 16:56:26] [MENU] M.show_menu called
[2026-02-04 16:56:26] [MENU] Built 6 menu items
[2026-02-04 16:56:26] [MENU] ensure_uosc_loaded returned: true
[2026-02-04 16:56:26] [MENU] Sending menu JSON to uosc: {"title":"Medios Macina","items":[{"title":"Load URL","value":"script-message medios-load-url"},{"value":"script-binding medios-info","title":"Get Metadata","hint":"Ctrl+i"},{"value":"script-binding m...
[2026-02-04 16:56:26] [MENU] Menu command sent successfully
[2026-02-04 16:56:27] medios-load-url handler called
[2026-02-04 16:56:27] medios-load-url: closing main menu before opening Load URL prompt
[2026-02-04 16:56:27] open_load_url_prompt called
[2026-02-04 16:56:27] open_load_url_prompt: sending menu to uosc
[2026-02-04 16:56:33] [LOAD-URL] Event handler called with: {"type":"search","query":"https://www.youtube.com/watch?v=3IpPonmYx3g","menu_id":"{root}"}
[2026-02-04 16:56:33] [LOAD-URL] Parsed event: type=search, query=https://www.youtube.com/watch?v=3IpPonmYx3g
[2026-02-04 16:56:33] [LOAD-URL] Trimmed URL: "https://www.youtube.com/watch?v=3IpPonmYx3g"
[2026-02-04 16:56:33] [INFO] Load URL started: https://www.youtube.com/watch?v=3IpPonmYx3g
[2026-02-04 16:56:33] [LOAD-URL] Starting to load: https://www.youtube.com/watch?v=3IpPonmYx3g
[2026-02-04 16:56:33] [LOAD-URL] Checking if URL can be loaded directly: false
[2026-02-04 16:56:33] [LOAD-URL] URL requires pipeline helper for processing
[2026-02-04 16:56:33] [LOAD-URL] Pipeline helper ready: true
[2026-02-04 16:56:33] [LOAD-URL] Sending to pipeline: .mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play
[2026-02-04 16:56:33] [LOAD-URL] Pipeline helper ready: true
[2026-02-04 16:56:33] M.run_pipeline called with cmd: .mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play
[2026-02-04 16:56:34] ipc-async: send request id=22550-814785 cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play
[2026-02-04 16:56:35] [LOAD-URL] Event handler called with: {"type":"close"}
[2026-02-04 16:56:35] [LOAD-URL] Parsed event: type=close, query=nil
[2026-02-04 16:56:35] [LOAD-URL] Event type is not search: close
[2026-02-04 16:56:35] [LOAD-URL] Closing menu due to type mismatch
[2026-02-04 16:56:38] [WARN] Load URL still processing after 5 seconds
[2026-02-04 16:56:38] [LOAD-URL] Timeout message shown (helper still processing)
[2026-02-04 16:56:44] M.run_pipeline callback fired: resp=nil, err=timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[2026-02-04 16:56:44] pipeline failed cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play err=timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[2026-02-04 16:56:44] [LOAD-URL] Pipeline callback received: resp=nil, err=timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[2026-02-04 16:56:44] [LOAD-URL] Pipeline error: timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[2026-02-04 16:56:44] [ERROR] Load URL pipeline failed: timeout waiting response (cmd=.mpv -url "https://www.youtube.com/watch?v=3IpPonmYx3g" -play)
[2026-02-04 16:56:44] [LOAD-URL] Closing menu
[2026-02-04 16:56:44] [LOAD-URL] Sending close-menu command to UOSC
[2026-02-04 16:56:45] ipc-async: send request id=34082-728967 op=ytdlp-formats