from typing import Any, Dict, Sequence, List, Optional

import os
import sys
import json
import socket
import re
import subprocess
from urllib.parse import urlparse, parse_qs
from pathlib import Path

from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
from SYS.logger import debug, get_thread_stream, is_debug_enabled, set_debug, set_thread_stream
from result_table import ResultTable
from MPV.mpv_ipc import MPV
import pipeline as ctx
from models import PipeObject

from API.folder import LocalLibrarySearchOptimizer
from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url


_ALLDEBRID_UNLOCK_CACHE: Dict[str, str] = {}


def _get_alldebrid_api_key(config: Optional[Dict[str, Any]]) -> Optional[str]:
    """Return the AllDebrid API key from config['provider']['alldebrid'], if configured."""
    try:
        if not isinstance(config, dict):
            return None
        provider_cfg = config.get("provider")
        if not isinstance(provider_cfg, dict):
            return None
        ad_cfg = provider_cfg.get("alldebrid")
        if not isinstance(ad_cfg, dict):
            return None
        key = ad_cfg.get("api_key")
        if not isinstance(key, str):
            return None
        key = key.strip()
        return key or None
    except Exception:
        return None


def _is_alldebrid_protected_url(url: str) -> bool:
    """Return True for AllDebrid file-page URLs (host alldebrid.com, path under /f/)."""
    try:
        if not isinstance(url, str):
            return False
        u = url.strip()
        if not u.startswith(("http://", "https://")):
            return False
        p = urlparse(u)
        host = (p.netloc or "").lower()
        path = p.path or ""
        # AllDebrid file page links (require auth; not directly streamable by mpv)
        return host == "alldebrid.com" and path.startswith("/f/")
    except Exception:
        return False


def _maybe_unlock_alldebrid_url(url: str, config: Optional[Dict[str, Any]]) -> str:
    """Convert AllDebrid protected file URLs into direct streamable links.

    When AllDebrid returns `https://alldebrid.com/f/...`, that URL typically requires
    authentication. MPV cannot access it without credentials. We transparently call
    the AllDebrid API `link/unlock` (using the configured API key) to obtain a direct
    URL that MPV can stream.
    """
    if not _is_alldebrid_protected_url(url):
        return url

    cached = _ALLDEBRID_UNLOCK_CACHE.get(url)
    if isinstance(cached, str) and cached:
        return cached

    api_key = _get_alldebrid_api_key(config)
    if not api_key:
        return url

    try:
        from API.alldebrid import AllDebridClient

        client = AllDebridClient(api_key)
        unlocked = client.unlock_link(url)
        if isinstance(unlocked, str) and unlocked.strip():
            unlocked = unlocked.strip()
            _ALLDEBRID_UNLOCK_CACHE[url] = unlocked
            return unlocked
    except Exception as e:
        debug(f"AllDebrid unlock failed for MPV target: {e}", file=sys.stderr)

    return url


def _ensure_lyric_overlay(mpv: MPV) -> None:
    """Best-effort start of the lyric overlay loader; failures are ignored."""
    try:
        mpv.ensure_lyric_loader_running()
    except Exception:
        pass


def _send_ipc_command(command: Dict[str, Any], silent: bool = False) -> Optional[Any]:
    """Send a command to the MPV IPC pipe and return the response."""
    try:
        mpv = MPV()
        return mpv.send(command, silent=silent)
    except Exception as e:
        if not silent:
            debug(f"IPC Error: {e}", file=sys.stderr)
        return None


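# Sketch of the JSON IPC traffic wrapped by _send_ipc_command (shapes taken from the
# commands used in this module; field values are illustrative and the exact "data"
# payload depends on mpv):
#
#   request:  {"command": ["get_property", "playlist"], "request_id": 100}
#   response: {"error": "success", "data": [{"filename": "https://example.com/video", "current": true}], "request_id": 100}
#
# A missing response (MPV not running) maps to None; callers below treat any error
# string other than "success" as failure.
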
def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
    """Get the current playlist from MPV. Returns None if MPV is not running."""
    cmd = {"command": ["get_property", "playlist"], "request_id": 100}
    resp = _send_ipc_command(cmd, silent=silent)
    if resp is None:
        return None
    if resp.get("error") == "success":
        return resp.get("data", [])
    return []


def _extract_title_from_item(item: Dict[str, Any]) -> str:
    """Extract a clean title from an MPV playlist item, handling memory:// M3U hacks."""
    title = item.get("title")
    filename = item.get("filename") or ""

    # Special handling for memory:// M3U playlists (used to pass titles via IPC)
    if "memory://" in filename and "#EXTINF:" in filename:
        try:
            # Extract the title between "#EXTINF:-1," and the end of the line
            match = re.search(r"#EXTINF:-1,(.*?)(?:\n|\r|$)", filename)
            if match:
                extracted_title = match.group(1).strip()
                if not title or title == "memory://":
                    title = extracted_title

            # If we still don't have a title, fall back to the URL inside the M3U content
            if not title:
                lines = filename.splitlines()
                for line in lines:
                    line = line.strip()
                    if line and not line.startswith('#') and not line.startswith('memory://'):
                        # Found the URL; use it as the title
                        return line
        except Exception:
            pass

    return title or filename or "Unknown"


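# Illustrative memory:// wrapper handled by _extract_title_from_item above and
# _extract_target_from_memory_uri below (built by _queue_items when a title is known;
# the URL is made up):
#
#   memory://#EXTM3U
#   #EXTINF:-1,Some Friendly Title
#   https://example.com/video
#
# The first helper recovers "Some Friendly Title"; the second recovers the URL line.
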
def _extract_target_from_memory_uri(text: str) -> Optional[str]:
    """Extract the real target URL/path from a memory:// M3U payload."""
    if not isinstance(text, str) or not text.startswith("memory://"):
        return None
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith('#') or line.startswith('memory://'):
            continue
        return line
    return None


def _find_hydrus_instance_for_hash(hash_str: str, file_storage: Any) -> Optional[str]:
    """Find which Hydrus instance serves a specific file hash.

    Args:
        hash_str: SHA256 hash (64 hex chars)
        file_storage: FileStorage instance with Hydrus backends

    Returns:
        Instance name (e.g., 'home') or None if not found
    """
    # Query each Hydrus backend to see if it has this file
    for backend_name in file_storage.list_backends():
        backend = file_storage[backend_name]
        # Check whether this is a Hydrus backend by class name
        backend_class = type(backend).__name__
        if backend_class != "HydrusNetwork":
            continue

        try:
            # Query metadata to see if this instance has the file
            metadata = backend.get_metadata(hash_str)
            if metadata:
                return backend_name
        except Exception:
            # This instance doesn't have the file or had an error
            continue

    return None


def _find_hydrus_instance_by_url(url: str, file_storage: Any) -> Optional[str]:
    """Find which Hydrus instance matches a given URL.

    Args:
        url: Full URL (e.g., http://localhost:45869/get_files/file?hash=...)
        file_storage: FileStorage instance with Hydrus backends

    Returns:
        Instance name (e.g., 'home') or None if not found
    """
    parsed_target = urlparse(url)
    target_netloc = parsed_target.netloc.lower()

    # Check each Hydrus backend's URL
    for backend_name in file_storage.list_backends():
        backend = file_storage[backend_name]
        backend_class = type(backend).__name__
        if backend_class != "HydrusNetwork":
            continue

        # Get the backend's base URL from its client
        try:
            backend_url = backend._client.base_url
            parsed_backend = urlparse(backend_url)
            backend_netloc = parsed_backend.netloc.lower()

            # Match by netloc (host:port)
            if target_netloc == backend_netloc:
                return backend_name
        except Exception:
            continue

    return None


def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
    """Normalize playlist entry paths for dedupe comparisons."""
    if not text:
        return None
    real = _extract_target_from_memory_uri(text) or text
    real = real.strip()
    if not real:
        return None

    # If it's already a bare hydrus hash, use it directly
    lower_real = real.lower()
    if re.fullmatch(r"[0-9a-f]{64}", lower_real):
        return lower_real

    # If it's a hydrus file URL, normalize to the hash for dedupe
    try:
        parsed = urlparse(real)
        if parsed.scheme in {"http", "https", "hydrus"}:
            if parsed.path.endswith("/get_files/file"):
                qs = parse_qs(parsed.query)
                h = qs.get("hash", [None])[0]
                if h and re.fullmatch(r"[0-9a-f]{64}", h.lower()):
                    return h.lower()
    except Exception:
        pass

    # Normalize slashes for Windows paths and lowercase for comparison
    real = real.replace('\\', '/')
    return real.lower()


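# Normalization examples (illustrative inputs/outputs, assuming the usual Hydrus
# /get_files/file?hash=<64-hex> URL shape):
#
#   "http://127.0.0.1:45869/get_files/file?hash=<64-hex>"  ->  "<64-hex>"
#   "C:\\Media\\Song.MP3"                                  ->  "c:/media/song.mp3"
#   a memory:// M3U wrapper                                ->  normalized form of its target line
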
def _infer_store_from_playlist_item(item: Dict[str, Any], file_storage: Optional[Any] = None) -> str:
    """Infer a friendly store label from an MPV playlist entry.

    Args:
        item: MPV playlist item dict
        file_storage: Optional FileStorage instance for querying specific backend instances

    Returns:
        Store label (e.g., 'home', 'work', 'local', 'youtube', etc.)
    """
    name = item.get("filename") if isinstance(item, dict) else None
    target = str(name or "")

    # Unwrap memory:// M3U wrapper
    memory_target = _extract_target_from_memory_uri(target)
    if memory_target:
        target = memory_target

    # Hydrus hashes: bare 64-hex entries
    if re.fullmatch(r"[0-9a-f]{64}", target.lower()):
        # If we have file_storage, query each Hydrus instance to find which one has this hash
        if file_storage:
            hash_str = target.lower()
            hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
            if hydrus_instance:
                return hydrus_instance
        return "hydrus"

    lower = target.lower()
    if lower.startswith("magnet:"):
        return "magnet"
    if lower.startswith("hydrus://"):
        # Extract hash from hydrus:// URL if possible
        if file_storage:
            hash_match = re.search(r"[0-9a-f]{64}", target.lower())
            if hash_match:
                hash_str = hash_match.group(0)
                hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
                if hydrus_instance:
                    return hydrus_instance
        return "hydrus"

    # Windows / UNC paths
    if re.match(r"^[a-z]:[\\/]", target, flags=re.IGNORECASE) or target.startswith("\\\\"):
        return "local"

    # file:// URL
    if lower.startswith("file://"):
        return "local"

    parsed = urlparse(target)
    host = (parsed.netloc or "").lower()
    path = parsed.path or ""

    if not host:
        return ""

    host_no_port = host.split(":", 1)[0]
    host_stripped = host_no_port[4:] if host_no_port.startswith("www.") else host_no_port

    if "youtube" in host_stripped or "youtu.be" in target.lower():
        return "youtube"
    if "soundcloud" in host_stripped:
        return "soundcloud"
    if "bandcamp" in host_stripped:
        return "bandcamp"
    if "get_files" in path or "file?hash=" in path or host_stripped in {"127.0.0.1", "localhost"}:
        # Hydrus API URL - try to extract the hash and find the instance
        if file_storage:
            # Try to extract the hash from the URL parameters
            hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
            if hash_match:
                hash_str = hash_match.group(1)
                hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
                if hydrus_instance:
                    return hydrus_instance
            # If no hash in the URL, try matching the base URL to configured instances
            hydrus_instance = _find_hydrus_instance_by_url(target, file_storage)
            if hydrus_instance:
                return hydrus_instance
        return "hydrus"
    if re.match(r"^\d+\.\d+\.\d+\.\d+$", host_stripped) and "get_files" in path:
        # IP-based Hydrus URL
        if file_storage:
            hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
            if hash_match:
                hash_str = hash_match.group(1)
                hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
                if hydrus_instance:
                    return hydrus_instance
            hydrus_instance = _find_hydrus_instance_by_url(target, file_storage)
            if hydrus_instance:
                return hydrus_instance
        return "hydrus"

    parts = host_stripped.split('.')
    if len(parts) >= 2:
        return parts[-2] or host_stripped
    return host_stripped


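# Example labels produced by _infer_store_from_playlist_item (illustrative inputs):
#
#   "magnet:?xt=urn:btih:<infohash>"                        -> "magnet"
#   "C:\\clips\\a.mp4" or "file:///home/user/a.mp4"         -> "local"
#   "https://www.youtube.com/watch?v=<id>"                  -> "youtube"
#   "https://music.somehost.com/track/1"                    -> "somehost"  (second-level domain fallback)
#   "http://127.0.0.1:45869/get_files/file?hash=<64-hex>"   -> matching Hydrus instance name, else "hydrus"
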
def _build_hydrus_header(config: Dict[str, Any]) -> Optional[str]:
    """Return header string for Hydrus auth if configured."""
    try:
        key = get_hydrus_access_key(config)
    except Exception:
        key = None
    if not key:
        return None
    return f"Hydrus-Client-API-Access-Key: {key}"


def _build_ytdl_options(config: Optional[Dict[str, Any]], hydrus_header: Optional[str]) -> Optional[str]:
    """Compose ytdl-raw-options string including cookies and optional Hydrus header."""
    opts: List[str] = []
    cookies_path = None
    try:
        from tool.ytdlp import YtDlpTool

        cookiefile = YtDlpTool(config or {}).resolve_cookiefile()
        if cookiefile is not None:
            cookies_path = str(cookiefile)
    except Exception:
        cookies_path = None

    if cookies_path:
        cookie_arg = cookies_path.replace("\\", "/")
        opts.append(f"cookies={cookie_arg}")
    else:
        opts.append("cookies-from-browser=chrome")
    if hydrus_header:
        opts.append(f"add-header={hydrus_header}")
    return ",".join(opts) if opts else None


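# Example ytdl-raw-options strings composed above (paths and keys are illustrative):
#
#   cookies file + Hydrus header:
#     "cookies=C:/Users/me/cookies.txt,add-header=Hydrus-Client-API-Access-Key: <key>"
#   no cookies file configured:
#     "cookies-from-browser=chrome"
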
def _is_hydrus_path(path: str, hydrus_url: Optional[str]) -> bool:
    """Heuristically detect whether a playlist path/URL points at a Hydrus file endpoint."""
    if not path:
        return False
    lower = path.lower()
    if "hydrus://" in lower:
        return True
    parsed = urlparse(path)
    host = (parsed.netloc or "").lower()
    path_part = parsed.path or ""
    if hydrus_url:
        try:
            hydrus_host = urlparse(hydrus_url).netloc.lower()
            if hydrus_host and hydrus_host in host:
                return True
        except Exception:
            pass
    if "get_files" in path_part or "file?hash=" in path_part:
        return True
    if re.match(r"^\d+\.\d+\.\d+\.\d+$", host) and "get_files" in path_part:
        return True
    return False


def _ensure_ytdl_cookies(config: Optional[Dict[str, Any]] = None) -> None:
    """Verify the configured yt-dlp cookies file for this session and log the result."""
    cookies_path = None
    try:
        from tool.ytdlp import YtDlpTool

        cookiefile = YtDlpTool(config or {}).resolve_cookiefile()
        if cookiefile is not None:
            cookies_path = str(cookiefile)
    except Exception:
        cookies_path = None

    if cookies_path:
        # Check whether the file exists and has content (use forward slashes for display)
        check_path = cookies_path.replace('\\', '/')
        file_obj = Path(cookies_path)
        if file_obj.exists():
            file_size = file_obj.stat().st_size
            debug(f"Cookies file verified: {check_path} ({file_size} bytes)")
        else:
            debug(f"WARNING: Cookies file does not exist: {check_path}", file=sys.stderr)
    else:
        debug("No cookies file configured")


def _monitor_mpv_logs(duration: float = 3.0) -> None:
    """Monitor MPV logs for a short duration to capture errors."""
    try:
        mpv = MPV()
        client = mpv.client()
        if not client.connect():
            debug("Failed to connect to MPV for log monitoring", file=sys.stderr)
            return

        # Request log messages
        client.send_command({"command": ["request_log_messages", "warn"]})

        # On Windows named pipes, avoid blocking the CLI; skip log read entirely
        if client.is_windows:
            client.disconnect()
            return

        import time
        start_time = time.time()

        # Unix sockets already have timeouts set; read until duration expires
        sock_obj = client.sock
        if not isinstance(sock_obj, socket.socket):
            client.disconnect()
            return

        while time.time() - start_time < duration:
            try:
                chunk = sock_obj.recv(4096)
            except socket.timeout:
                continue
            except Exception:
                break
            if not chunk:
                break
            for line in chunk.decode("utf-8", errors="ignore").splitlines():
                try:
                    msg = json.loads(line)
                    if msg.get("event") == "log-message":
                        text = msg.get("text", "").strip()
                        prefix = msg.get("prefix", "")
                        level = msg.get("level", "")
                        if "ytdl" in prefix or level == "error":
                            debug(f"[MPV {prefix}] {text}", file=sys.stderr)
                except json.JSONDecodeError:
                    continue

        client.disconnect()
    except Exception:
        pass


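# Shape of the log events parsed by _monitor_mpv_logs (mpv streams these after
# "request_log_messages"; field values here are illustrative):
#
#   {"event": "log-message", "prefix": "ytdl_hook", "level": "error", "text": "yt-dlp failed to extract"}
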
def _tail_text_file(path: str, *, max_lines: int = 120, max_bytes: int = 65536) -> List[str]:
    """Return up to the last ``max_lines`` lines of a text file, reading at most ``max_bytes`` from its end."""
    try:
        p = Path(str(path))
        if not p.exists() or not p.is_file():
            return []
    except Exception:
        return []

    try:
        with open(p, "rb") as f:
            try:
                f.seek(0, os.SEEK_END)
                end = f.tell()
                start = max(0, end - int(max_bytes))
                f.seek(start, os.SEEK_SET)
            except Exception:
                pass
            data = f.read()
        text = data.decode("utf-8", errors="replace")
        lines = text.splitlines()
        if len(lines) > max_lines:
            return lines[-max_lines:]
        return lines
    except Exception:
        return []


def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[Dict[str, Any]]) -> Optional[tuple[str, Optional[str]]]:
    """Extract a playable path/URL from an item, handling different store types.

    Args:
        item: Item to extract path from (dict, PipeObject, or string)
        file_storage: FileStorage instance for querying backends
        config: Config dict for Hydrus URL

    Returns:
        Tuple of (path, title) or None if no valid path found
    """
    path: Optional[str] = None
    title: Optional[str] = None
    store: Optional[str] = None
    file_hash: Optional[str] = None

    # Extract fields from the item - prefer a disk path ('path'), but accept 'url' as a fallback for providers
    if isinstance(item, dict):
        path = item.get("path")
        # Fallbacks for provider-style entries where the URL is stored in 'url', 'source_url' or 'target'
        if not path:
            path = item.get("url") or item.get("source_url") or item.get("target")
        if not path:
            known = item.get("url") or []
            if known and isinstance(known, list):
                path = known[0]
        title = item.get("title") or item.get("file_title")
        store = item.get("store")
        file_hash = item.get("hash")
    elif hasattr(item, "path") or hasattr(item, "url") or hasattr(item, "source_url") or hasattr(item, "store") or hasattr(item, "hash"):
        # Handle PipeObject / dataclass objects - prefer path, but fall back to url/source_url attributes
        path = getattr(item, "path", None)
        if not path:
            path = getattr(item, "url", None) or getattr(item, "source_url", None) or getattr(item, "target", None)
        if not path:
            known = getattr(item, "url", None) or (getattr(item, "extra", None) or {}).get("url")
            if known and isinstance(known, list):
                path = known[0]
        title = getattr(item, "title", None) or getattr(item, "file_title", None)
        store = getattr(item, "store", None)
        file_hash = getattr(item, "hash", None)
    elif isinstance(item, str):
        path = item

    # Debug: show incoming values
    try:
        debug(f"_get_playable_path: store={store}, path={path}, hash={file_hash}")
    except Exception:
        pass

    # Treat common placeholders as missing.
    if isinstance(path, str) and path.strip().lower() in {"", "n/a", "na", "none"}:
        path = None

    if title is not None and not isinstance(title, str):
        title = str(title)

    if isinstance(file_hash, str):
        file_hash = file_hash.strip().lower()

    # Resolve hash+store into a playable target (file path or URL).
    # This is unrelated to MPV's IPC pipe and keeps "pipe" terminology reserved for:
    # - MPV IPC pipe (transport)
    # - PipeObject (pipeline data)
    if store and file_hash and file_hash != "unknown" and file_storage:
        # If it's already a URL, MPV can usually play it directly.
        if isinstance(path, str) and path.startswith(("http://", "https://")):
            return (path, title)

        try:
            backend = file_storage[store]
        except Exception:
            backend = None

        if backend is not None:
            backend_class = type(backend).__name__

            # Folder stores: resolve to an on-disk file path.
            if hasattr(backend, "get_file") and callable(getattr(backend, "get_file")) and backend_class == "Folder":
                try:
                    resolved = backend.get_file(file_hash)
                    if resolved is not None:
                        path = str(resolved)
                except Exception as e:
                    debug(f"Error resolving file path from store '{store}': {e}", file=sys.stderr)

            # HydrusNetwork: build a playable API file URL without browser side-effects.
            elif backend_class == "HydrusNetwork":
                try:
                    client = getattr(backend, "_client", None)
                    base_url = getattr(client, "url", None)
                    if base_url:
                        base_url = str(base_url).rstrip("/")
                        # Auth is provided via http-header-fields (set in _queue_items).
                        path = f"{base_url}/get_files/file?hash={file_hash}"
                except Exception as e:
                    debug(f"Error building Hydrus URL from store '{store}': {e}", file=sys.stderr)

    if not path:
        # As a last resort, if we have a hash and no path/url, return the hash.
        # _queue_items will convert it to a Hydrus file URL when possible.
        if store and file_hash and file_hash != "unknown":
            return (str(file_hash), title)
        return None

    if not isinstance(path, str):
        path = str(path)

    return (path, title)


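# Illustrative results from _get_playable_path (assuming a Folder store named "local"
# and a Hydrus store named "home" are configured; values are made up):
#
#   {"store": "local", "hash": "<64-hex>", "title": "Song"}  -> ("D:/library/<64-hex>.mp3", "Song")
#   {"store": "home", "hash": "<64-hex>", "title": "Clip"}   -> ("http://127.0.0.1:45869/get_files/file?hash=<64-hex>", "Clip")
#   "https://example.com/video"                              -> ("https://example.com/video", None)
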
def _queue_items(
    items: List[Any],
    clear_first: bool = False,
    config: Optional[Dict[str, Any]] = None,
    start_opts: Optional[Dict[str, Any]] = None,
) -> bool:
    """Queue items to MPV, starting it if necessary.

    Args:
        items: List of items to queue
        clear_first: If True, the first item will replace the current playlist

    Returns:
        True if MPV was started, False if items were queued via IPC.
    """
    # Debug: print incoming items
    try:
        debug(f"_queue_items: count={len(items)} types={[type(i).__name__ for i in items]}")
    except Exception:
        pass

    # Just verify cookies are configured, don't try to set them via IPC
    _ensure_ytdl_cookies(config)

    hydrus_header = _build_hydrus_header(config or {})
    ytdl_opts = _build_ytdl_options(config, hydrus_header)
    hydrus_url = None
    try:
        hydrus_url = get_hydrus_url(config) if config is not None else None
    except Exception:
        hydrus_url = None

    # Initialize the Store registry for path resolution
    file_storage = None
    try:
        from Store import Store
        file_storage = Store(config or {})
    except Exception as e:
        debug(f"Warning: Could not initialize Store registry: {e}", file=sys.stderr)

    # Dedupe the existing playlist before adding more (unless we're replacing it)
    existing_targets: set[str] = set()
    if not clear_first:
        playlist = _get_playlist(silent=True) or []
        dup_indexes: List[int] = []
        for idx, pl_item in enumerate(playlist):
            fname = pl_item.get("filename") if isinstance(pl_item, dict) else str(pl_item)
            alt = pl_item.get("playlist-path") if isinstance(pl_item, dict) else None
            norm = _normalize_playlist_path(fname) or _normalize_playlist_path(alt)
            if not norm:
                continue
            if norm in existing_targets:
                dup_indexes.append(idx)
            else:
                existing_targets.add(norm)

        # Remove duplicates from the playlist starting from the end to keep indices valid
        for idx in reversed(dup_indexes):
            try:
                _send_ipc_command({"command": ["playlist-remove", idx], "request_id": 106}, silent=True)
            except Exception:
                pass

    new_targets: set[str] = set()

    for i, item in enumerate(items):
        # Debug: show the item being processed
        try:
            debug(f"_queue_items: processing idx={i} type={type(item)} repr={repr(item)[:200]}")
        except Exception:
            pass

        # Extract the URL/path using store-aware logic
        result = _get_playable_path(item, file_storage, config)
        if not result:
            debug(f"_queue_items: item idx={i} produced no playable path")
            continue

        target, title = result

        # If the target is an AllDebrid protected file URL, unlock it to a direct link for MPV.
        try:
            if isinstance(target, str):
                target = _maybe_unlock_alldebrid_url(target, config)
        except Exception:
            pass

        # Prefer per-item Hydrus instance credentials when the item belongs to a Hydrus store.
        effective_hydrus_url = hydrus_url
        effective_hydrus_header = hydrus_header
        effective_ytdl_opts = ytdl_opts
        item_store_name: Optional[str] = None
        try:
            item_store = None
            if isinstance(item, dict):
                item_store = item.get("store")
            else:
                item_store = getattr(item, "store", None)

            if item_store:
                item_store_name = str(item_store).strip() or None

            if item_store and file_storage:
                try:
                    backend = file_storage[str(item_store)]
                except Exception:
                    backend = None

                if backend is not None and type(backend).__name__ == "HydrusNetwork":
                    client = getattr(backend, "_client", None)
                    base_url = getattr(client, "url", None)
                    key = getattr(client, "access_key", None)
                    if base_url:
                        effective_hydrus_url = str(base_url).rstrip("/")
                    if key:
                        effective_hydrus_header = f"Hydrus-Client-API-Access-Key: {str(key).strip()}"
                        effective_ytdl_opts = _build_ytdl_options(config, effective_hydrus_header)
        except Exception:
            pass

        if target:
            # If we only have a hydrus hash, build a direct file URL for MPV
            if re.fullmatch(r"[0-9a-f]{64}", str(target).strip().lower()) and effective_hydrus_url:
                target = f"{effective_hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"

            norm_key = _normalize_playlist_path(target) or str(target).strip().lower()
            if norm_key in existing_targets or norm_key in new_targets:
                debug(f"Skipping duplicate playlist entry: {title or target}")
                continue
            new_targets.add(norm_key)

        # Use the memory:// M3U hack to pass the title to MPV.
        # This is especially important for remote URLs (e.g., YouTube) where MPV may otherwise
        # show the raw URL as the playlist title.
        if title:
            # Sanitize the title for M3U (remove newlines)
            safe_title = title.replace('\n', ' ').replace('\r', '')

            # Carry the store name for hash URLs so MPV.lyric can resolve the backend.
            # This is especially important for local file-server URLs like /get_files/file?hash=...
            target_for_m3u = target
            try:
                if item_store_name and isinstance(target_for_m3u, str) and target_for_m3u.startswith("http"):
                    if "get_files/file" in target_for_m3u and "store=" not in target_for_m3u:
                        sep = "&" if "?" in target_for_m3u else "?"
                        target_for_m3u = f"{target_for_m3u}{sep}store={item_store_name}"
            except Exception:
                target_for_m3u = target

            m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{target_for_m3u}"
            target_to_send = f"memory://{m3u_content}"
        else:
            target_to_send = target

        mode = "append"
        if clear_first and i == 0:
            mode = "replace"

        # If this is a Hydrus path, set the header property and yt-dlp headers before loading.
        # Use the real target (not the memory:// wrapper) for detection.
        if effective_hydrus_header and _is_hydrus_path(str(target), effective_hydrus_url):
            header_cmd = {"command": ["set_property", "http-header-fields", effective_hydrus_header], "request_id": 199}
            _send_ipc_command(header_cmd, silent=True)
            if effective_ytdl_opts:
                ytdl_cmd = {"command": ["set_property", "ytdl-raw-options", effective_ytdl_opts], "request_id": 197}
                _send_ipc_command(ytdl_cmd, silent=True)

        cmd = {"command": ["loadfile", target_to_send, mode], "request_id": 200}
        try:
            debug(f"Sending MPV loadfile: {target_to_send} mode={mode}")
            resp = _send_ipc_command(cmd, silent=True)
            debug(f"MPV loadfile response: {resp}")
        except Exception as e:
            debug(f"Exception sending loadfile to MPV: {e}", file=sys.stderr)
            resp = None

        if resp is None:
            # MPV not running (or died): start MPV with the remaining items
            debug(f"MPV not running/died while queuing, starting MPV with remaining items: {items[i:]}")
            _start_mpv(items[i:], config=config, start_opts=start_opts)
            return True
        elif resp.get("error") == "success":
            # Do not set `force-media-title` when queueing items. It's a global property and
            # would change the MPV window title even if the item isn't currently playing.
            debug(f"Queued: {title or target}")
        else:
            error_msg = str(resp.get('error'))
            debug(f"Failed to queue item: {error_msg}", file=sys.stderr)

    return False


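# Illustrative `loadfile` payload produced by _queue_items when a title is available
# (values are made up; the memory:// M3U wrapper carries the title, and "store=" is
# appended to Hydrus file URLs so downstream tooling can resolve the backend):
#
#   {"command": ["loadfile",
#                "memory://#EXTM3U\n#EXTINF:-1,My Title\nhttp://127.0.0.1:45869/get_files/file?hash=<64-hex>&store=home",
#                "append"],
#    "request_id": 200}
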
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Manage and play items in the MPV playlist via IPC."""

    parsed = parse_cmdlet_args(args, CMDLET)

    log_requested = bool(parsed.get("log"))
    borderless = bool(parsed.get("borderless"))

    prev_debug = is_debug_enabled()
    prev_stream = get_thread_stream()
    devnull_fh = None

    mpv_log_path: Optional[str] = None

    try:
        # Default: keep `.pipe` quiet even if debug is enabled.
        # With -log: enable debug and route it to stdout (pipeable), plus enable the mpv log-file.
        if log_requested:
            set_debug(True)
            set_thread_stream(sys.stdout)
            try:
                tmp_dir = Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".")
            except Exception:
                tmp_dir = Path(".")
            mpv_log_path = str((tmp_dir / "medeia-mpv.log").resolve())
            # Ensure the file exists early so we can tail it even if mpv writes later.
            try:
                Path(mpv_log_path).parent.mkdir(parents=True, exist_ok=True)
                with open(mpv_log_path, "a", encoding="utf-8", errors="replace"):
                    pass
            except Exception:
                pass
            debug(f"MPV log file: {mpv_log_path}")

            # If mpv is already running, set log options live via IPC.
            try:
                mpv_live = MPV()
                if mpv_live.is_running():
                    mpv_live.set_property("options/log-file", mpv_log_path)
                    mpv_live.set_property("options/msg-level", "all=v")
            except Exception:
                pass
        else:
            if prev_debug:
                try:
                    devnull_fh = open(os.devnull, "w", encoding="utf-8", errors="replace")
                    set_thread_stream(devnull_fh)
                except Exception:
                    pass

        start_opts: Dict[str, Any] = {"borderless": borderless, "mpv_log_path": mpv_log_path}

        # The Store registry is only needed for certain playlist listing/inference paths.
        # Keep it lazy so a simple `.pipe <url> -play` doesn't trigger Hydrus/API calls.
        file_storage = None

        # Initialize the mpv_started flag
        mpv_started = False

        # Handle the positional index argument if provided
        index_arg = parsed.get("index")
        url_arg = parsed.get("url")

        # If index_arg is provided but is not an integer, treat it as a URL.
        # This allows `.pipe "http://..."` without the -url flag.
        if index_arg is not None:
            try:
                int(index_arg)
            except ValueError:
                # Not an integer; treat it as a URL if url_arg is not set
                if not url_arg:
                    url_arg = index_arg
                    index_arg = None

        clear_mode = parsed.get("clear")
        list_mode = parsed.get("list")
        play_mode = parsed.get("play")
        pause_mode = parsed.get("pause")
        save_mode = parsed.get("save")
        load_mode = parsed.get("load")
        current_mode = parsed.get("current")

        # Handle the -current flag: emit the currently playing item to the pipeline
        if current_mode:
            items = _get_playlist()
            if items is None:
                debug("MPV is not running or not accessible.", file=sys.stderr)
                return 1

            # Find the currently playing item
            current_item = None
            for item in items:
                if item.get("current", False):
                    current_item = item
                    break

            if current_item is None:
                debug("No item is currently playing.", file=sys.stderr)
                return 1

            # Build a result object with file info
            title = _extract_title_from_item(current_item)
            filename = current_item.get("filename", "")

            # Emit the current item to the pipeline
            result_obj = {
                'path': filename,
                'title': title,
                'cmdlet_name': '.pipe',
                'source': 'pipe',
                '__pipe_index': items.index(current_item),
            }

            ctx.emit(result_obj)
            debug(f"Emitted current item: {title}")
            return 0

        # Handle URL queuing
        mpv_started = False
        if url_arg:
            mpv_started = _queue_items([url_arg], config=config, start_opts=start_opts)
            # Auto-play the URL when it is queued via `.pipe "url"` (without explicit flags),
            # unless other flags are present.
            if not (clear_mode or play_mode or pause_mode or save_mode or load_mode):
                if mpv_started:
                    # MPV was just started; wait a moment for it to be ready, then play the first item
                    import time
                    time.sleep(0.5)
                    index_arg = "1"  # 1-based index for the first item
                    play_mode = True
                else:
                    # MPV was already running: get the playlist and play the newly added item
                    playlist = _get_playlist(silent=True)
                    if playlist and len(playlist) > 0:
                        # Auto-play the last item in the playlist (the one we just added), 1-based.
                        index_arg = str(len(playlist))
                        play_mode = True
                    else:
                        # Fallback: just list the playlist if we can't determine the index
                        list_mode = True

            # Ensure the lyric overlay is running (auto-discovery handled by MPV.lyric).
            try:
                mpv = MPV()
                _ensure_lyric_overlay(mpv)
            except Exception:
                pass

        # Handle Save Playlist
        if save_mode:
            playlist_name = index_arg or f"Playlist {subprocess.check_output(['date', '/t'], shell=True).decode().strip()}"
            # If index_arg was used for the name, clear it so it doesn't trigger index logic
            if index_arg:
                index_arg = None

            items = _get_playlist()
            if not items:
                debug("Cannot save: MPV playlist is empty or MPV is not running.")
                return 1

            # Save items as-is: the 'filename' (possibly a memory://... wrapper) is what MPV
            # needs to replay the entry, the wrapper still works when loaded back, and
            # _extract_title_from_item recovers the display title from it.
            clean_items = []
            for item in items:
                clean_items.append(item)

            # Use config from context or load it
            config_data = config if config else {}

            storage_path = get_local_storage_path(config_data)
            if not storage_path:
                debug("Local storage path not configured.")
                return 1

            with LocalLibrarySearchOptimizer(storage_path) as db:
                if db.save_playlist(playlist_name, clean_items):
                    debug(f"Playlist saved as '{playlist_name}'")
                    return 0
                else:
                    debug(f"Failed to save playlist '{playlist_name}'")
                    return 1

        # Handle Load Playlist
        current_playlist_name = None
        if load_mode:
            # Use config from context or load it
            config_data = config if config else {}

            storage_path = get_local_storage_path(config_data)
            if not storage_path:
                debug("Local storage path not configured.")
                return 1

            with LocalLibrarySearchOptimizer(storage_path) as db:
                if index_arg:
                    try:
                        pl_id = int(index_arg)

                        # Handle Delete Playlist (if -clear is also passed)
                        if clear_mode:
                            if db.delete_playlist(pl_id):
                                debug(f"Playlist ID {pl_id} deleted.")
                                # Clear index_arg so we fall through to list mode and show the updated list
                                index_arg = None
                                # Don't return; let it list the remaining playlists
                            else:
                                debug(f"Failed to delete playlist ID {pl_id}.")
                                return 1
                        else:
                            # Handle Load Playlist
                            result = db.get_playlist_by_id(pl_id)
                            if result is None:
                                debug(f"Playlist ID {pl_id} not found.")
                                return 1

                            name, items = result
                            current_playlist_name = name

                            # Queue items (replacing the current playlist)
                            if items:
                                _queue_items(items, clear_first=True, config=config, start_opts=start_opts)
                            else:
                                # Empty playlist, just clear
                                _send_ipc_command({"command": ["playlist-clear"]}, silent=True)

                            # Switch to list mode to show the result and fall through to the list logic
                            list_mode = True
                            index_arg = None

                    except ValueError:
                        debug(f"Invalid playlist ID: {index_arg}")
                        return 1

                # If we deleted or didn't have an index, list playlists
                if not index_arg:
                    playlists = db.get_playlists()

                    if not playlists:
                        debug("No saved playlists found.")
                        return 0

                    table = ResultTable("Saved Playlists")
                    for i, pl in enumerate(playlists):
                        item_count = len(pl.get('items', []))
                        row = table.add_row()
                        # row.add_column("ID", str(pl['id']))  # Hidden as per user request
                        row.add_column("Name", pl['name'])
                        row.add_column("Items", str(item_count))
                        row.add_column("Updated", pl['updated_at'])

                        # Selecting @N yields the playlist's items; the selection args make the
                        # source command `.pipe -load <ID>` so the playlist is loaded.
                        table.set_row_selection_args(i, ["-load", str(pl['id'])])

                    table.set_source_command(".pipe")

                    # Register results
                    ctx.set_last_result_table_overlay(table, [p['items'] for p in playlists])
                    ctx.set_current_stage_table(table)

                    # In pipeline mode, the CLI renders current-stage tables; printing here duplicates output.
                    suppress_direct_print = bool(isinstance(config, dict) and config.get("_quiet_background_output"))
                    if not suppress_direct_print:
                        print(table)
                    return 0

        # Everything below was originally outside a try block; keep it inside so `start_opts` is in scope.

        # Handle Play/Pause commands (but skip if we have index_arg to play a specific item)
        if play_mode and index_arg is None:
            cmd = {"command": ["set_property", "pause", False], "request_id": 103}
            resp = _send_ipc_command(cmd)
            if resp and resp.get("error") == "success":
                debug("Resumed playback")
                return 0
            else:
                debug("Failed to resume playback (MPV not running?)", file=sys.stderr)
                return 1

        if pause_mode:
            cmd = {"command": ["set_property", "pause", True], "request_id": 104}
            resp = _send_ipc_command(cmd)
            if resp and resp.get("error") == "success":
                debug("Paused playback")
                return 0
            else:
                debug("Failed to pause playback (MPV not running?)", file=sys.stderr)
                return 1

        # Handle the Clear All command (no index provided)
        if clear_mode and index_arg is None:
            cmd = {"command": ["playlist-clear"], "request_id": 105}
            resp = _send_ipc_command(cmd)
            if resp and resp.get("error") == "success":
                debug("Playlist cleared")
                return 0
            else:
                debug("Failed to clear playlist (MPV not running?)", file=sys.stderr)
                return 1

        # Handle piped input (add to playlist).
        # Skip adding if -list is specified (the user just wants to see the current playlist).
        if result and not list_mode and not url_arg:
            playlist_before = _get_playlist(silent=True)
            idle_before = None
            try:
                idle_resp = _send_ipc_command({"command": ["get_property", "idle-active"], "request_id": 111}, silent=True)
                if idle_resp and idle_resp.get("error") == "success":
                    idle_before = bool(idle_resp.get("data"))
            except Exception:
                idle_before = None

            # If result is a list of items, add them all to the playlist
            items_to_add = []
            if isinstance(result, list):
                items_to_add = result
            elif isinstance(result, dict):
                items_to_add = [result]
            else:
                # Handle PipeObject or any other object type
                items_to_add = [result]

            # Debug: inspect the incoming result and its attributes
            try:
                debug(f"pipe._run: received result type={type(result)} repr={repr(result)[:200]}")
                debug(f"pipe._run: attrs path={getattr(result, 'path', None)} url={getattr(result, 'url', None)} store={getattr(result, 'store', None)} hash={getattr(result, 'hash', None)}")
            except Exception:
                pass

            queued_started_mpv = False
            if items_to_add and _queue_items(items_to_add, config=config, start_opts=start_opts):
                mpv_started = True
                queued_started_mpv = True

            # Ensure the lyric overlay is running when we queue anything via .pipe.
            if items_to_add and not queued_started_mpv:
                try:
                    mpv = MPV()
                    _ensure_lyric_overlay(mpv)
                except Exception:
                    pass

            # Auto-play when a single item is piped and mpv was idle/empty.
            if items_to_add and len(items_to_add) == 1 and not queued_started_mpv:
                try:
                    playlist_after = _get_playlist(silent=True)
                    before_len = len(playlist_before) if isinstance(playlist_before, list) else 0
                    after_len = len(playlist_after) if isinstance(playlist_after, list) else 0

                    should_autoplay = False
                    if idle_before is True:
                        should_autoplay = True
                    elif isinstance(playlist_before, list) and len(playlist_before) == 0:
                        should_autoplay = True

                    if should_autoplay and after_len > 0:
                        idx_to_play = min(max(0, before_len), after_len - 1)
                        play_resp = _send_ipc_command({"command": ["playlist-play-index", idx_to_play], "request_id": 112}, silent=True)
                        _send_ipc_command({"command": ["set_property", "pause", False], "request_id": 113}, silent=True)
                        if play_resp and play_resp.get("error") == "success":
                            debug("Auto-playing piped item")

                        # Start the lyric overlay (auto-discovery handled by MPV.lyric).
                        try:
                            mpv = MPV()
                            _ensure_lyric_overlay(mpv)
                        except Exception:
                            pass
                except Exception:
                    pass

        # Get the playlist from MPV (silent: we handle MPV-not-running gracefully below)
        items = _get_playlist(silent=True)

        if items is None:
            if mpv_started:
                # MPV was just started; retry getting the playlist after a brief delay
                import time
                time.sleep(0.3)
                items = _get_playlist(silent=True)

                if items is None:
                    # Still can't connect, but MPV is starting
                    debug("MPV is starting up...")
                    return 0
            else:
                # Do not auto-launch MPV when no action/inputs were provided; avoid surprise startups
                no_inputs = not any([
                    result, url_arg, index_arg, clear_mode, play_mode,
                    pause_mode, save_mode, load_mode, current_mode, list_mode
                ])

                if no_inputs:
                    # User invoked `.pipe` with no args: treat this as an intent to open MPV.
                    debug("MPV is not running. Starting new instance...")
                    _start_mpv([], config=config, start_opts=start_opts)

                    # Re-check the playlist after startup; if IPC still isn't ready, just exit cleanly.
                    try:
                        import time
                        time.sleep(0.3)
                    except Exception:
                        pass
                    items = _get_playlist(silent=True)
                    if items is None:
                        debug("MPV is starting up...")
                        return 0

                    # IPC is ready; continue without restarting MPV again.
                else:
                    debug("MPV is not running. Starting new instance...")
                    _start_mpv([], config=config, start_opts=start_opts)
                    return 0

        if not items:
            debug("MPV playlist is empty.")
            return 0

        # If an index is provided, perform the action (Play or Clear)
        if index_arg is not None:
            try:
                # Handle 1-based index
                idx = int(index_arg) - 1

                if idx < 0 or idx >= len(items):
                    debug(f"Index {index_arg} out of range (1-{len(items)}).")
                    return 1

                item = items[idx]
                title = _extract_title_from_item(item)
                filename = item.get("filename", "") if isinstance(item, dict) else ""
                hydrus_header = _build_hydrus_header(config or {})
                hydrus_url = None
                try:
                    hydrus_url = get_hydrus_url(config) if config is not None else None
                except Exception:
                    hydrus_url = None

                if clear_mode:
                    # Remove the item
                    cmd = {"command": ["playlist-remove", idx], "request_id": 101}
                    resp = _send_ipc_command(cmd)
                    if resp and resp.get("error") == "success":
                        debug(f"Removed: {title}")
                        # Refresh items for listing
                        items = _get_playlist() or []
                        list_mode = True
                        index_arg = None
                    else:
                        debug(f"Failed to remove item: {resp.get('error') if resp else 'No response'}")
                        return 1
                else:
                    # Play the item
                    if hydrus_header and _is_hydrus_path(filename, hydrus_url):
                        header_cmd = {"command": ["set_property", "http-header-fields", hydrus_header], "request_id": 198}
                        _send_ipc_command(header_cmd, silent=True)
                    cmd = {"command": ["playlist-play-index", idx], "request_id": 102}
                    resp = _send_ipc_command(cmd)
                    if resp and resp.get("error") == "success":
                        # Ensure playback starts (unpause)
                        unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
                        _send_ipc_command(unpause_cmd)

                        debug(f"Playing: {title}")

                        # Monitor logs briefly for errors (e.g. ytdl failures)
                        _monitor_mpv_logs(3.0)

                        # Refresh the playlist view so the user sees the new current item immediately
                        items = _get_playlist(silent=True) or items
                        list_mode = True
                        index_arg = None
                    else:
                        debug(f"Failed to play item: {resp.get('error') if resp else 'No response'}")
                        return 1
            except ValueError:
                debug(f"Invalid index: {index_arg}")
                return 1

        # List items (default action, or after a clear/play)
        if list_mode or (index_arg is None and not url_arg):
            if not items:
                debug("MPV playlist is empty.")
                return 0

            if file_storage is None:
                try:
                    from Store import Store
                    file_storage = Store(config)
                except Exception as e:
                    debug(f"Warning: Could not initialize Store registry: {e}", file=sys.stderr)

            # Use the loaded playlist name if available, otherwise the default title.
            # Note: current_playlist_name is set in the load_mode block when a playlist is loaded.
            try:
                table_title = current_playlist_name or "MPV Playlist"
            except NameError:
                table_title = "MPV Playlist"

            table = ResultTable(table_title, preserve_order=True)

            # Convert MPV items to PipeObjects with proper hash and store
            pipe_objects = []
            for i, item in enumerate(items):
                is_current = item.get("current", False)
                title = _extract_title_from_item(item)
                filename = item.get("filename", "")

                # Extract the real path/URL from the memory:// wrapper if present
                real_path = _extract_target_from_memory_uri(filename) or filename

                # Try to extract a hash from the path/URL
                file_hash = None
                store_name = None

                # Check if it's a Hydrus URL
                if "get_files/file" in real_path or "hash=" in real_path:
                    # Extract the hash from the Hydrus URL
                    hash_match = re.search(r"hash=([0-9a-f]{64})", real_path.lower())
                    if hash_match:
                        file_hash = hash_match.group(1)
                        # Try to find which Hydrus instance has this file
                        if file_storage:
                            store_name = _find_hydrus_instance_for_hash(file_hash, file_storage)
                        if not store_name:
                            store_name = "hydrus"
                # Check if it's a hash-named local file
                elif real_path:
                    # Try to extract a hash from the filename (e.g., C:\path\<64-hex>.mp4)
                    path_obj = Path(real_path)
                    stem = path_obj.stem  # filename without extension
                    if len(stem) == 64 and all(c in '0123456789abcdef' for c in stem.lower()):
                        file_hash = stem.lower()
                        # Find which folder store has this file
                        if file_storage:
                            for backend_name in file_storage.list_backends():
                                backend = file_storage[backend_name]
                                if type(backend).__name__ == "Folder":
                                    # Check if this backend has the file
                                    try:
                                        result_path = backend.get_file(file_hash)
                                        if isinstance(result_path, Path) and result_path.exists():
                                            store_name = backend_name
                                            break
                                    except Exception:
                                        pass

                # Fall back to the inferred store if we couldn't find it
                if not store_name:
                    store_name = _infer_store_from_playlist_item(item, file_storage=file_storage)

                # Build a PipeObject with proper metadata
                pipe_obj = PipeObject(
                    hash=file_hash or "unknown",
                    store=store_name or "unknown",
                    title=title,
                    path=real_path
                )
                pipe_objects.append(pipe_obj)

                # Truncate the title for display
                display_title = title
                if len(display_title) > 80:
                    display_title = display_title[:77] + "..."

                row = table.add_row()
                row.add_column("Current", "*" if is_current else "")
                row.add_column("Store", store_name or "unknown")
                row.add_column("Title", display_title)

                table.set_row_selection_args(i, [str(i + 1)])

            table.set_source_command(".pipe")

            # Register PipeObjects (not raw MPV items) with the pipeline context
            ctx.set_last_result_table_overlay(table, pipe_objects)
            ctx.set_current_stage_table(table)

            # In pipeline mode, the CLI renders current-stage tables; printing here duplicates output.
            suppress_direct_print = bool(isinstance(config, dict) and config.get("_quiet_background_output"))
            if not suppress_direct_print:
                print(table)

        return 0

    finally:
        if log_requested and isinstance(mpv_log_path, str) and mpv_log_path.strip():
            try:
                tail_lines = _tail_text_file(mpv_log_path, max_lines=160)
                if tail_lines:
                    print("MPV log (tail):")
                    for ln in tail_lines:
                        print(ln)
            except Exception:
                pass
        try:
            set_thread_stream(prev_stream)
        except Exception:
            pass
        try:
            set_debug(prev_debug)
        except Exception:
            pass
        try:
            if devnull_fh is not None:
                devnull_fh.close()
        except Exception:
            pass


def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_opts: Optional[Dict[str, Any]] = None) -> None:
    """Start MPV with a list of items."""
    import time as _time_module

    mpv = MPV()
    mpv.kill_existing_windows()
    _time_module.sleep(0.5)  # Wait for the old process to die

    hydrus_header = _build_hydrus_header(config or {})
    ytdl_opts = _build_ytdl_options(config, hydrus_header)

    cookies_path = None
    try:
        from tool.ytdlp import YtDlpTool

        cookiefile = YtDlpTool(config or {}).resolve_cookiefile()
        if cookiefile is not None:
            cookies_path = str(cookiefile)
    except Exception:
        cookies_path = None

    if cookies_path:
        cookie_display = cookies_path.replace("\\", "/")
        debug(f"Starting MPV with cookies file: {cookie_display}")
    else:
        debug("Starting MPV with browser cookies: chrome")

    try:
        extra_args: List[str] = [
            '--ytdl-format=bestvideo[height<=?1080]+bestaudio/best[height<=?1080]',
        ]

        # Optional: borderless window (useful for uosc-like overlay UI without fullscreen).
        if start_opts and start_opts.get("borderless"):
            extra_args.append("--border=no")

        # Optional: mpv logging to file.
        mpv_log_path = (start_opts or {}).get("mpv_log_path")
        if isinstance(mpv_log_path, str) and mpv_log_path.strip():
            extra_args.append(f"--log-file={mpv_log_path}")
            extra_args.append("--msg-level=all=v")

        # Always start MPV with the bundled Lua script via the MPV class.
        mpv.start(
            extra_args=extra_args,
            ytdl_raw_options=ytdl_opts,
            http_header_fields=hydrus_header,
            detached=True,
        )
        debug("Started MPV process")

        # Wait for the IPC pipe to be ready
        if not mpv.wait_for_ipc(retries=20, delay_seconds=0.2):
            debug("Timed out waiting for MPV IPC connection", file=sys.stderr)
            return

        # Ensure the Lua script is loaded (redundant when started with --script, but safe)
        mpv.ensure_lua_loaded()

        # Ensure the lyric overlay is running (auto-discovery handled by MPV.lyric).
        _ensure_lyric_overlay(mpv)

        # Queue items via IPC
        if items:
            _queue_items(items, config=config, start_opts=start_opts)

            # Auto-play the first item
            import time
            time.sleep(0.3)  # Give MPV a moment to process the queued items

            # Play the first item (index 0) and unpause
            play_cmd = {"command": ["playlist-play-index", 0], "request_id": 102}
            play_resp = _send_ipc_command(play_cmd, silent=True)

            if play_resp and play_resp.get("error") == "success":
                # Ensure playback starts (unpause)
                unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
                _send_ipc_command(unpause_cmd, silent=True)
                debug("Auto-playing first item")

            # Overlay already started above; it will follow track changes automatically.

    except Exception as e:
        debug(f"Error starting MPV: {e}", file=sys.stderr)


CMDLET = Cmdlet(
    name=".pipe",
    alias=["pipe", "playlist", "queue", "ls-pipe"],
    summary="Manage and play items in the MPV playlist via IPC",
    usage=".pipe [index|url] [-current] [-clear] [-list] [-url URL] [-log] [-borderless]",
    arg=[
        CmdletArg(
            name="index",
            type="string",  # string so a URL can be detected and treated as one
            description="Index of item to play/clear, or URL to queue",
            required=False
        ),
        CmdletArg(
            name="url",
            type="string",
            description="URL to queue",
            required=False
        ),
        CmdletArg(
            name="clear",
            type="flag",
            description="Remove the selected item, or clear entire playlist if no index provided"
        ),
        CmdletArg(
            name="list",
            type="flag",
            description="List items (default)"
        ),
        CmdletArg(
            name="play",
            type="flag",
            description="Resume playback"
        ),
        CmdletArg(
            name="pause",
            type="flag",
            description="Pause playback"
        ),
        CmdletArg(
            name="save",
            type="flag",
            description="Save current playlist to database"
        ),
        CmdletArg(
            name="load",
            type="flag",
            description="List saved playlists"
        ),
        CmdletArg(
            name="current",
            type="flag",
            description="Emit the currently playing item to pipeline for further processing"
        ),
        CmdletArg(
            name="log",
            type="flag",
            description="Enable pipeable debug output and write an mpv log file"
        ),
        CmdletArg(
            name="borderless",
            type="flag",
            description="Start mpv with no window border (uosc-like overlay feel without fullscreen)"
        ),
    ],
    exec=_run
)
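
# Example invocations (flags as declared above; behaviour depends on the running MPV
# session, and the exact shell quoting is illustrative):
#
#   .pipe                               # list the current MPV playlist (starts MPV if nothing is running)
#   .pipe 3                             # play playlist entry 3 (1-based)
#   .pipe 3 -clear                      # remove entry 3
#   .pipe "https://example.com/video"   # queue a URL and auto-play it
#   .pipe -save                         # save the current playlist under a generated name
#   .pipe -load                         # list saved playlists; `.pipe <id> -load` loads one
#   .pipe -current                      # emit the currently playing item into the pipeline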