This commit is contained in:
nose
2025-12-07 00:21:30 -08:00
parent f29709d951
commit 6b05dc5552
23 changed files with 2196 additions and 1133 deletions


@@ -5,7 +5,7 @@ import platform
import socket
import re
import subprocess
from urllib.parse import urlparse
from urllib.parse import urlparse, parse_qs
from pathlib import Path
from cmdlets._shared import Cmdlet, CmdletArg, parse_cmdlet_args
from helper.logger import log, debug
@@ -87,6 +87,37 @@ def _extract_target_from_memory_uri(text: str) -> Optional[str]:
return None
def _normalize_playlist_target(text: Optional[str]) -> Optional[str]:
    """Normalize playlist entry targets for dedupe comparisons."""
    if not text:
        return None
    real = _extract_target_from_memory_uri(text) or text
    real = real.strip()
    if not real:
        return None
    # If it's already a bare hydrus hash, use it directly
    lower_real = real.lower()
    if re.fullmatch(r"[0-9a-f]{64}", lower_real):
        return lower_real
    # If it's a hydrus file URL, normalize to the hash for dedupe
    try:
        parsed = urlparse(real)
        if parsed.scheme in {"http", "https", "hydrus"}:
            if parsed.path.endswith("/get_files/file"):
                qs = parse_qs(parsed.query)
                h = qs.get("hash", [None])[0]
                if h and re.fullmatch(r"[0-9a-f]{64}", h.lower()):
                    return h.lower()
    except Exception:
        pass
    # Normalize slashes for Windows paths and lowercase for comparison
    real = real.replace('\\', '/')
    return real.lower()
def _infer_store_from_playlist_item(item: Dict[str, Any]) -> str:
    """Infer a friendly store label from an MPV playlist entry."""
    name = item.get("filename") if isinstance(item, dict) else None
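One detail worth calling out in the URL branch above: parse_qs returns every query parameter as a list of values, which is why the hash is pulled out with qs.get("hash", [None])[0]. A standalone sketch of the same extraction (the address and hash below are made-up illustration values):

    from typing import Optional
    from urllib.parse import urlparse, parse_qs
    import re

    def hash_from_hydrus_url(url: str) -> Optional[str]:
        # Return the 64-hex hash from a .../get_files/file?hash=... URL, or None.
        parsed = urlparse(url)
        if not parsed.path.endswith("/get_files/file"):
            return None
        candidate = parse_qs(parsed.query).get("hash", [None])[0]
        if candidate and re.fullmatch(r"[0-9a-f]{64}", candidate.lower()):
            return candidate.lower()
        return None

    print(hash_from_hydrus_url("http://127.0.0.1:45869/get_files/file?hash=" + "ab" * 32))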
@@ -97,6 +128,10 @@ def _infer_store_from_playlist_item(item: Dict[str, Any]) -> str:
    if memory_target:
        target = memory_target
    # Hydrus hashes: bare 64-hex entries
    if re.fullmatch(r"[0-9a-f]{64}", target.lower()):
        return "hydrus"
    lower = target.lower()
    if lower.startswith("magnet:"):
        return "magnet"
@@ -245,31 +280,36 @@ def _monitor_mpv_logs(duration: float = 3.0) -> None:
        # Request log messages
        client.send_command({"command": ["request_log_messages", "warn"]})
        # On Windows named pipes, avoid blocking the CLI; skip log read entirely
        if client.is_windows:
            client.disconnect()
            return
        import time
        start_time = time.time()
        # Unix sockets already have timeouts set; read until duration expires
        while time.time() - start_time < duration:
            # We need to read raw lines from the socket
            if client.is_windows:
                try:
                    line = client.sock.readline()
                    if line:
                        try:
                            msg = json.loads(line)
                            if msg.get("event") == "log-message":
                                text = msg.get("text", "").strip()
                                prefix = msg.get("prefix", "")
                                level = msg.get("level", "")
                                if "ytdl" in prefix or level == "error":
                                    debug(f"[MPV {prefix}] {text}", file=sys.stderr)
                        except json.JSONDecodeError:
                            pass
                except Exception:
                    break
            else:
                # Unix socket handling (simplified)
                try:
                    chunk = client.sock.recv(4096)
                except socket.timeout:
                    continue
                except Exception:
                    break
                time.sleep(0.05)
                if not chunk:
                    break
                for line in chunk.decode("utf-8", errors="ignore").splitlines():
                    try:
                        msg = json.loads(line)
                        if msg.get("event") == "log-message":
                            text = msg.get("text", "").strip()
                            prefix = msg.get("prefix", "")
                            level = msg.get("level", "")
                            if "ytdl" in prefix or level == "error":
                                debug(f"[MPV {prefix}] {text}", file=sys.stderr)
                    except json.JSONDecodeError:
                        continue
        client.disconnect()
    except Exception:
        pass
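For reference, the event shape this monitor consumes can be reproduced with a bare-bones listener. The sketch below is an assumption-laden illustration, not the project's client: it presumes a Unix-socket mpv instance started with --input-ipc-server=/tmp/mpvsocket, and it buffers partial lines since recv() may return a fragment of a JSON message.

    import json
    import socket
    import time

    SOCKET_PATH = "/tmp/mpvsocket"  # assumption: whatever --input-ipc-server was set to

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(SOCKET_PATH)
    sock.settimeout(0.5)
    # Ask mpv to forward log messages of "warn" severity or worse as events.
    sock.sendall((json.dumps({"command": ["request_log_messages", "warn"]}) + "\n").encode())

    buf = b""
    deadline = time.time() + 3.0
    while time.time() < deadline:
        try:
            chunk = sock.recv(4096)
        except socket.timeout:
            continue
        if not chunk:
            break
        buf += chunk
        while b"\n" in buf:
            line, buf = buf.split(b"\n", 1)
            try:
                msg = json.loads(line)
            except json.JSONDecodeError:
                continue
            if msg.get("event") == "log-message":
                print(f'[{msg.get("prefix")}/{msg.get("level")}] {msg.get("text", "").strip()}')
    sock.close()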
@@ -294,6 +334,31 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
    except Exception:
        hydrus_url = None
    # Dedupe existing playlist before adding more (unless we're replacing it)
    existing_targets: set[str] = set()
    if not clear_first:
        playlist = _get_playlist(silent=True) or []
        dup_indexes: List[int] = []
        for idx, pl_item in enumerate(playlist):
            fname = pl_item.get("filename") if isinstance(pl_item, dict) else str(pl_item)
            alt = pl_item.get("playlist-path") if isinstance(pl_item, dict) else None
            norm = _normalize_playlist_target(fname) or _normalize_playlist_target(alt)
            if not norm:
                continue
            if norm in existing_targets:
                dup_indexes.append(idx)
            else:
                existing_targets.add(norm)
        # Remove duplicates from playlist starting from the end to keep indices valid
        for idx in reversed(dup_indexes):
            try:
                _send_ipc_command({"command": ["playlist-remove", idx], "request_id": 106}, silent=True)
            except Exception:
                pass
    new_targets: set[str] = set()
    for i, item in enumerate(items):
        # Extract URL/Path
        target = None
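The reverse-order removal above is what keeps the IPC calls correct: deleting an entry shifts every later playlist index down by one, so walking dup_indexes from the end leaves the indices that are still pending valid. The same effect on a plain list, with toy data:

    playlist = ["a", "b", "a", "c", "b"]
    dup_indexes = [2, 4]  # later duplicates of "a" and "b"

    for idx in reversed(dup_indexes):
        del playlist[idx]

    print(playlist)  # ['a', 'b', 'c'] -- removing index 4 first keeps index 2 pointing at the right entry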
@@ -309,6 +374,16 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
            target = item
        if target:
            # If we just have a hydrus hash, build a direct file URL for MPV
            if re.fullmatch(r"[0-9a-f]{64}", str(target).strip().lower()) and hydrus_url:
                target = f"{hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"
            norm_key = _normalize_playlist_target(target) or str(target).strip().lower()
            if norm_key in existing_targets or norm_key in new_targets:
                debug(f"Skipping duplicate playlist entry: {title or target}")
                continue
            new_targets.add(norm_key)
            # Check if it's a yt-dlp supported URL
            is_ytdlp = False
            if target.startswith("http") and is_url_supported_by_ytdlp(target):
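Because _normalize_playlist_target collapses a hydrus file URL back to its hash, a bare hash that was queued earlier and the URL built for it here reduce to the same dedupe key. A small check of that round trip, assuming the function from the earlier hunk is in scope (the base URL and hash are illustrative values):

    hydrus_url = "http://127.0.0.1:45869"  # illustrative; the real value comes from config
    bare_hash = "ab" * 32                  # illustrative 64-hex hash

    as_url = f"{hydrus_url.rstrip('/')}/get_files/file?hash={bare_hash}"

    # Both spellings normalize to the same key, so the second add is skipped as a duplicate.
    assert _normalize_playlist_target(bare_hash) == _normalize_playlist_target(as_url) == bare_hash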
@@ -699,7 +774,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            # Monitor logs briefly for errors (e.g. ytdl failures)
            _monitor_mpv_logs(3.0)
            return 0
        # Refresh playlist view so the user sees the new current item immediately
        items = _get_playlist(silent=True) or items
        list_mode = True
        index_arg = None
    else:
        debug(f"Failed to play item: {resp.get('error') if resp else 'No response'}")
        return 1
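_get_playlist itself is not shown in this diff, but refreshing the view presumably comes down to re-reading mpv's playlist property over the same IPC channel. A rough sketch of that query under the same socket-path assumption as above; each returned entry carries at least "filename", and the active one is flagged with "current":

    import json
    import socket

    def read_playlist(socket_path: str = "/tmp/mpvsocket") -> list:
        # Assumption: mpv is running with --input-ipc-server=<socket_path>.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(socket_path)
        sock.settimeout(2.0)
        sock.sendall((json.dumps({"command": ["get_property", "playlist"], "request_id": 1}) + "\n").encode())
        buf = b""
        try:
            for _ in range(50):  # skip interleaved events until our reply arrives
                while b"\n" not in buf:
                    buf += sock.recv(4096)
                line, buf = buf.split(b"\n", 1)
                msg = json.loads(line)
                if msg.get("request_id") == 1:
                    return msg.get("data", []) if msg.get("error") == "success" else []
        finally:
            sock.close()
        return []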