437 cmdnat/matrix.py
@@ -1,29 +1,436 @@
from typing import Any, Dict, Sequence, List
from __future__ import annotations

from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence
import sys
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
import tempfile
import re
import uuid
from urllib.parse import parse_qs, urlparse

from cmdlet._shared import Cmdlet, CmdletArg
from SYS.logger import log, debug
from result_table import ResultTable
# REFACTOR: Commenting out Matrix import until provider refactor is complete
from config import save_config, load_config
import pipeline as ctx


_MATRIX_PENDING_ITEMS_KEY = "matrix_pending_items"


def _normalize_to_list(value: Any) -> List[Any]:
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [value]


def _extract_room_id(room_obj: Any) -> Optional[str]:
    try:
        # PipeObject stores unknown fields in .extra
        if hasattr(room_obj, "extra"):
            extra = getattr(room_obj, "extra")
            if isinstance(extra, dict):
                rid = extra.get("room_id")
                if isinstance(rid, str) and rid.strip():
                    return rid.strip()
        # Dict fallback
        if isinstance(room_obj, dict):
            rid = room_obj.get("room_id")
            if isinstance(rid, str) and rid.strip():
                return rid.strip()
    except Exception:
        pass
    return None


def _extract_file_path(item: Any) -> Optional[str]:
    """Best-effort local file path extraction.

    Returns a filesystem path string only if it exists.
    """
    def _maybe_local_path(value: Any) -> Optional[str]:
        if value is None:
            return None
        if isinstance(value, Path):
            candidate_path = value
        else:
            text = str(value).strip()
            if not text:
                return None
            # Treat URLs as not-local.
            if text.startswith("http://") or text.startswith("https://"):
                return None
            candidate_path = Path(text).expanduser()
        try:
            if candidate_path.exists():
                return str(candidate_path)
        except Exception:
            return None
        return None

    try:
        if hasattr(item, "path"):
            found = _maybe_local_path(getattr(item, "path"))
            if found:
                return found
        if hasattr(item, "file_path"):
            found = _maybe_local_path(getattr(item, "file_path"))
            if found:
                return found
        if isinstance(item, dict):
            for key in ("path", "file_path", "target"):
                found = _maybe_local_path(item.get(key))
                if found:
                    return found
    except Exception:
        pass
    return None


def _extract_url(item: Any) -> Optional[str]:
    try:
        if hasattr(item, "url"):
            raw = getattr(item, "url")
            if isinstance(raw, str) and raw.strip():
                return raw.strip()
            if isinstance(raw, (list, tuple)):
                for v in raw:
                    if isinstance(v, str) and v.strip():
                        return v.strip()
        if hasattr(item, "source_url"):
            raw = getattr(item, "source_url")
            if isinstance(raw, str) and raw.strip():
                return raw.strip()
        if isinstance(item, dict):
            for key in ("url", "source_url", "path", "target"):
                raw = item.get(key)
                if isinstance(raw, str) and raw.strip() and raw.strip().startswith(("http://", "https://")):
                    return raw.strip()
    except Exception:
        pass
    return None


_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")


def _extract_sha256_hex(item: Any) -> Optional[str]:
    try:
        if hasattr(item, "hash"):
            h = getattr(item, "hash")
            if isinstance(h, str) and _SHA256_RE.fullmatch(h.strip()):
                return h.strip().lower()
        if isinstance(item, dict):
            h = item.get("hash")
            if isinstance(h, str) and _SHA256_RE.fullmatch(h.strip()):
                return h.strip().lower()
    except Exception:
        pass
    return None


def _extract_hash_from_hydrus_file_url(url: str) -> Optional[str]:
    try:
        parsed = urlparse(url)
        if not (parsed.path or "").endswith("/get_files/file"):
            return None
        qs = parse_qs(parsed.query or "")
        h = (qs.get("hash") or [None])[0]
        if isinstance(h, str) and _SHA256_RE.fullmatch(h.strip()):
            return h.strip().lower()
    except Exception:
        pass
    return None
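
# Illustrative sketch of what the helper above accepts (the host/port are hypothetical,
# not taken from this commit): a Hydrus direct-file URL carries the sha256 in its query string.
#
#   >>> _extract_hash_from_hydrus_file_url("http://127.0.0.1:45869/get_files/file?hash=" + "ab" * 32)
#   'abab...ab'  # the 64-char hex digest, lowercased
#   >>> _extract_hash_from_hydrus_file_url("https://example.com/clip.mp4") is None
#   True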


def _maybe_download_hydrus_file(item: Any, config: Dict[str, Any], output_dir: Path) -> Optional[str]:
    """If the item looks like a Hydrus file (hash + Hydrus URL), download it using Hydrus access key headers.

    This avoids 401 from Hydrus when the URL is /get_files/file?hash=... without headers.
    """
    try:
        from config import get_hydrus_access_key, get_hydrus_url
        from API.HydrusNetwork import HydrusNetwork as HydrusClient, download_hydrus_file

        # Prefer per-item Hydrus instance name when it matches a configured instance.
        store_name = None
        if isinstance(item, dict):
            store_name = item.get("store")
        else:
            store_name = getattr(item, "store", None)
        store_name = str(store_name).strip() if store_name else ""

        # Try the store name as instance key first; fallback to "home".
        instance_candidates = [s for s in [store_name.lower(), "home"] if s]
        hydrus_url = None
        access_key = None
        for inst in instance_candidates:
            access_key = (get_hydrus_access_key(config, inst) or "").strip() or None
            hydrus_url = (get_hydrus_url(config, inst) or "").strip() or None
            if access_key and hydrus_url:
                break

        if not access_key or not hydrus_url:
            return None

        url = _extract_url(item)
        file_hash = _extract_sha256_hex(item)
        if url and not file_hash:
            file_hash = _extract_hash_from_hydrus_file_url(url)

        # If it doesn't look like a Hydrus file, skip.
        if not file_hash:
            return None

        # Only treat it as Hydrus when we have a matching /get_files/file URL OR the item store suggests it.
        is_hydrus_url = False
        if url:
            parsed = urlparse(url)
            is_hydrus_url = (parsed.path or "").endswith("/get_files/file") and _extract_hash_from_hydrus_file_url(url) == file_hash
        hydrus_instances: set[str] = set()
        try:
            store_cfg = (config or {}).get("store") if isinstance(config, dict) else None
            if isinstance(store_cfg, dict):
                hydrus_cfg = store_cfg.get("hydrusnetwork")
                if isinstance(hydrus_cfg, dict):
                    hydrus_instances = {str(k).strip().lower() for k in hydrus_cfg.keys() if str(k).strip()}
        except Exception:
            hydrus_instances = set()

        store_hint = store_name.lower() in {"hydrus", "hydrusnetwork"} or (store_name.lower() in hydrus_instances)
        if not (is_hydrus_url or store_hint):
            return None

        client = HydrusClient(url=hydrus_url, access_key=access_key, timeout=30.0)
        file_url = url if (url and is_hydrus_url) else client.file_url(file_hash)

        # Best-effort extension from Hydrus metadata.
        suffix = ".hydrus"
        try:
            meta_response = client.fetch_file_metadata(hashes=[file_hash], include_mime=True)
            entries = meta_response.get("metadata") if isinstance(meta_response, dict) else None
            if isinstance(entries, list) and entries:
                entry = entries[0]
                if isinstance(entry, dict):
                    ext = entry.get("ext")
                    if isinstance(ext, str) and ext.strip():
                        cleaned = ext.strip()
                        if not cleaned.startswith("."):
                            cleaned = "." + cleaned.lstrip(".")
                        if len(cleaned) <= 12:
                            suffix = cleaned
        except Exception:
            pass

        output_dir.mkdir(parents=True, exist_ok=True)
        dest = output_dir / f"{file_hash}{suffix}"
        if dest.exists():
            # Avoid clobbering; pick a unique name.
            dest = output_dir / f"{file_hash}_{uuid.uuid4().hex[:10]}{suffix}"

        headers = {"Hydrus-Client-API-Access-Key": access_key}
        download_hydrus_file(file_url, headers, dest, timeout=30.0)
        if dest.exists():
            return str(dest)
    except Exception as exc:
        debug(f"[matrix] Hydrus export failed: {exc}")
    return None
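
# Minimal sketch of the kind of item the downloader above acts on (all values hypothetical):
# it only proceeds when a sha256 hash is present and either the URL is a matching
# /get_files/file link or the item's "store" names a configured Hydrus instance.
#
#   item = {
#       "hash": "ab" * 32,
#       "url": "http://127.0.0.1:45869/get_files/file?hash=" + "ab" * 32,
#       "store": "home",
#   }
#   dest = _maybe_download_hydrus_file(item, config, Path(tempfile.gettempdir()) / "hydrus-demo")
#   # -> e.g. ".../hydrus-demo/abab...ab.mp4" on success, or None if credentials/hash are missing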


def _maybe_unlock_alldebrid_url(url: str, config: Dict[str, Any]) -> str:
    try:
        parsed = urlparse(url)
        host = (parsed.netloc or "").lower()
        if host != "alldebrid.com":
            return url
        if not (parsed.path or "").startswith("/f/"):
            return url

        try:
            from Provider.alldebrid import _get_debrid_api_key  # type: ignore

            api_key = _get_debrid_api_key(config or {})
        except Exception:
            api_key = None
        if not api_key:
            return url

        from API.alldebrid import AllDebridClient

        client = AllDebridClient(str(api_key))
        unlocked = client.unlock_link(url)
        if isinstance(unlocked, str) and unlocked.strip():
            return unlocked.strip()
    except Exception:
        pass
    return url
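
# Behaviour sketch (URLs hypothetical): only alldebrid.com/f/... links are rewritten, and any
# failure (no API key, unlock error) falls back to returning the original URL unchanged.
#
#   _maybe_unlock_alldebrid_url("https://alldebrid.com/f/abc123", config)   # direct link if unlocked
#   _maybe_unlock_alldebrid_url("https://example.com/file.mkv", config)     # returned as-is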


def _resolve_upload_path(item: Any, config: Dict[str, Any]) -> Optional[str]:
    """Resolve a usable local file path for uploading.

    - Prefer existing local file paths.
    - Otherwise, if the item has an http(s) URL, download it to a temp directory.
    """
    local = _extract_file_path(item)
    if local:
        return local

    # If this is a Hydrus-backed item (e.g. /get_files/file?hash=...), download it with Hydrus headers.
    try:
        base_tmp = None
        if isinstance(config, dict):
            base_tmp = config.get("temp")
        output_dir = Path(str(base_tmp)).expanduser() if base_tmp else (Path(tempfile.gettempdir()) / "Medios-Macina")
        output_dir = output_dir / "matrix" / "hydrus"
        hydrus_path = _maybe_download_hydrus_file(item, config, output_dir)
        if hydrus_path:
            return hydrus_path
    except Exception:
        pass

    url = _extract_url(item)
    if not url:
        return None

    # Best-effort: unlock AllDebrid file links (they require auth and aren't directly downloadable).
    url = _maybe_unlock_alldebrid_url(url, config)

    try:
        from SYS.download import _download_direct_file

        base_tmp = None
        if isinstance(config, dict):
            base_tmp = config.get("temp")
        output_dir = Path(str(base_tmp)).expanduser() if base_tmp else (Path(tempfile.gettempdir()) / "Medios-Macina")
        output_dir = output_dir / "matrix"
        output_dir.mkdir(parents=True, exist_ok=True)
        result = _download_direct_file(url, output_dir, quiet=True)
        if result and hasattr(result, "path") and isinstance(result.path, Path) and result.path.exists():
            return str(result.path)
    except Exception as exc:
        debug(f"[matrix] Failed to download URL for upload: {exc}")

    return None
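
# Resolution order implemented above, summarised (paths hypothetical):
#   1. an existing local path on the item                  -> returned directly
#   2. a Hydrus-backed item (hash + configured instance)   -> downloaded under .../Medios-Macina/matrix/hydrus/
#   3. any other http(s) URL                               -> AllDebrid-unlocked if needed, then
#                                                              downloaded under .../Medios-Macina/matrix/
#   otherwise None, and the caller reports that a local file or direct URL is required.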


def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    # REFACTOR: Matrix cmdlet temporarily disabled during storage provider refactor
    log("⚠️ Matrix cmdlet is temporarily disabled during refactor", file=sys.stderr)
    return 1
    # Internal stage: send previously selected items to selected rooms.
    if any(str(a).lower() == "-send" for a in (args or [])):
        rooms = _normalize_to_list(result)
        room_ids: List[str] = []
        for r in rooms:
            rid = _extract_room_id(r)
            if rid:
                room_ids.append(rid)
        if not room_ids:
            log("No Matrix room selected (use @N on the rooms table)", file=sys.stderr)
            return 1

        pending_items = ctx.load_value(_MATRIX_PENDING_ITEMS_KEY, default=[])
        items = _normalize_to_list(pending_items)
        if not items:
            log("No pending items to upload (use: @N | .matrix)", file=sys.stderr)
            return 1

        from Provider.matrix import Matrix
        try:
            provider = Matrix(config)
        except Exception as exc:
            log(f"Matrix not available: {exc}", file=sys.stderr)
            return 1

        any_failed = False
        for rid in room_ids:
            for item in items:
                file_path = _resolve_upload_path(item, config)
                if not file_path:
                    any_failed = True
                    log("Matrix upload requires a local file (path) or a direct URL on the selected item", file=sys.stderr)
                    continue
                try:
                    link = provider.upload_to_room(file_path, rid)
                    debug(f"✓ Sent {Path(file_path).name} -> {rid}")
                    if link:
                        log(link)
                except Exception as exc:
                    any_failed = True
                    log(f"Matrix send failed for {Path(file_path).name}: {exc}", file=sys.stderr)

        # Clear pending items once we've attempted to send.
        ctx.store_value(_MATRIX_PENDING_ITEMS_KEY, [])
        return 1 if any_failed else 0

    # Default stage: show rooms, then wait for @N selection to resume sending.
    selected_items = _normalize_to_list(result)
    if not selected_items:
        log("Usage: @N | .matrix (select items first, then pick a room)", file=sys.stderr)
        return 1

    ctx.store_value(_MATRIX_PENDING_ITEMS_KEY, selected_items)

    from Provider.matrix import Matrix
    try:
        provider = Matrix(config)
    except Exception as exc:
        log(f"Matrix not available: {exc}", file=sys.stderr)
        return 1

    try:
        rooms = provider.list_rooms()
    except Exception as exc:
        log(f"Failed to list Matrix rooms: {exc}", file=sys.stderr)
        return 1

    if not rooms:
        log("No joined rooms found.", file=sys.stderr)
        return 0

    table = ResultTable("Matrix Rooms")
    table.set_table("matrix")
    table.set_source_command(".matrix", [])

    for room in rooms:
        row = table.add_row()
        name = str(room.get("name") or "").strip() if isinstance(room, dict) else ""
        room_id = str(room.get("room_id") or "").strip() if isinstance(room, dict) else ""
        row.add_column("Name", name)
        row.add_column("Room", room_id)

    # Make selection results clearer: stash a friendly title/store on the backing items.
    # This avoids confusion when the selection handler prints PipeObject debug info.
    room_items: List[Dict[str, Any]] = []
    for room in rooms:
        if not isinstance(room, dict):
            continue
        room_id = str(room.get("room_id") or "").strip()
        name = str(room.get("name") or "").strip()
        room_items.append(
            {
                **room,
                "store": "matrix",
                "title": name or room_id or "Matrix Room",
            }
        )

    # Overlay table: user selects @N, then we resume with `.matrix -send`.
    ctx.set_last_result_table_overlay(table, room_items)
    ctx.set_current_stage_table(table)
    ctx.set_pending_pipeline_tail([[".matrix", "-send"]], ".matrix")

    print()
    print(table.format_plain())
    print("\nSelect room(s) with @N (e.g. @1 or @1-3) to send the selected item(s)")
    return 0
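
# Intended interaction, as wired above (the indices are just examples):
#
#   @1-3 | .matrix     # stash the selected items and print the "Matrix Rooms" table
#   @2                 # pick a room; the pending tail re-runs ".matrix -send" against it
#
# Pending items are cleared after the send attempt, whether or not every upload succeeded.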


CMDLET = Cmdlet(
    name=".matrix",
    alias=["matrix", "rooms"],
    summary="List and select default Matrix room",
    usage=".matrix [selection]",
    summary="Send selected items to a Matrix room",
    usage="@N | .matrix",
    arg=[
        CmdletArg(
            name="selection",
            type="string",
            description="Index or ID of the room to set as default",
            required=False
        )
        CmdletArg(name="send", type="bool", description="(internal) Send to selected room(s)", required=False),
    ],
    exec=_run
)
128 cmdnat/pipe.py
@@ -12,7 +12,6 @@ from SYS.logger import debug, get_thread_stream, is_debug_enabled, set_debug, se
from result_table import ResultTable
from MPV.mpv_ipc import MPV
import pipeline as ctx
from SYS.download import is_url_supported_by_ytdlp
from models import PipeObject

from API.folder import LocalLibrarySearchOptimizer
@@ -20,6 +19,78 @@ from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url
from hydrus_health_check import get_cookies_file_path


_ALLDEBRID_UNLOCK_CACHE: Dict[str, str] = {}


def _get_alldebrid_api_key(config: Optional[Dict[str, Any]]) -> Optional[str]:
    try:
        if not isinstance(config, dict):
            return None
        provider_cfg = config.get("provider")
        if not isinstance(provider_cfg, dict):
            return None
        ad_cfg = provider_cfg.get("alldebrid")
        if not isinstance(ad_cfg, dict):
            return None
        key = ad_cfg.get("api_key")
        if not isinstance(key, str):
            return None
        key = key.strip()
        return key or None
    except Exception:
        return None


def _is_alldebrid_protected_url(url: str) -> bool:
    try:
        if not isinstance(url, str):
            return False
        u = url.strip()
        if not u.startswith(("http://", "https://")):
            return False
        p = urlparse(u)
        host = (p.netloc or "").lower()
        path = p.path or ""
        # AllDebrid file page links (require auth; not directly streamable by mpv)
        return host == "alldebrid.com" and path.startswith("/f/")
    except Exception:
        return False


def _maybe_unlock_alldebrid_url(url: str, config: Optional[Dict[str, Any]]) -> str:
    """Convert AllDebrid protected file URLs into direct streamable links.

    When AllDebrid returns `https://alldebrid.com/f/...`, that URL typically requires
    authentication. MPV cannot access it without credentials. We transparently call
    the AllDebrid API `link/unlock` (using the configured API key) to obtain a direct
    URL that MPV can stream.
    """
    if not _is_alldebrid_protected_url(url):
        return url

    cached = _ALLDEBRID_UNLOCK_CACHE.get(url)
    if isinstance(cached, str) and cached:
        return cached

    api_key = _get_alldebrid_api_key(config)
    if not api_key:
        return url

    try:
        from API.alldebrid import AllDebridClient

        client = AllDebridClient(api_key)
        unlocked = client.unlock_link(url)
        if isinstance(unlocked, str) and unlocked.strip():
            unlocked = unlocked.strip()
            _ALLDEBRID_UNLOCK_CACHE[url] = unlocked
            return unlocked
    except Exception as e:
        debug(f"AllDebrid unlock failed for MPV target: {e}", file=sys.stderr)

    return url
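
# Call-sequence sketch (URL hypothetical): the first unlock goes through AllDebridClient.unlock_link,
# the second is answered from _ALLDEBRID_UNLOCK_CACHE, and non-AllDebrid targets pass through untouched.
#
#   u = "https://alldebrid.com/f/some-id"
#   first = _maybe_unlock_alldebrid_url(u, config)                  # API call
#   again = _maybe_unlock_alldebrid_url(u, config)                  # cached, no API call
#   _maybe_unlock_alldebrid_url("https://youtu.be/xyz", config)     # returned unchanged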


def _ensure_lyric_overlay(mpv: MPV) -> None:
    try:
        mpv.ensure_lyric_loader_running()
@@ -621,6 +692,13 @@ def _queue_items(
        target, title = result

        # If the target is an AllDebrid protected file URL, unlock it to a direct link for MPV.
        try:
            if isinstance(target, str):
                target = _maybe_unlock_alldebrid_url(target, config)
        except Exception:
            pass

        # Prefer per-item Hydrus instance credentials when the item belongs to a Hydrus store.
        effective_hydrus_url = hydrus_url
        effective_hydrus_header = hydrus_header
@@ -665,21 +743,10 @@ def _queue_items(
            continue
        new_targets.add(norm_key)

        # Check if it's a yt-dlp supported URL
        is_ytdlp = False
        # Treat any http(s) target as yt-dlp candidate. If the Python yt-dlp
        # module is available we also check more deeply, but default to True
        # so MPV can use its ytdl hooks for remote streaming sites.
        is_hydrus_target = _is_hydrus_path(str(target), effective_hydrus_url)
        try:
            # Hydrus direct file URLs should not be treated as yt-dlp targets.
            is_ytdlp = (not is_hydrus_target) and (target.startswith("http") or is_url_supported_by_ytdlp(target))
        except Exception:
            is_ytdlp = (not is_hydrus_target) and target.startswith("http")

        # Use memory:// M3U hack to pass title to MPV
        # Skip for yt-dlp url to ensure proper handling
        if title and (is_hydrus_target or not is_ytdlp):
        # Use memory:// M3U hack to pass title to MPV.
        # This is especially important for remote URLs (e.g., YouTube) where MPV may otherwise
        # show the raw URL as the playlist title.
        if title:
            # Sanitize title for M3U (remove newlines)
            safe_title = title.replace('\n', ' ').replace('\r', '')
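
            # Sketch of the memory:// wrapper that safe_title presumably feeds (the exact construction
            # lives outside this hunk; this is an assumption, not code from the commit):
            #
            #   target_to_send = f"memory://#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"
            #
            # mpv reads the playlist text straight from the URI, so the queued entry keeps its
            # display title instead of showing the raw URL.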
@@ -703,8 +770,9 @@ def _queue_items(
        if clear_first and i == 0:
            mode = "replace"

        # If this is a Hydrus path, set header property and yt-dlp headers before loading
        if effective_hydrus_header and _is_hydrus_path(target_to_send, effective_hydrus_url):
        # If this is a Hydrus path, set header property and yt-dlp headers before loading.
        # Use the real target (not the memory:// wrapper) for detection.
        if effective_hydrus_header and _is_hydrus_path(str(target), effective_hydrus_url):
            header_cmd = {"command": ["set_property", "http-header-fields", effective_hydrus_header], "request_id": 199}
            _send_ipc_command(header_cmd, silent=True)
        if effective_ytdl_opts:
@@ -727,10 +795,8 @@ def _queue_items(
            _start_mpv(items[i:], config=config, start_opts=start_opts)
            return True
        elif resp.get("error") == "success":
            # Also set property for good measure
            if title:
                title_cmd = {"command": ["set_property", "force-media-title", title], "request_id": 201}
                _send_ipc_command(title_cmd)
            # Do not set `force-media-title` when queueing items. It's a global property and
            # would change the MPV window title even if the item isn't currently playing.
            debug(f"Queued: {title or target}")
        else:
            error_msg = str(resp.get('error'))
@@ -1008,7 +1074,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        ctx.set_last_result_table_overlay(table, [p['items'] for p in playlists])
        ctx.set_current_stage_table(table)

        print(table)
        # In pipeline mode, the CLI renders current-stage tables; printing here duplicates output.
        suppress_direct_print = bool(isinstance(config, dict) and config.get("_quiet_background_output"))
        if not suppress_direct_print:
            print(table)
        return 0

    # Everything below was originally outside a try block; keep it inside so `start_opts` is in scope.
@@ -1153,9 +1222,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            debug("MPV is starting up...")
            return 0

        debug("MPV is not running. Starting new instance...")
        _start_mpv([], config=config, start_opts=start_opts)
        return 0
            # IPC is ready; continue without restarting MPV again.
        else:
            debug("MPV is not running. Starting new instance...")
            _start_mpv([], config=config, start_opts=start_opts)
            return 0

    if not items:
        debug("MPV playlist is empty.")
@@ -1314,7 +1385,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        ctx.set_last_result_table_overlay(table, pipe_objects)
        ctx.set_current_stage_table(table)

        print(table)
        # In pipeline mode, the CLI renders current-stage tables; printing here duplicates output.
        suppress_direct_print = bool(isinstance(config, dict) and config.get("_quiet_background_output"))
        if not suppress_direct_print:
            print(table)

        return 0
    finally: