dfdkflj
448
cmdnats/pipe.py
@@ -14,7 +14,7 @@ from helper.mpv_ipc import get_ipc_pipe_path, MPVIPCClient
 import pipeline as ctx
 from helper.download import is_url_supported_by_ytdlp
-from helper.local_library import LocalLibrarySearchOptimizer
+from helper.folder_store import LocalLibrarySearchOptimizer
 from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url
 from hydrus_health_check import get_cookies_file_path
 
@@ -35,6 +35,20 @@ def _send_ipc_command(command: Dict[str, Any], silent: bool = False) -> Optional
         debug(f"IPC Error: {e}", file=sys.stderr)
         return None
 
+
+def _is_mpv_running() -> bool:
+    """Check if MPV is currently running and accessible via IPC."""
+    try:
+        ipc_pipe = get_ipc_pipe_path()
+        client = MPVIPCClient(socket_path=ipc_pipe)
+        if client.connect():
+            client.disconnect()
+            return True
+        return False
+    except Exception:
+        return False
+
+
 def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
     """Get the current playlist from MPV. Returns None if MPV is not running."""
     cmd = {"command": ["get_property", "playlist"], "request_id": 100}
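For readers unfamiliar with the transport these helpers wrap: MPV's JSON IPC is newline-delimited JSON over a local socket (a Unix socket on Linux/macOS, a named pipe on Windows, which is why this file goes through `get_ipc_pipe_path()` and `MPVIPCClient` rather than touching the socket directly). A minimal sketch of the same `get_property` round-trip, assuming a Unix socket at `/tmp/mpvsocket`:

```python
import json
import socket

def mpv_get_playlist(socket_path: str = "/tmp/mpvsocket"):
    """Fetch the playlist from a running MPV via raw JSON IPC (Unix socket only)."""
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        sock.connect(socket_path)
        # One JSON object per line; MPV echoes request_id in its reply.
        sock.sendall(json.dumps({"command": ["get_property", "playlist"], "request_id": 100}).encode() + b"\n")
        buf = b""
        while True:
            chunk = sock.recv(4096)
            if not chunk:
                return None
            buf += chunk
            while b"\n" in buf:
                line, buf = buf.split(b"\n", 1)
                msg = json.loads(line)
                # MPV also pushes asynchronous events; skip them until our reply arrives.
                if msg.get("request_id") == 100:
                    return msg.get("data")
```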
@@ -87,8 +101,75 @@ def _extract_target_from_memory_uri(text: str) -> Optional[str]
     return None
 
 
-def _normalize_playlist_target(text: Optional[str]) -> Optional[str]:
-    """Normalize playlist entry targets for dedupe comparisons."""
+def _find_hydrus_instance_for_hash(hash_str: str, file_storage: Any) -> Optional[str]:
+    """Find which Hydrus instance serves a specific file hash.
+
+    Args:
+        hash_str: SHA256 hash (64 hex chars)
+        file_storage: FileStorage instance with Hydrus backends
+
+    Returns:
+        Instance name (e.g., 'home') or None if not found
+    """
+    # Query each Hydrus backend to see if it has this file
+    for backend_name in file_storage.list_backends():
+        backend = file_storage[backend_name]
+        # Check if this is a Hydrus backend by checking class name
+        backend_class = type(backend).__name__
+        if backend_class != "HydrusNetwork":
+            continue
+
+        try:
+            # Query metadata to see if this instance has the file
+            metadata = backend.get_metadata(hash_str)
+            if metadata:
+                return backend_name
+        except Exception:
+            # This instance doesn't have the file or had an error
+            continue
+
+    return None
+
+
+def _find_hydrus_instance_by_url(url: str, file_storage: Any) -> Optional[str]:
+    """Find which Hydrus instance matches a given URL.
+
+    Args:
+        url: Full URL (e.g., http://localhost:45869/get_files/file?hash=...)
+        file_storage: FileStorage instance with Hydrus backends
+
+    Returns:
+        Instance name (e.g., 'home') or None if not found
+    """
+    from urllib.parse import urlparse
+
+    parsed_target = urlparse(url)
+    target_netloc = parsed_target.netloc.lower()
+
+    # Check each Hydrus backend's URL
+    for backend_name in file_storage.list_backends():
+        backend = file_storage[backend_name]
+        backend_class = type(backend).__name__
+        if backend_class != "HydrusNetwork":
+            continue
+
+        # Get the backend's base URL from its client
+        try:
+            backend_url = backend._client.base_url
+            parsed_backend = urlparse(backend_url)
+            backend_netloc = parsed_backend.netloc.lower()
+
+            # Match by netloc (host:port)
+            if target_netloc == backend_netloc:
+                return backend_name
+        except Exception:
+            continue
+
+    return None
+
+
+def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
+    """Normalize playlist entry paths for dedupe comparisons."""
     if not text:
         return None
     real = _extract_target_from_memory_uri(text) or text
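A hypothetical call of the new helper, assuming a `FileStorage` configured with `HydrusNetwork` backends named `home` and `work` (the instance names and the `config` object are placeholders; `get_metadata()` is the per-backend lookup the loop above relies on):

```python
from helper.store import FileStorage

file_storage = FileStorage(config)                # config: whatever dict the cmdlet was given
sample_hash = "0123456789abcdef" * 4              # any 64-char SHA256 hex digest

instance = _find_hydrus_instance_for_hash(sample_hash, file_storage)
if instance:
    print(f"hash is served by Hydrus instance '{instance}'")   # e.g. 'home'
else:
    print("no configured Hydrus instance knows this hash")
```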
@@ -118,8 +199,16 @@ def _normalize_playlist_target(text: Optional[str]) -> Optional[str]:
     return real.lower()
 
 
-def _infer_store_from_playlist_item(item: Dict[str, Any]) -> str:
-    """Infer a friendly store label from an MPV playlist entry."""
+def _infer_store_from_playlist_item(item: Dict[str, Any], file_storage: Optional[Any] = None) -> str:
+    """Infer a friendly store label from an MPV playlist entry.
+
+    Args:
+        item: MPV playlist item dict
+        file_storage: Optional FileStorage instance for querying specific backend instances
+
+    Returns:
+        Store label (e.g., 'home', 'work', 'local', 'youtube', etc.)
+    """
     name = item.get("filename") if isinstance(item, dict) else None
     target = str(name or "")
 
@@ -130,19 +219,33 @@ def _infer_store_from_playlist_item(item: Dict[str, Any]) -> str:
 
     # Hydrus hashes: bare 64-hex entries
     if re.fullmatch(r"[0-9a-f]{64}", target.lower()):
+        # If we have file_storage, query each Hydrus instance to find which one has this hash
+        if file_storage:
+            hash_str = target.lower()
+            hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
+            if hydrus_instance:
+                return hydrus_instance
         return "hydrus"
 
     lower = target.lower()
     if lower.startswith("magnet:"):
         return "magnet"
     if lower.startswith("hydrus://"):
+        # Extract hash from hydrus:// URL if possible
+        if file_storage:
+            hash_match = re.search(r"[0-9a-f]{64}", target.lower())
+            if hash_match:
+                hash_str = hash_match.group(0)
+                hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
+                if hydrus_instance:
+                    return hydrus_instance
         return "hydrus"
 
     # Windows / UNC paths
     if re.match(r"^[a-z]:[\\/]", target, flags=re.IGNORECASE) or target.startswith("\\\\"):
         return "local"
 
-    # file:// URLs
+    # file:// url
     if lower.startswith("file://"):
         return "local"
 
@@ -162,9 +265,33 @@ def _infer_store_from_playlist_item(item: Dict[str, Any]) -> str:
            return "soundcloud"
        if "bandcamp" in host_stripped:
            return "bandcamp"
-       if "get_files" in path or host_stripped in {"127.0.0.1", "localhost"}:
+       if "get_files" in path or "file?hash=" in path or host_stripped in {"127.0.0.1", "localhost"}:
+           # Hydrus API URL - try to extract hash and find instance
+           if file_storage:
+               # Try to extract hash from URL parameters
+               hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
+               if hash_match:
+                   hash_str = hash_match.group(1)
+                   hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
+                   if hydrus_instance:
+                       return hydrus_instance
+               # If no hash in URL, try matching the base URL to configured instances
+               hydrus_instance = _find_hydrus_instance_by_url(target, file_storage)
+               if hydrus_instance:
+                   return hydrus_instance
            return "hydrus"
        if re.match(r"^\d+\.\d+\.\d+\.\d+$", host_stripped) and "get_files" in path:
+           # IP-based Hydrus URL
+           if file_storage:
+               hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
+               if hash_match:
+                   hash_str = hash_match.group(1)
+                   hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
+                   if hydrus_instance:
+                       return hydrus_instance
+               hydrus_instance = _find_hydrus_instance_by_url(target, file_storage)
+               if hydrus_instance:
+                   return hydrus_instance
            return "hydrus"
 
        parts = host_stripped.split('.')
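Rough expectations for the store labels this function now produces; the targets below are illustrative playlist filenames, and the Hydrus entries fall back to the generic "hydrus" label when no `file_storage` is passed:

```python
samples = [
    "a" * 64,                                                   # bare SHA256 -> "hydrus"
    "magnet:?xt=urn:btih:abcdef",                               # -> "magnet"
    "C:\\media\\clip.mp4",                                      # Windows path -> "local"
    "file:///home/user/clip.mp4",                               # -> "local"
    "https://soundcloud.com/artist/track",                      # -> "soundcloud"
    "http://localhost:45869/get_files/file?hash=" + "a" * 64,   # Hydrus API URL -> "hydrus"
]
for target in samples:
    print(target[:50], "->", _infer_store_from_playlist_item({"filename": target}))
```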
@@ -231,15 +358,15 @@ def _build_ytdl_options(config: Optional[Dict[str, Any]], hydrus_header: Optiona
     return ",".join(opts) if opts else None
 
 
-def _is_hydrus_target(target: str, hydrus_url: Optional[str]) -> bool:
-    if not target:
+def _is_hydrus_path(path: str, hydrus_url: Optional[str]) -> bool:
+    if not path:
         return False
-    lower = target.lower()
+    lower = path.lower()
     if "hydrus://" in lower:
         return True
-    parsed = urlparse(target)
+    parsed = urlparse(path)
     host = (parsed.netloc or "").lower()
-    path = parsed.path or ""
+    path_part = parsed.path or ""
     if hydrus_url:
         try:
             hydrus_host = urlparse(hydrus_url).netloc.lower()
@@ -247,9 +374,9 @@ def _is_hydrus_target(target: str, hydrus_url: Optional[str]) -> bool:
                 return True
         except Exception:
             pass
-    if "get_files" in path or "file?hash=" in path:
+    if "get_files" in path_part or "file?hash=" in path_part:
         return True
-    if re.match(r"^\d+\.\d+\.\d+\.\d+$", host) and "get_files" in path:
+    if re.match(r"^\d+\.\d+\.\d+\.\d+$", host) and "get_files" in path_part:
         return True
     return False
 
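The rename is behaviour-preserving: the parameter becomes `path`, and the old `path` local becomes `path_part` so it no longer reuses the parameter's name. Illustrative checks (the second argument is whatever `get_hydrus_url(config)` returns, `None` here):

```python
assert _is_hydrus_path("hydrus://deadbeef", None) is True                                   # hydrus:// scheme
assert _is_hydrus_path("http://127.0.0.1:45869/get_files/file?hash=" + "a" * 64, None) is True   # Hydrus client API path
assert _is_hydrus_path("https://example.com/video.mp4", None) is False
```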
@@ -313,6 +440,113 @@ def _monitor_mpv_logs(duration: float = 3.0) -> None:
             client.disconnect()
     except Exception:
         pass
+
+
+def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[Dict[str, Any]]) -> Optional[tuple[str, Optional[str]]]:
+    """Extract a playable path/URL from an item, handling different store types.
+
+    Args:
+        item: Item to extract path from (dict, PipeObject, or string)
+        file_storage: FileStorage instance for querying backends
+        config: Config dict for Hydrus URL
+
+    Returns:
+        Tuple of (path, title) or None if no valid path found
+    """
+    path = None
+    title = None
+    store = None
+    file_hash = None
+
+    # Extract fields from item - prefer a disk path ('path'), but accept 'url' as fallback for providers
+    if isinstance(item, dict):
+        # Support both canonical 'path' and legacy 'file_path' keys, and provider 'url' keys
+        path = item.get("path") or item.get("file_path")
+        # Fallbacks for provider-style entries where URL is stored in 'url' or 'source_url' or 'target'
+        if not path:
+            path = item.get("url") or item.get("source_url") or item.get("target")
+        if not path:
+            known = item.get("url") or item.get("url") or []
+            if known and isinstance(known, list):
+                path = known[0]
+        title = item.get("title") or item.get("file_title")
+        store = item.get("store") or item.get("storage") or item.get("storage_source") or item.get("origin")
+        file_hash = item.get("hash") or item.get("file_hash") or item.get("hash_hex")
+    elif hasattr(item, "path") or hasattr(item, "url") or hasattr(item, "source_url") or hasattr(item, "store") or hasattr(item, "hash"):
+        # Handle PipeObject / dataclass objects - prefer path, but fall back to url/source_url attributes
+        path = getattr(item, "path", None) or getattr(item, "file_path", None)
+        if not path:
+            path = getattr(item, "url", None) or getattr(item, "source_url", None) or getattr(item, "target", None)
+        if not path:
+            known = getattr(item, "url", None) or (getattr(item, "extra", None) or {}).get("url")
+            if known and isinstance(known, list):
+                path = known[0]
+        title = getattr(item, "title", None) or getattr(item, "file_title", None)
+        store = getattr(item, "store", None) or getattr(item, "origin", None)
+        file_hash = getattr(item, "hash", None)
+    elif isinstance(item, str):
+        path = item
+
+    # Debug: show incoming values
+    try:
+        debug(f"_get_playable_path: store={store}, path={path}, hash={file_hash}")
+    except Exception:
+        pass
+
+    if not path:
+        return None
+
+    # If we have a store and hash, use store's .pipe() method if available
+    # Skip this for URL-based providers (YouTube, SoundCloud, etc.) which have hash="unknown"
+    # Also skip if path is already a URL (http/https)
+    if store and file_hash and file_hash != "unknown" and file_storage:
+        # Check if this is actually a URL - if so, just return it
+        if path.startswith(("http://", "https://")):
+            return (path, title)
+
+        try:
+            backend = file_storage[store]
+            # Check if backend has a .pipe() method
+            if hasattr(backend, 'pipe') and callable(backend.pipe):
+                pipe_path = backend.pipe(file_hash, config)
+                if pipe_path:
+                    path = pipe_path
+                    debug(f"Got pipe path from {store} backend: {path}")
+        except KeyError:
+            # Store not found in file_storage - it could be a search provider (youtube, bandcamp, etc.)
+            from helper.provider import get_search_provider
+            try:
+                provider = get_search_provider(store, config or {})
+                if provider and hasattr(provider, 'pipe') and callable(provider.pipe):
+                    try:
+                        debug(f"Calling provider.pipe for '{store}' with path: {path}")
+                        provider_path = provider.pipe(path, config or {})
+                        debug(f"provider.pipe returned: {provider_path}")
+                        if provider_path:
+                            path = provider_path
+                            debug(f"Got pipe path from provider '{store}': {path}")
+                    except Exception as e:
+                        debug(f"Error in provider.pipe for '{store}': {e}", file=sys.stderr)
+            except Exception as e:
+                debug(f"Error calling provider.pipe for '{store}': {e}", file=sys.stderr)
+        except Exception as e:
+            debug(f"Error calling .pipe() on store '{store}': {e}", file=sys.stderr)
+
+    # As a fallback, if a provider exists for this store (e.g., youtube) and
+    # this store is not part of FileStorage backends, call provider.pipe()
+    if store and (not file_storage or store not in (file_storage.list_backends() if file_storage else [])):
+        try:
+            from helper.provider import get_search_provider
+            provider = get_search_provider(store, config or {})
+            if provider and hasattr(provider, 'pipe') and callable(provider.pipe):
+                provider_path = provider.pipe(path, config or {})
+                if provider_path:
+                    path = provider_path
+                    debug(f"Got pipe path from provider '{store}' (fallback): {path}")
+        except Exception as e:
+            debug(f"Error calling provider.pipe (fallback) for '{store}': {e}", file=sys.stderr)
+
+    return (path, title)
 
 
 def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[Dict[str, Any]] = None) -> bool:
     """Queue items to MPV, starting it if necessary.
 
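A sketch of how the new resolver behaves for the two main item shapes, assuming `file_storage` and `config` come from the initialisation added further down and that a Folder-style backend named `local` exists (names are illustrative):

```python
stored_item = {"store": "local", "hash": "a" * 64, "path": "/library/" + "a" * 64 + ".mp4", "title": "Clip"}
provider_item = {"store": "youtube", "hash": "unknown", "url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ", "title": "Song"}

print(_get_playable_path(stored_item, file_storage, config))
# -> (backend.pipe(hash, config) result if the 'local' backend exposes .pipe(), else the original path, "Clip")
print(_get_playable_path(provider_item, file_storage, config))
# -> the URL (hash == "unknown" skips the backend lookup), possibly rewritten by provider.pipe(), plus "Song"
```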
@@ -323,6 +557,12 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
     Returns:
         True if MPV was started, False if items were queued via IPC.
     """
+    # Debug: print incoming items
+    try:
+        debug(f"_queue_items: count={len(items)} types={[type(i).__name__ for i in items]}")
+    except Exception:
+        pass
+
     # Just verify cookies are configured, don't try to set via IPC
     _ensure_ytdl_cookies()
 
@@ -333,6 +573,14 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
         hydrus_url = get_hydrus_url(config) if config is not None else None
     except Exception:
         hydrus_url = None
 
+    # Initialize FileStorage for path resolution
+    file_storage = None
+    try:
+        from helper.store import FileStorage
+        file_storage = FileStorage(config or {})
+    except Exception as e:
+        debug(f"Warning: Could not initialize FileStorage: {e}", file=sys.stderr)
+
     # Dedupe existing playlist before adding more (unless we're replacing it)
     existing_targets: set[str] = set()
@@ -342,7 +590,7 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
         for idx, pl_item in enumerate(playlist):
             fname = pl_item.get("filename") if isinstance(pl_item, dict) else str(pl_item)
             alt = pl_item.get("playlist-path") if isinstance(pl_item, dict) else None
-            norm = _normalize_playlist_target(fname) or _normalize_playlist_target(alt)
+            norm = _normalize_playlist_path(fname) or _normalize_playlist_path(alt)
             if not norm:
                 continue
             if norm in existing_targets:
@@ -360,25 +608,25 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
     new_targets: set[str] = set()
 
     for i, item in enumerate(items):
-        # Extract URL/Path
-        target = None
-        title = None
+        # Debug: show the item being processed
+        try:
+            debug(f"_queue_items: processing idx={i} type={type(item)} repr={repr(item)[:200]}")
+        except Exception:
+            pass
+        # Extract URL/Path using store-aware logic
+        result = _get_playable_path(item, file_storage, config)
+        if not result:
+            debug(f"_queue_items: item idx={i} produced no playable path")
+            continue
 
-        if isinstance(item, dict):
-            target = item.get("target") or item.get("url") or item.get("path") or item.get("filename")
-            title = item.get("title") or item.get("name")
-        elif hasattr(item, "target"):
-            target = item.target
-            title = getattr(item, "title", None)
-        elif isinstance(item, str):
-            target = item
+        target, title = result
 
        if target:
            # If we just have a hydrus hash, build a direct file URL for MPV
            if re.fullmatch(r"[0-9a-f]{64}", str(target).strip().lower()) and hydrus_url:
                target = f"{hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"
 
-           norm_key = _normalize_playlist_target(target) or str(target).strip().lower()
+           norm_key = _normalize_playlist_path(target) or str(target).strip().lower()
            if norm_key in existing_targets or norm_key in new_targets:
                debug(f"Skipping duplicate playlist entry: {title or target}")
                continue
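The dedupe keys go through `_normalize_playlist_path()`, so an entry wrapped in the memory:// M3U trick (used below to carry titles) and the bare URL should collapse to the same key. Illustrative values; the exact memory:// layout is whatever this file's title wrapper emits:

```python
bare = "http://localhost:45869/get_files/file?hash=" + "a" * 64
wrapped = "memory://#EXTM3U\n#EXTINF:-1,My Title\n" + bare

print(_normalize_playlist_path(bare))
print(_normalize_playlist_path(wrapped))
# Both should print the same lowercased key, so re-queuing the wrapped form is skipped.
```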
@@ -386,11 +634,16 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
 
            # Check if it's a yt-dlp supported URL
            is_ytdlp = False
-           if target.startswith("http") and is_url_supported_by_ytdlp(target):
-               is_ytdlp = True
+           # Treat any http(s) target as yt-dlp candidate. If the Python yt-dlp
+           # module is available we also check more deeply, but default to True
+           # so MPV can use its ytdl hooks for remote streaming sites.
+           try:
+               is_ytdlp = target.startswith("http") or is_url_supported_by_ytdlp(target)
+           except Exception:
+               is_ytdlp = target.startswith("http")
 
            # Use memory:// M3U hack to pass title to MPV
-           # Skip for yt-dlp URLs to ensure proper handling
+           # Skip for yt-dlp url to ensure proper handling
            if title and not is_ytdlp:
                # Sanitize title for M3U (remove newlines)
                safe_title = title.replace('\n', ' ').replace('\r', '')
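For reference, the memory:// M3U hack mentioned in the comment works by handing MPV a tiny in-memory playlist whose `#EXTINF` line carries the display title. A minimal sketch of such a wrapper (the real helper in this file may format the entry differently):

```python
def wrap_with_title(target: str, title: str) -> str:
    """Return a memory:// M3U document so MPV shows `title` instead of the raw URL."""
    safe_title = title.replace("\n", " ").replace("\r", "")
    return f"memory://#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"

# The wrapped string is then what gets passed to loadfile instead of the bare target.
```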
@@ -403,8 +656,8 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
            if clear_first and i == 0:
                mode = "replace"
 
-           # If this is a Hydrus target, set header property and yt-dlp headers before loading
-           if hydrus_header and _is_hydrus_target(target_to_send, hydrus_url):
+           # If this is a Hydrus path, set header property and yt-dlp headers before loading
+           if hydrus_header and _is_hydrus_path(target_to_send, hydrus_url):
                header_cmd = {"command": ["set_property", "http-header-fields", hydrus_header], "request_id": 199}
                _send_ipc_command(header_cmd, silent=True)
                if ytdl_opts:
@@ -412,11 +665,18 @@ def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[D
                    _send_ipc_command(ytdl_cmd, silent=True)
 
            cmd = {"command": ["loadfile", target_to_send, mode], "request_id": 200}
-           resp = _send_ipc_command(cmd)
+           try:
+               debug(f"Sending MPV loadfile: {target_to_send} mode={mode}")
+               resp = _send_ipc_command(cmd)
+               debug(f"MPV loadfile response: {resp}")
+           except Exception as e:
+               debug(f"Exception sending loadfile to MPV: {e}", file=sys.stderr)
+               resp = None
 
            if resp is None:
                # MPV not running (or died)
                # Start MPV with remaining items
+               debug(f"MPV not running/died while queuing, starting MPV with remaining items: {items[i:]}")
                _start_mpv(items[i:], config=config)
                return True
            elif resp.get("error") == "success":
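The new logging makes the IPC round-trip visible: a successful `loadfile` reply carries `"error": "success"` with the echoed `request_id`, while a `None` reply (socket gone) is what triggers the `_start_mpv()` fallback. Illustrative exchange, using standard MPV loadfile modes:

```python
resp = _send_ipc_command({"command": ["loadfile", "https://example.com/a.mp3", "replace"], "request_id": 200})
# resp == {"error": "success", "request_id": 200} when MPV accepted the file
resp = _send_ipc_command({"command": ["loadfile", "https://example.com/b.mp3", "append-play"], "request_id": 200})
# resp is None if the socket could not be reached, which makes the loop start MPV itself
```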
@@ -435,6 +695,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
 
     parsed = parse_cmdlet_args(args, CMDLET)
 
+    # Initialize FileStorage for detecting Hydrus instance names
+    file_storage = None
+    try:
+        from helper.store import FileStorage
+        file_storage = FileStorage(config)
+    except Exception as e:
+        debug(f"Warning: Could not initialize FileStorage: {e}", file=sys.stderr)
+
     # Initialize mpv_started flag
     mpv_started = False
 
@@ -485,7 +753,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
 
            # Emit the current item to pipeline
            result_obj = {
-               'file_path': filename,
+               'path': filename,
                'title': title,
                'cmdlet_name': '.pipe',
                'source': 'pipe',
@@ -683,10 +951,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            items_to_add = result
        elif isinstance(result, dict):
            items_to_add = [result]
+       else:
+           # Handle PipeObject or any other object type
+           items_to_add = [result]
 
-       if _queue_items(items_to_add, config=config):
+       # Debug: inspect incoming result and attributes
+       try:
+           debug(f"pipe._run: received result type={type(result)} repr={repr(result)[:200]}")
+           debug(f"pipe._run: attrs path={getattr(result, 'path', None)} url={getattr(result, 'url', None)} store={getattr(result, 'store', None)} hash={getattr(result, 'hash', None)}")
+       except Exception:
+           pass
+
+       if items_to_add and _queue_items(items_to_add, config=config):
            mpv_started = True
 
        if items_to_add:
            # If we added items, we might want to play the first one if nothing is playing?
            # For now, just list the playlist
@@ -760,7 +1038,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                return 1
        else:
            # Play item
-           if hydrus_header and _is_hydrus_target(filename, hydrus_url):
+           if hydrus_header and _is_hydrus_path(filename, hydrus_url):
                header_cmd = {"command": ["set_property", "http-header-fields", hydrus_header], "request_id": 198}
                _send_ipc_command(header_cmd, silent=True)
            cmd = {"command": ["playlist-play-index", idx], "request_id": 102}
@@ -799,28 +1077,84 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        except NameError:
            table_title = "MPV Playlist"
 
-       table = ResultTable(table_title)
+       table = ResultTable(table_title, preserve_order=True)
 
+       # Convert MPV items to PipeObjects with proper hash and store
+       pipe_objects = []
        for i, item in enumerate(items):
            is_current = item.get("current", False)
            title = _extract_title_from_item(item)
-           store = _infer_store_from_playlist_item(item)
-
-           # Truncate if too long
-           if len(title) > 80:
-               title = title[:77] + "..."
+           filename = item.get("filename", "")
+
+           # Extract the real path/URL from memory:// wrapper if present
+           real_path = _extract_target_from_memory_uri(filename) or filename
+
+           # Try to extract hash from the path/URL
+           file_hash = None
+           store_name = None
+
+           # Check if it's a Hydrus URL
+           if "get_files/file" in real_path or "hash=" in real_path:
+               # Extract hash from Hydrus URL
+               hash_match = re.search(r"hash=([0-9a-f]{64})", real_path.lower())
+               if hash_match:
+                   file_hash = hash_match.group(1)
+                   # Try to find which Hydrus instance has this file
+                   if file_storage:
+                       store_name = _find_hydrus_instance_for_hash(file_hash, file_storage)
+                   if not store_name:
+                       store_name = "hydrus"
+           # Check if it's a hash-based local file
+           elif real_path:
+               # Try to extract hash from filename (e.g., C:\path\1e8c46...a1b2.mp4)
+               path_obj = Path(real_path)
+               stem = path_obj.stem  # filename without extension
+               if len(stem) == 64 and all(c in '0123456789abcdef' for c in stem.lower()):
+                   file_hash = stem.lower()
+                   # Find which folder store has this file
+                   if file_storage:
+                       for backend_name in file_storage.list_backends():
+                           backend = file_storage[backend_name]
+                           if type(backend).__name__ == "Folder":
+                               # Check if this backend has the file
+                               try:
+                                   result_path = backend.get_file(file_hash)
+                                   if result_path and result_path.exists():
+                                       store_name = backend_name
+                                       break
+                               except Exception:
+                                   pass
+
+           # Fallback to inferred store if we couldn't find it
+           if not store_name:
+               store_name = _infer_store_from_playlist_item(item, file_storage=file_storage)
+
+           # Build PipeObject with proper metadata
+           from models import PipeObject
+           pipe_obj = PipeObject(
+               hash=file_hash or "unknown",
+               store=store_name or "unknown",
+               title=title,
+               path=real_path
+           )
+           pipe_objects.append(pipe_obj)
+
+           # Truncate title for display
+           display_title = title
+           if len(display_title) > 80:
+               display_title = display_title[:77] + "..."
 
            row = table.add_row()
            row.add_column("Current", "*" if is_current else "")
-           row.add_column("Store", store)
-           row.add_column("Title", title)
+           row.add_column("Store", store_name or "unknown")
+           row.add_column("Title", display_title)
 
            table.set_row_selection_args(i, [str(i + 1)])
 
        table.set_source_command(".pipe")
 
-       # Register results with pipeline context so @N selection works
-       ctx.set_last_result_table_overlay(table, items)
+       # Register PipeObjects (not raw MPV items) with pipeline context
+       ctx.set_last_result_table_overlay(table, pipe_objects)
        ctx.set_current_stage_table(table)
 
        print(table)
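The practical effect of this hunk is that @N selections over the playlist table now hand downstream cmdlets PipeObjects with a recovered hash and store instead of raw MPV dicts. Illustrative values (the keyword fields mirror the constructor call above; instance names depend on local config):

```python
from models import PipeObject

hydrus_entry = PipeObject(
    hash="1e8c46" + "0" * 58,                              # recovered from the hash= URL parameter
    store="home",                                          # resolved via _find_hydrus_instance_for_hash()
    title="Some video",
    path="http://localhost:45869/get_files/file?hash=" + "1e8c46" + "0" * 58,
)
provider_entry = PipeObject(
    hash="unknown",                                        # provider entries carry no content hash
    store="youtube",
    title="Some song",
    path="https://www.youtube.com/watch?v=dQw4w9WgXcQ",
)
```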
@@ -889,16 +1223,30 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None) -> Non
        if items:
            _queue_items(items, config=config)
 
+           # Auto-play the first item
+           import time
+           time.sleep(0.3)  # Give MPV a moment to process the queued items
+
+           # Play the first item (index 0) and unpause
+           play_cmd = {"command": ["playlist-play-index", 0], "request_id": 102}
+           play_resp = _send_ipc_command(play_cmd, silent=True)
+
+           if play_resp and play_resp.get("error") == "success":
+               # Ensure playback starts (unpause)
+               unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
+               _send_ipc_command(unpause_cmd, silent=True)
+               debug("Auto-playing first item")
+
    except Exception as e:
        debug(f"Error starting MPV: {e}", file=sys.stderr)
 
 
 CMDLET = Cmdlet(
     name=".pipe",
-    aliases=["pipe", "playlist", "queue", "ls-pipe"],
+    alias=["pipe", "playlist", "queue", "ls-pipe"],
     summary="Manage and play items in the MPV playlist via IPC",
     usage=".pipe [index|url] [-current] [-clear] [-list] [-url URL]",
-    args=[
+    arg=[
        CmdletArg(
            name="index",
            type="string",  # Changed to string to allow URL detection
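Taken together with the `_queue_items()` return contract quoted earlier (True if MPV was started, False if items were queued via IPC), the caller-side picture is:

```python
started = _queue_items(items, clear_first=True, config=config)
if started:
    # MPV was launched fresh by _start_mpv(), which now also auto-plays index 0
    debug("MPV started; playback begins at the first queued item")
else:
    # Items were appended to an already-running MPV over IPC
    debug("queued items into the existing MPV playlist")
```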