This commit is contained in:
2026-01-22 01:53:13 -08:00
parent b3e7f3e277
commit 33406a6ecf
17 changed files with 857 additions and 877 deletions

View File

@@ -4,6 +4,7 @@ import sys
import json
import socket
import re
from datetime import datetime
from urllib.parse import urlparse, parse_qs
from pathlib import Path
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args, resolve_tidal_manifest_path
@@ -13,8 +14,7 @@ from MPV.mpv_ipc import MPV
from SYS import pipeline as ctx
from SYS.models import PipeObject
from API.folder import LocalLibrarySearchOptimizer
from SYS.config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url
from SYS.config import get_hydrus_access_key, get_hydrus_url
_ALLDEBRID_UNLOCK_CACHE: Dict[str,
str] = {}
@@ -27,6 +27,94 @@ def _repo_root() -> Path:
return Path(os.getcwd())
def _playlist_store_path() -> Path:
    """Location of the JSON file that persists saved MPV playlists."""
    store_filename = "mpv_playlists.json"
    return _repo_root() / store_filename
def _load_playlist_store(path: Path) -> Dict[str, Any]:
if not path.exists():
return {"next_id": 1, "playlists": []}
try:
data = json.loads(path.read_text(encoding="utf-8"))
if not isinstance(data, dict):
return {"next_id": 1, "playlists": []}
data.setdefault("next_id", 1)
data.setdefault("playlists", [])
if not isinstance(data["playlists"], list):
data["playlists"] = []
return data
except Exception:
return {"next_id": 1, "playlists": []}
def _save_playlist_store(path: Path, data: Dict[str, Any]) -> bool:
try:
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(json.dumps(data, indent=2), encoding="utf-8")
return True
except Exception:
return False
def _save_playlist(name: str, items: List[Any]) -> bool:
    """Create or update a saved playlist called *name*.

    The name lookup is case-insensitive and ignores surrounding whitespace.
    An existing playlist with the same name has its items replaced in place;
    otherwise a new entry is appended with a freshly allocated id.  Returns
    True when the store file was written successfully.
    """
    # Local import: the module-level `from datetime import datetime` does not
    # bring `timezone` into scope.
    from datetime import timezone

    path = _playlist_store_path()
    data = _load_playlist_store(path)
    playlists = data.get("playlists", [])
    # Timezone-aware replacement for the deprecated naive datetime.utcnow();
    # the emitted format is unchanged: "YYYY-MM-DDTHH:MM:SSZ".
    now = (
        datetime.now(timezone.utc)
        .replace(tzinfo=None)
        .isoformat(timespec="seconds")
        + "Z"
    )
    wanted = str(name).strip().lower()
    for pl in playlists:
        if str(pl.get("name")).strip().lower() == wanted:
            pl["items"] = list(items)
            pl["updated_at"] = now
            return _save_playlist_store(path, data)
    new_id = int(data.get("next_id") or 1)
    data["next_id"] = new_id + 1
    playlists.append({
        "id": new_id,
        "name": name,
        "items": list(items),
        "updated_at": now,
    })
    data["playlists"] = playlists
    return _save_playlist_store(path, data)
def _get_playlist_by_id(playlist_id: int) -> Optional[tuple[str, List[Any]]]:
    """Look up a saved playlist by its numeric id.

    Returns a ``(name, items)`` pair on a match, or None when no stored
    entry carries the requested id.  Entries whose id cannot be compared
    numerically are skipped.
    """
    store = _load_playlist_store(_playlist_store_path())
    entries = store.get("playlists", [])
    for entry in entries:
        try:
            if int(entry.get("id")) == int(playlist_id):
                title = str(entry.get("name") or "")
                tracks = list(entry.get("items") or [])
                return title, tracks
        except Exception:
            continue
    return None
def _delete_playlist(playlist_id: int) -> bool:
    """Remove every stored playlist whose id equals *playlist_id*.

    Returns True when at least one entry was removed and the store was
    rewritten; False when nothing matched (the file is left untouched)
    or the rewrite failed.
    """
    store_path = _playlist_store_path()
    store = _load_playlist_store(store_path)
    survivors = []
    found = False
    for entry in store.get("playlists", []):
        try:
            is_target = int(entry.get("id")) == int(playlist_id)
        except Exception:
            # Malformed id on the stored entry: keep it.
            is_target = False
        if is_target:
            found = True
        else:
            survivors.append(entry)
    if not found:
        return False
    store["playlists"] = survivors
    return _save_playlist_store(store_path, store)
def _get_playlists() -> List[Dict[str, Any]]:
    """Return shallow copies of every stored playlist entry.

    Non-dict entries in the store are silently dropped.
    """
    store = _load_playlist_store(_playlist_store_path())
    copies: List[Dict[str, Any]] = []
    for entry in store.get("playlists", []):
        if isinstance(entry, dict):
            copies.append(dict(entry))
    return copies
def _repo_log_dir() -> Path:
d = _repo_root() / "Log"
try:
@@ -828,23 +916,8 @@ def _get_playable_path(
backend_class = type(backend).__name__
backend_target_resolved = True
# Folder stores: resolve to an on-disk file path.
if (hasattr(backend, "get_file") and callable(getattr(backend, "get_file"))
and backend_class == "Folder"):
try:
resolved = backend.get_file(file_hash)
if isinstance(resolved, Path):
path = str(resolved)
elif resolved is not None:
path = str(resolved)
except Exception as e:
debug(
f"Error resolving file path from store '{store}': {e}",
file=sys.stderr
)
# HydrusNetwork: build a playable API file URL without browser side-effects.
elif backend_class == "HydrusNetwork":
if backend_class == "HydrusNetwork":
try:
client = getattr(backend, "_client", None)
base_url = getattr(client, "url", None)
@@ -1367,58 +1440,38 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# If we save 'memory://...', it will work when loaded back.
clean_items.append(item)
# Use config from context or load it
config_data = config if config else {}
storage_path = get_local_storage_path(config_data) or _default_state_dir()
try:
Path(storage_path).mkdir(parents=True, exist_ok=True)
except Exception:
pass
with LocalLibrarySearchOptimizer(storage_path) as db:
if db.save_playlist(playlist_name, clean_items):
debug(f"Playlist saved as '{playlist_name}'")
return 0
else:
debug(f"Failed to save playlist '{playlist_name}'")
return 1
if _save_playlist(playlist_name, clean_items):
debug(f"Playlist saved as '{playlist_name}'")
return 0
debug(f"Failed to save playlist '{playlist_name}'")
return 1
# Handle Load Playlist
current_playlist_name = None
if load_mode:
# Use config from context or load it
config_data = config if config else {}
if index_arg:
try:
pl_id = int(index_arg)
storage_path = get_local_storage_path(config_data)
if not storage_path:
debug("Local storage path not configured.")
return 1
with LocalLibrarySearchOptimizer(storage_path) as db:
if index_arg:
try:
pl_id = int(index_arg)
# Handle Delete Playlist (if -clear is also passed)
if clear_mode:
if db.delete_playlist(pl_id):
debug(f"Playlist ID {pl_id} deleted.")
# Clear index_arg so we fall through to list mode and show updated list
index_arg = None
# Don't return, let it list the remaining playlists
else:
debug(f"Failed to delete playlist ID {pl_id}.")
return 1
# Handle Delete Playlist (if -clear is also passed)
if clear_mode:
if _delete_playlist(pl_id):
debug(f"Playlist ID {pl_id} deleted.")
# Clear index_arg so we fall through to list mode and show updated list
index_arg = None
# Don't return, let it list the remaining playlists
else:
# Handle Load Playlist
result = db.get_playlist_by_id(pl_id)
if result is None:
debug(f"Playlist ID {pl_id} not found.")
return 1
debug(f"Failed to delete playlist ID {pl_id}.")
return 1
else:
# Handle Load Playlist
result = _get_playlist_by_id(pl_id)
if result is None:
debug(f"Playlist ID {pl_id} not found.")
return 1
name, items = result
current_playlist_name = name
name, items = result
current_playlist_name = name
# Queue items (replacing current playlist)
if items:
@@ -1446,42 +1499,42 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
debug(f"Invalid playlist ID: {index_arg}")
return 1
# If we deleted or didn't have an index, list playlists
if not index_arg:
playlists = db.get_playlists()
# If we deleted or didn't have an index, list playlists
if not index_arg:
playlists = _get_playlists()
if not playlists:
debug("No saved playlists found.")
return 0
table = Table("Saved Playlists")
for i, pl in enumerate(playlists):
item_count = len(pl.get("items", []))
row = table.add_row()
# row.add_column("ID", str(pl['id'])) # Hidden as per user request
row.add_column("Name", pl["name"])
row.add_column("Items", str(item_count))
row.add_column("Updated", pl["updated_at"])
# Set the playlist items as the result object for this row
# When user selects @N, they get the list of items
# We also set the source command to .pipe -load <ID> so it loads it
table.set_row_selection_args(i, ["-load", str(pl["id"])])
table.set_source_command(".mpv")
# Register results
ctx.set_last_result_table_overlay(
table,
[p["items"] for p in playlists]
)
ctx.set_current_stage_table(table)
# Do not print directly here.
# Both CmdletExecutor and PipelineExecutor render the current-stage/overlay table,
# so printing here would duplicate output.
if not playlists:
debug("No saved playlists found.")
return 0
table = Table("Saved Playlists")
for i, pl in enumerate(playlists):
item_count = len(pl.get("items", []))
row = table.add_row()
# row.add_column("ID", str(pl['id'])) # Hidden as per user request
row.add_column("Name", pl["name"])
row.add_column("Items", str(item_count))
row.add_column("Updated", pl.get("updated_at") or "")
# Set the playlist items as the result object for this row
# When user selects @N, they get the list of items
# We also set the source command to .pipe -load <ID> so it loads it
table.set_row_selection_args(i, ["-load", str(pl["id"])])
table.set_source_command(".mpv")
# Register results
ctx.set_last_result_table_overlay(
table,
[p["items"] for p in playlists]
)
ctx.set_current_stage_table(table)
# Do not print directly here.
# Both CmdletExecutor and PipelineExecutor render the current-stage/overlay table,
# so printing here would duplicate output.
return 0
# Everything below was originally outside a try block; keep it inside so `start_opts` is in scope.
# Handle Play/Pause commands (but skip if we have index_arg to play a specific item)
@@ -1850,20 +1903,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
if len(stem) == 64 and all(c in "0123456789abcdef"
for c in stem.lower()):
file_hash = stem.lower()
# Find which folder store has this file
if file_storage:
for backend_name in file_storage.list_backends():
backend = file_storage[backend_name]
if type(backend).__name__ == "Folder":
# Check if this backend has the file
try:
result_path = backend.get_file(file_hash)
if isinstance(result_path,
Path) and result_path.exists():
store_name = backend_name
break
except Exception:
pass
# Fallback to inferred store if we couldn't find it
if not store_name: