This commit is contained in:
2026-03-25 22:39:30 -07:00
parent c31402c8f1
commit 562acd809c
46 changed files with 2367 additions and 1868 deletions

View File

@@ -23,6 +23,15 @@ _ALLDEBRID_UNLOCK_CACHE: Dict[str,
str] = {}
# NOTE(review): presumably keys of note-prefetch jobs currently in flight,
# guarded by the lock below — confirm against the prefetch code.
_NOTES_PREFETCH_INFLIGHT: set[str] = set()
_NOTES_PREFETCH_LOCK = threading.Lock()
# Process-wide cache of the parsed playlist store; invalidated by comparing
# the file's st_mtime_ns against _PLAYLIST_STORE_MTIME_NS on each load.
_PLAYLIST_STORE_CACHE: Optional[Dict[str, Any]] = None
_PLAYLIST_STORE_MTIME_NS: Optional[int] = None
# Regexes hoisted to module scope so hot paths don't recompile them.
_SHA256_RE = re.compile(r"[0-9a-f]{64}")  # 64-hex digest anywhere in a string
_SHA256_FULL_RE = re.compile(r"^[0-9a-f]{64}$")  # entire string is a digest
_EXTINF_TITLE_RE = re.compile(r"#EXTINF:-1,(.*?)(?:\n|\r|$)")  # M3U title line
_WINDOWS_PATH_RE = re.compile(r"^[a-z]:[\\/]", flags=re.IGNORECASE)  # drive-letter path
_HASH_QUERY_RE = re.compile(r"hash=([0-9a-f]{64})")  # hash= query parameter
_IPV4_RE = re.compile(r"^\d+\.\d+\.\d+\.\d+$")  # dotted-quad host
_MPD_PATH_RE = re.compile(r"\.mpd($|\?)")  # DASH manifest extension
def _repo_root() -> Path:
@@ -36,26 +45,56 @@ def _playlist_store_path() -> Path:
return _repo_root() / "mpv_playlists.json"
def _load_playlist_store(path: Path) -> Dict[str, Any]:
if not path.exists():
return {"next_id": 1, "playlists": []}
def _new_playlist_store() -> Dict[str, Any]:
return {"next_id": 1, "playlists": []}
def _normalize_playlist_store(data: Any) -> Dict[str, Any]:
if not isinstance(data, dict):
return _new_playlist_store()
normalized = dict(data)
try:
data = json.loads(path.read_text(encoding="utf-8"))
if not isinstance(data, dict):
return {"next_id": 1, "playlists": []}
data.setdefault("next_id", 1)
data.setdefault("playlists", [])
if not isinstance(data["playlists"], list):
data["playlists"] = []
next_id = int(normalized.get("next_id") or 1)
except Exception:
next_id = 1
normalized["next_id"] = max(next_id, 1)
playlists = normalized.get("playlists")
normalized["playlists"] = playlists if isinstance(playlists, list) else []
return normalized
def _load_playlist_store(path: Path) -> Dict[str, Any]:
    """Load the playlist store from *path*, with an mtime-based cache.

    Returns the cached store when the file's st_mtime_ns is unchanged since
    the last successful load; otherwise re-reads and normalizes the JSON.
    On a missing file or any read/parse error, resets the cache to a fresh
    empty store and returns it (best-effort, never raises).
    """
    global _PLAYLIST_STORE_CACHE, _PLAYLIST_STORE_MTIME_NS
    if not path.exists():
        _PLAYLIST_STORE_CACHE = _new_playlist_store()
        _PLAYLIST_STORE_MTIME_NS = None
        return _PLAYLIST_STORE_CACHE
    try:
        current_mtime_ns = path.stat().st_mtime_ns
        # Cache hit: file has not been modified since the last load.
        if (_PLAYLIST_STORE_CACHE is not None and
                _PLAYLIST_STORE_MTIME_NS == current_mtime_ns):
            return _PLAYLIST_STORE_CACHE
        data = _normalize_playlist_store(json.loads(path.read_text(encoding="utf-8")))
        _PLAYLIST_STORE_CACHE = data
        _PLAYLIST_STORE_MTIME_NS = current_mtime_ns
        return data
    except Exception:
        # Unreadable/corrupt file: drop any stale cache so a later fix to
        # the file is picked up, and hand back a usable empty store.
        _PLAYLIST_STORE_CACHE = _new_playlist_store()
        _PLAYLIST_STORE_MTIME_NS = None
        return _PLAYLIST_STORE_CACHE
def _save_playlist_store(path: Path, data: Dict[str, Any]) -> bool:
    """Normalize *data* and persist it to *path* as indented JSON.

    Creates parent directories as needed, writes exactly once (the
    normalized form), and updates the module-level cache plus its mtime
    stamp so the next load is a cache hit. Returns True on success,
    False on any error (best-effort, never raises).
    """
    global _PLAYLIST_STORE_CACHE, _PLAYLIST_STORE_MTIME_NS
    try:
        normalized = _normalize_playlist_store(data)
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(normalized, indent=2), encoding="utf-8")
        # Keep the cache coherent with what was just written to disk.
        _PLAYLIST_STORE_CACHE = normalized
        _PLAYLIST_STORE_MTIME_NS = path.stat().st_mtime_ns
        return True
    except Exception:
        return False
@@ -559,7 +598,7 @@ def _extract_store_and_hash(item: Any) -> tuple[Optional[str], Optional[str]]:
else:
text = getattr(item, "path", None) or getattr(item, "url", None)
if text:
m = re.search(r"[0-9a-f]{64}", str(text).lower())
m = _SHA256_RE.search(str(text).lower())
if m:
file_hash = m.group(0)
except Exception:
@@ -707,7 +746,7 @@ def _extract_title_from_item(item: Dict[str, Any]) -> str:
try:
# Extract title from #EXTINF:-1,Title
# Use regex to find title between #EXTINF:-1, and newline
match = re.search(r"#EXTINF:-1,(.*?)(?:\n|\r|$)", filename)
match = _EXTINF_TITLE_RE.search(filename)
if match:
extracted_title = match.group(1).strip()
if not title or title == "memory://":
@@ -817,7 +856,7 @@ def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
return None
# If it's already a bare hydrus hash, use it directly
lower_real = real.lower()
if re.fullmatch(r"[0-9a-f]{64}", lower_real):
if _SHA256_FULL_RE.fullmatch(lower_real):
return lower_real
# If it's a hydrus file URL, normalize to the hash for dedupe
@@ -829,7 +868,7 @@ def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
if parsed.path.endswith("/get_files/file"):
qs = parse_qs(parsed.query)
h = qs.get("hash", [None])[0]
if h and re.fullmatch(r"[0-9a-f]{64}", h.lower()):
if h and _SHA256_FULL_RE.fullmatch(h.lower()):
return h.lower()
except Exception:
pass
@@ -862,7 +901,7 @@ def _infer_store_from_playlist_item(
target = memory_target
# Hydrus hashes: bare 64-hex entries
if re.fullmatch(r"[0-9a-f]{64}", target.lower()):
if _SHA256_FULL_RE.fullmatch(target.lower()):
# If we have file_storage, query each Hydrus instance to find which one has this hash
if file_storage:
hash_str = target.lower()
@@ -877,7 +916,7 @@ def _infer_store_from_playlist_item(
if lower.startswith("hydrus://"):
# Extract hash from hydrus:// URL if possible
if file_storage:
hash_match = re.search(r"[0-9a-f]{64}", target.lower())
hash_match = _SHA256_RE.search(target.lower())
if hash_match:
hash_str = hash_match.group(0)
hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
@@ -886,9 +925,7 @@ def _infer_store_from_playlist_item(
return "hydrus"
# Windows / UNC paths
if re.match(r"^[a-z]:[\\/]",
target,
flags=re.IGNORECASE) or target.startswith("\\\\"):
if _WINDOWS_PATH_RE.match(target) or target.startswith("\\\\"):
return "local"
# file:// url
@@ -918,7 +955,7 @@ def _infer_store_from_playlist_item(
# Hydrus API URL - try to extract hash and find instance
if file_storage:
# Try to extract hash from URL parameters
hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
hash_match = _HASH_QUERY_RE.search(target.lower())
if hash_match:
hash_str = hash_match.group(1)
hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
@@ -929,10 +966,10 @@ def _infer_store_from_playlist_item(
if hydrus_instance:
return hydrus_instance
return "hydrus"
if re.match(r"^\d+\.\d+\.\d+\.\d+$", host_stripped) and "get_files" in path:
if _IPV4_RE.match(host_stripped) and "get_files" in path:
# IP-based Hydrus URL
if file_storage:
hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
hash_match = _HASH_QUERY_RE.search(target.lower())
if hash_match:
hash_str = hash_match.group(1)
hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
@@ -1002,7 +1039,7 @@ def _is_hydrus_path(path: str, hydrus_url: Optional[str]) -> bool:
pass
if "get_files" in path_part or "file?hash=" in path_part:
return True
if re.match(r"^\d+\.\d+\.\d+\.\d+$", host) and "get_files" in path_part:
if _IPV4_RE.match(host) and "get_files" in path_part:
return True
return False
@@ -1493,7 +1530,7 @@ def _queue_items(
# Set it via IPC before loadfile so the currently running MPV can play the manifest.
try:
target_str = str(target or "")
if re.search(r"\.mpd($|\?)", target_str.lower()):
if _MPD_PATH_RE.search(target_str.lower()):
_send_ipc_command(
{
"command": [
@@ -1556,8 +1593,9 @@ def _queue_items(
if target:
# If we just have a hydrus hash, build a direct file URL for MPV
if re.fullmatch(r"[0-9a-f]{64}",
str(target).strip().lower()) and effective_hydrus_url:
if _SHA256_FULL_RE.fullmatch(
str(target).strip().lower()
) and effective_hydrus_url:
target = (
f"{effective_hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"
)
@@ -2337,7 +2375,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# Check if it's a Hydrus URL
if "get_files/file" in real_path or "hash=" in real_path:
# Extract hash from Hydrus URL
hash_match = re.search(r"hash=([0-9a-f]{64})", real_path.lower())
hash_match = _HASH_QUERY_RE.search(real_path.lower())
if hash_match:
file_hash = hash_match.group(1)
# Try to find which Hydrus instance has this file
@@ -2576,7 +2614,7 @@ def _start_mpv(
candidate = it.get("path") or it.get("url")
else:
candidate = getattr(it, "path", None) or getattr(it, "url", None)
if candidate and re.search(r"\.mpd($|\?)", str(candidate).lower()):
if candidate and _MPD_PATH_RE.search(str(candidate).lower()):
needs_mpd_whitelist = True
break
if needs_mpd_whitelist: