fix lyrics

This commit is contained in:
2026-03-16 02:57:00 -07:00
parent 4dd7556e85
commit 11f03cae3e
3 changed files with 364 additions and 15 deletions

View File

@@ -92,7 +92,7 @@
"(hitfile\\.net/[a-z0-9A-Z]{4,9})" "(hitfile\\.net/[a-z0-9A-Z]{4,9})"
], ],
"regexp": "(hitf\\.(to|cc)/([a-z0-9A-Z]{4,9}))|(htfl\\.(net|to|cc)/([a-z0-9A-Z]{4,9}))|(hitfile\\.(net)/download/free/([a-z0-9A-Z]{4,9}))|((hitfile\\.net/[a-z0-9A-Z]{4,9}))", "regexp": "(hitf\\.(to|cc)/([a-z0-9A-Z]{4,9}))|(htfl\\.(net|to|cc)/([a-z0-9A-Z]{4,9}))|(hitfile\\.(net)/download/free/([a-z0-9A-Z]{4,9}))|((hitfile\\.net/[a-z0-9A-Z]{4,9}))",
"status": false "status": true
}, },
"mega": { "mega": {
"name": "mega", "name": "mega",
@@ -595,7 +595,7 @@
"(simfileshare\\.net/download/[0-9]+/)" "(simfileshare\\.net/download/[0-9]+/)"
], ],
"regexp": "(simfileshare\\.net/download/[0-9]+/)", "regexp": "(simfileshare\\.net/download/[0-9]+/)",
"status": true "status": false
}, },
"streamtape": { "streamtape": {
"name": "streamtape", "name": "streamtape",

View File

@@ -26,6 +26,7 @@ from __future__ import annotations
import argparse import argparse
import bisect import bisect
import hashlib import hashlib
import json
import os import os
import re import re
import sys import sys
@@ -66,6 +67,10 @@ _ITEM_HASH_PROP = "user-data/medeia-item-hash"
_OSD_STYLE_SAVED: Optional[Dict[str, Any]] = None _OSD_STYLE_SAVED: Optional[Dict[str, Any]] = None
_OSD_STYLE_APPLIED: bool = False _OSD_STYLE_APPLIED: bool = False
_NOTES_CACHE_VERSION = 1
_DEFAULT_NOTES_CACHE_TTL_S = 900.0
_DEFAULT_NOTES_CACHE_WAIT_S = 1.5
_DEFAULT_NOTES_PENDING_WAIT_S = 12.0
def _single_instance_lock_path(ipc_path: str) -> Path: def _single_instance_lock_path(ipc_path: str) -> Path:
@@ -571,6 +576,204 @@ def _load_config_best_effort() -> dict:
return {} return {}
def _cache_float_config(config: Optional[dict], key: str, default: float) -> float:
try:
raw = (config or {}).get(key)
if raw is None:
return float(default)
value = float(raw)
if value < 0:
return 0.0
return value
except Exception:
return float(default)
def _notes_cache_root() -> Path:
root = Path(tempfile.gettempdir()) / "medeia-mpv-notes" / "cache"
root.mkdir(parents=True, exist_ok=True)
return root
def _notes_cache_key(store: str, file_hash: str) -> str:
return hashlib.sha1(
f"{str(store or '').strip().lower()}:{str(file_hash or '').strip().lower()}".encode(
"utf-8",
errors="ignore",
)
).hexdigest()
def _notes_cache_path(store: str, file_hash: str) -> Path:
    """Resolved path of the JSON notes cache file for this store/hash pair."""
    filename = f"notes-{_notes_cache_key(store, file_hash)}.json"
    return (_notes_cache_root() / filename).resolve()
def _notes_pending_path(store: str, file_hash: str) -> Path:
    """Resolved path of the prefetch-in-flight marker for this store/hash pair."""
    filename = f"notes-{_notes_cache_key(store, file_hash)}.pending"
    return (_notes_cache_root() / filename).resolve()
def _normalize_notes_payload(notes: Any) -> Dict[str, str]:
if not isinstance(notes, dict):
return {}
return {
str(k): str(v or "")
for k, v in notes.items()
if str(k).strip()
}
def load_cached_notes(
    store: Optional[str],
    file_hash: Optional[str],
    *,
    config: Optional[dict] = None,
) -> Optional[Dict[str, str]]:
    """Return cached notes for (store, file_hash), or None on any miss.

    A miss is: missing identifiers, no cache file, an expired entry
    (per ``lyric_notes_cache_ttl_seconds``; a TTL of 0 disables expiry),
    unreadable or malformed JSON, or a cache-version mismatch.
    """
    if not store or not file_hash:
        return None
    cache_file = _notes_cache_path(str(store), str(file_hash))
    if not cache_file.exists():
        return None
    ttl_s = _cache_float_config(
        config, "lyric_notes_cache_ttl_seconds", _DEFAULT_NOTES_CACHE_TTL_S
    )
    if ttl_s > 0:
        # An unreadable mtime is treated the same as an expired entry.
        try:
            age_s = max(0.0, time.time() - float(cache_file.stat().st_mtime))
        except Exception:
            return None
        if age_s > ttl_s:
            return None
    try:
        payload = json.loads(cache_file.read_text(encoding="utf-8", errors="replace"))
    except Exception:
        return None
    if not isinstance(payload, dict):
        return None
    if int(payload.get("version") or 0) != _NOTES_CACHE_VERSION:
        return None
    return _normalize_notes_payload(payload.get("notes"))
def store_cached_notes(
    store: Optional[str],
    file_hash: Optional[str],
    notes: Any,
) -> bool:
    """Persist *notes* for (store, file_hash); return True on success.

    The payload is written to a sibling ``.tmp`` file and atomically renamed
    into place so readers never observe a half-written cache entry.  All
    failures (including missing identifiers) report False.
    """
    if not store or not file_hash:
        return False
    cache_file = _notes_cache_path(str(store), str(file_hash))
    scratch = cache_file.with_suffix(".tmp")
    payload = {
        "version": _NOTES_CACHE_VERSION,
        "saved_at": time.time(),
        "store": str(store),
        "hash": str(file_hash),
        "notes": _normalize_notes_payload(notes),
    }
    try:
        cache_file.parent.mkdir(parents=True, exist_ok=True)
        scratch.write_text(
            json.dumps(payload, ensure_ascii=False, indent=2),
            encoding="utf-8",
            errors="replace",
        )
        scratch.replace(cache_file)
    except Exception:
        return False
    return True
def set_notes_prefetch_pending(
    store: Optional[str],
    file_hash: Optional[str],
    pending: bool,
) -> None:
    """Create or remove the on-disk "prefetch in flight" marker.

    When *pending* is True the marker is (re)written with the current
    timestamp so readers can judge staleness; when False it is removed.
    All filesystem errors are swallowed — the marker is best-effort only.
    """
    if not store or not file_hash:
        return
    path = _notes_pending_path(str(store), str(file_hash))
    if pending:
        try:
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_text(str(time.time()), encoding="utf-8", errors="replace")
        except Exception:
            pass
        return
    try:
        # missing_ok avoids the exists()/unlink() TOCTOU race and matches
        # the unlink style used by is_notes_prefetch_pending().
        path.unlink(missing_ok=True)
    except Exception:
        pass
def is_notes_prefetch_pending(
    store: Optional[str],
    file_hash: Optional[str],
    *,
    stale_after_s: float = 60.0,
) -> bool:
    """Report whether a prefetch marker exists and is still fresh.

    Markers older than *stale_after_s* seconds (when positive) are deleted
    and treated as absent.  Any filesystem error reads as "not pending".
    """
    if not store or not file_hash:
        return False
    marker = _notes_pending_path(str(store), str(file_hash))
    if not marker.exists():
        return False
    try:
        age_s = max(0.0, time.time() - float(marker.stat().st_mtime))
    except Exception:
        return False
    if stale_after_s > 0 and age_s > stale_after_s:
        try:
            marker.unlink(missing_ok=True)
        except Exception:
            pass
        return False
    return True
def _infer_artist_title_from_mpv(client: MPVIPCClient) -> tuple[Optional[str], Optional[str]]:
    """Best-effort artist/title lookup from mpv metadata properties.

    Each candidate property is queried in priority order; the first value
    that survives _sanitize_query() wins.  Either slot may come back None.
    """
    artist_candidates = [
        "metadata/by-key/artist",
        "metadata/by-key/Artist",
        "metadata/by-key/album_artist",
        "metadata/by-key/ALBUMARTIST",
    ]
    title_candidates = [
        "metadata/by-key/title",
        "metadata/by-key/Title",
        "media-title",
    ]

    def _first_sane(keys):
        found = None
        for prop in keys:
            try:
                raw = _ipc_get_property(client, prop, None)
            except Exception:
                raw = None
            found = _sanitize_query(str(raw) if isinstance(raw, str) else None)
            if found:
                break
        return found

    return _first_sane(artist_candidates), _first_sane(title_candidates)
def _extract_note_text(notes: Dict[str, str], name: str) -> Optional[str]: def _extract_note_text(notes: Dict[str, str], name: str) -> Optional[str]:
"""Return stripped text from the note named *name*, or None if absent or blank.""" """Return stripped text from the note named *name*, or None if absent or blank."""
if not isinstance(notes, dict) or not notes: if not isinstance(notes, dict) or not notes:
@@ -874,6 +1077,8 @@ class _PlaybackState:
last_target: Optional[str] = None last_target: Optional[str] = None
fetch_attempt_key: Optional[str] = None fetch_attempt_key: Optional[str] = None
fetch_attempt_at: float = 0.0 fetch_attempt_at: float = 0.0
cache_wait_key: Optional[str] = None
cache_wait_started_at: float = 0.0
def clear(self, client: MPVIPCClient, *, clear_hash: bool = True) -> None: def clear(self, client: MPVIPCClient, *, clear_hash: bool = True) -> None:
"""Reset backend resolution and clean up any active OSD / external subtitle. """Reset backend resolution and clean up any active OSD / external subtitle.
@@ -889,6 +1094,8 @@ class _PlaybackState:
self.file_hash = None self.file_hash = None
self.entries = [] self.entries = []
self.times = [] self.times = []
self.cache_wait_key = None
self.cache_wait_started_at = 0.0
if self.loaded_key is not None: if self.loaded_key is not None:
_osd_clear_and_restore(client) _osd_clear_and_restore(client)
self.loaded_key = None self.loaded_key = None
@@ -1035,6 +1242,8 @@ def run_auto_overlay(
state.store_name = None state.store_name = None
state.backend = None state.backend = None
state.key = None state.key = None
state.cache_wait_key = None
state.cache_wait_started_at = 0.0
if store_override and (not hash_override or hash_override == state.file_hash): if store_override and (not hash_override or hash_override == state.file_hash):
reg = _make_registry() reg = _make_registry()
@@ -1159,11 +1368,51 @@ def run_auto_overlay(
and state.file_hash and state.file_hash
and state.backend and state.backend
): ):
notes: Dict[str, str] = {} notes: Optional[Dict[str, str]] = None
cache_wait_s = _cache_float_config(
cfg,
"lyric_notes_cache_wait_seconds",
_DEFAULT_NOTES_CACHE_WAIT_S,
)
pending_wait_s = _cache_float_config(
cfg,
"lyric_notes_pending_wait_seconds",
_DEFAULT_NOTES_PENDING_WAIT_S,
)
try: try:
notes = state.backend.get_note(state.file_hash, config=cfg) or {} notes = load_cached_notes(state.store_name, state.file_hash, config=cfg)
except Exception: except Exception:
notes = {} notes = None
if notes is None:
now = time.time()
if state.cache_wait_key != state.key:
state.cache_wait_key = state.key
state.cache_wait_started_at = now
pending = is_notes_prefetch_pending(state.store_name, state.file_hash)
waited_s = max(0.0, now - float(state.cache_wait_started_at or now))
if pending and waited_s < pending_wait_s:
time.sleep(min(max(poll_s, 0.05), 0.2))
continue
if waited_s < cache_wait_s:
time.sleep(min(max(poll_s, 0.05), 0.2))
continue
try:
notes = state.backend.get_note(state.file_hash, config=cfg) or {}
except Exception:
notes = {}
try:
store_cached_notes(state.store_name, state.file_hash, notes)
except Exception:
pass
state.cache_wait_key = None
state.cache_wait_started_at = 0.0
try: try:
_log( _log(
@@ -1217,21 +1466,21 @@ def run_auto_overlay(
state.fetch_attempt_key = state.key state.fetch_attempt_key = state.key
state.fetch_attempt_at = now state.fetch_attempt_at = now
artist: Optional[str] = None artist, title = _infer_artist_title_from_mpv(client)
title: Optional[str] = None
duration_s: Optional[float] = None duration_s: Optional[float] = None
try: try:
duration_s = _ipc_get_property(client, "duration", None) duration_s = _ipc_get_property(client, "duration", None)
except Exception: except Exception:
pass pass
try: if not artist or not title:
tags, _src = state.backend.get_tag(state.file_hash, config=cfg) try:
if isinstance(tags, list): tags, _src = state.backend.get_tag(state.file_hash, config=cfg)
artist, title = _infer_artist_title_from_tags( if isinstance(tags, list):
[str(x) for x in tags] artist, title = _infer_artist_title_from_tags(
) [str(x) for x in tags]
except Exception: )
pass except Exception:
pass
_log( _log(
f"Autofetch query artist={artist!r} title={title!r}" f"Autofetch query artist={artist!r} title={title!r}"

View File

@@ -5,6 +5,7 @@ import sys
import json import json
import socket import socket
import re import re
import threading
from datetime import datetime, timedelta from datetime import datetime, timedelta
from urllib.parse import urlparse, parse_qs from urllib.parse import urlparse, parse_qs
from pathlib import Path from pathlib import Path
@@ -20,6 +21,8 @@ from SYS.config import get_hydrus_access_key, get_hydrus_url
_ALLDEBRID_UNLOCK_CACHE: Dict[str, _ALLDEBRID_UNLOCK_CACHE: Dict[str,
str] = {} str] = {}
_NOTES_PREFETCH_INFLIGHT: set[str] = set()
_NOTES_PREFETCH_LOCK = threading.Lock()
def _repo_root() -> Path: def _repo_root() -> Path:
@@ -389,6 +392,99 @@ def _set_mpv_item_context(store: Optional[str], file_hash: Optional[str]) -> Non
pass pass
def _get_lyric_prefetch_limit(config: Optional[Dict[str, Any]]) -> int:
try:
raw = (config or {}).get("lyric_prefetch_limit")
if raw is None:
return 5
value = int(raw)
except Exception:
return 5
return max(0, min(20, value))
def _prefetch_notes_async(
    store: Optional[str],
    file_hash: Optional[str],
    config: Optional[Dict[str, Any]],
) -> None:
    """Warm the MPV notes cache for one item on a background daemon thread.

    Deduplicates concurrent requests via _NOTES_PREFETCH_INFLIGHT (guarded by
    _NOTES_PREFETCH_LOCK) so at most one worker runs per (store, hash) pair
    at a time.  All failures are logged and swallowed — prefetching is
    purely best-effort.
    """
    if not store or not file_hash:
        return
    # Normalized (strip + lowercase) dedupe key for the inflight set.
    key = f"{str(store).strip().lower()}:{str(file_hash).strip().lower()}"
    with _NOTES_PREFETCH_LOCK:
        if key in _NOTES_PREFETCH_INFLIGHT:
            return
        _NOTES_PREFETCH_INFLIGHT.add(key)
    # Snapshot the config so the worker thread is isolated from caller mutation.
    cfg = dict(config or {})
    def _worker() -> None:
        try:
            # Imported lazily inside the worker to avoid import cycles at module load.
            from MPV.lyric import (
                load_cached_notes,
                set_notes_prefetch_pending,
                store_cached_notes,
            )
            from Store import Store
            cached = load_cached_notes(store, file_hash, config=cfg)
            if cached is not None:
                # Fresh cache entry already present; nothing to do.
                return
            # Mark the fetch as in flight so readers can briefly wait for it.
            set_notes_prefetch_pending(store, file_hash, True)
            registry = Store(cfg, suppress_debug=True)
            backend = registry[str(store)]
            notes = backend.get_note(str(file_hash), config=cfg) or {}
            store_cached_notes(store, file_hash, notes)
            try:
                debug(
                    f"Prefetched MPV notes cache for {key} keys={sorted(str(k) for k in notes)}"
                )
            except Exception:
                debug(f"Prefetched MPV notes cache for {key}")
        except Exception as exc:
            debug(f"MPV note prefetch failed for {key}: {exc}", file=sys.stderr)
        finally:
            # Always clear the pending marker and the inflight slot, even on
            # failure, so a later attempt can retry.
            try:
                from MPV.lyric import set_notes_prefetch_pending
                set_notes_prefetch_pending(store, file_hash, False)
            except Exception:
                pass
            with _NOTES_PREFETCH_LOCK:
                _NOTES_PREFETCH_INFLIGHT.discard(key)
    thread = threading.Thread(
        target=_worker,
        name=f"mpv-notes-prefetch-{file_hash[:8]}",
        daemon=True,
    )
    thread.start()
def _schedule_notes_prefetch(items: Sequence[Any], config: Optional[Dict[str, Any]]) -> None:
    """Kick off background note prefetches for up to the configured item limit.

    Items without a resolvable (store, hash) pair are skipped; duplicates are
    deduplicated before counting against the limit (``lyric_prefetch_limit``,
    default 5, 0 disables prefetching entirely).
    """
    limit = _get_lyric_prefetch_limit(config)
    if limit <= 0:
        return
    seen: set[str] = set()
    scheduled = 0
    for item in items or []:
        store, file_hash = _extract_store_and_hash(item)
        if not store or not file_hash:
            continue
        # Normalize both parts (strip + lowercase) so the dedupe key here
        # matches the inflight key computed by _prefetch_notes_async; the
        # original only lowercased the store, so hash-case variants slipped
        # past this set.
        key = f"{store.strip().lower()}:{str(file_hash).strip().lower()}"
        if key in seen:
            continue
        seen.add(key)
        _prefetch_notes_async(store, file_hash, config)
        scheduled += 1
        if scheduled >= limit:
            break
def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]: def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
"""Get the current playlist from MPV. Returns None if MPV is not running.""" """Get the current playlist from MPV. Returns None if MPV is not running."""
cmd = { cmd = {
@@ -1143,6 +1239,8 @@ def _queue_items(
except Exception as e: except Exception as e:
debug(f"Warning: Could not initialize Store registry: {e}", file=sys.stderr) debug(f"Warning: Could not initialize Store registry: {e}", file=sys.stderr)
_schedule_notes_prefetch(items, config)
# Dedupe existing playlist before adding more (unless we're replacing it) # Dedupe existing playlist before adding more (unless we're replacing it)
existing_targets: set[str] = set() existing_targets: set[str] = set()
if not clear_first: if not clear_first:
@@ -2226,6 +2324,8 @@ def _start_mpv(
hydrus_header = _build_hydrus_header(config or {}) hydrus_header = _build_hydrus_header(config or {})
ytdl_opts = _build_ytdl_options(config, hydrus_header) ytdl_opts = _build_ytdl_options(config, hydrus_header)
_schedule_notes_prefetch(items[:1], config)
cookies_path = None cookies_path = None
try: try:
from tool.ytdlp import YtDlpTool from tool.ytdlp import YtDlpTool