commit 9eff65d1af (parent e9b505e609)
Author: nose
Date: 2025-11-27 10:59:01 -08:00
30 changed files with 2099 additions and 1095 deletions

View File

@@ -63,7 +63,7 @@ def _progress_callback(status: Dict[str, Any]) -> None:
sys.stdout.write("\r" + " " * 70 + "\r")
sys.stdout.flush()
# Log finished message (visible)
log(f"✓ Download finished: {status.get('filename')}")
debug(f"✓ Download finished: {status.get('filename')}")
elif event in ("postprocessing", "processing"):
debug(f"Post-processing: {status.get('postprocessor')}")
@@ -629,7 +629,7 @@ def download_media(
_ensure_yt_dlp_ready()
ytdl_options = _build_ytdlp_options(opts)
log(f"Starting yt-dlp download: {opts.url}")
debug(f"Starting yt-dlp download: {opts.url}")
if debug_logger is not None:
debug_logger.write_record("ytdlp-start", {"url": opts.url})
@@ -707,7 +707,7 @@ def download_media(
or entry.get("url")
)
log(f"✓ Downloaded: {media_path.name} ({len(tags)} tags)")
debug(f"✓ Downloaded: {media_path.name} ({len(tags)} tags)")
if debug_logger is not None:
debug_logger.write_record(
"downloaded",

View File

@@ -50,6 +50,10 @@ class StorageBackend(ABC):
Exception: If upload fails
"""
@abstractmethod
def get_name(self) -> str:
"""Get the unique name of this backend."""
def search(self, query: str, **kwargs: Any) -> list[Dict[str, Any]]:
"""Search for files in backends that support it.
@@ -125,7 +129,7 @@ class LocalStorageBackend(StorageBackend):
try:
# Compute file hash
file_hash = sha256_file(file_path)
log(f"File hash: {file_hash}", file=sys.stderr)
debug(f"File hash: {file_hash}", file=sys.stderr)
dest_dir = Path(location).expanduser()
dest_dir.mkdir(parents=True, exist_ok=True)
@@ -148,13 +152,13 @@ class LocalStorageBackend(StorageBackend):
if move_file:
shutil.move(str(file_path), dest_file)
log(f"Local move: {dest_file}", file=sys.stderr)
debug(f"Local move: {dest_file}", file=sys.stderr)
else:
shutil.copy2(file_path, dest_file)
log(f"Local copy: {dest_file}", file=sys.stderr)
debug(f"Local copy: {dest_file}", file=sys.stderr)
return str(dest_file)
except Exception as exc:
log(f"Local copy failed: {exc}", file=sys.stderr)
debug(f"Local copy failed: {exc}", file=sys.stderr)
raise
def search(self, query: str, **kwargs: Any) -> list[Dict[str, Any]]:
@@ -200,7 +204,6 @@ class LocalStorageBackend(StorageBackend):
# Try database search first (much faster than filesystem scan)
try:
debug(f"Connecting to local library DB at {search_dir}")
db = LocalLibraryDB(search_dir)
cursor = db.connection.cursor()
@@ -261,8 +264,9 @@ class LocalStorageBackend(StorageBackend):
all_tags = [row[0] for row in cursor.fetchall()]
results.append({
"name": file_path.name,
"title": file_path.name,
"name": file_path.stem,
"title": file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": path_str,
"target": path_str,
"origin": "local",
@@ -284,35 +288,60 @@ class LocalStorageBackend(StorageBackend):
# 2. Simple tags (without namespace) containing the query
# NOTE: Does NOT match namespaced tags (e.g., "joe" won't match "channel:Joe Mullan")
# Use explicit namespace search for that (e.g., "channel:joe*")
query_pattern = f"%{query_lower}%"
debug(f"Performing filename/tag search: {query_pattern}")
# Split query into terms for AND logic
terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
if not terms:
terms = [query_lower]
debug(f"Performing filename/tag search for terms: {terms}")
# Fetch more results than requested to allow for filtering
fetch_limit = (limit or 45) * 50
cursor.execute("""
# 1. Filename search (AND logic)
conditions = ["LOWER(f.file_path) LIKE ?" for _ in terms]
params = [f"%{t}%" for t in terms]
where_clause = " AND ".join(conditions)
cursor.execute(f"""
SELECT DISTINCT f.id, f.file_path, f.file_size
FROM files f
WHERE LOWER(f.file_path) LIKE ?
WHERE {where_clause}
ORDER BY f.file_path
LIMIT ?
""", (query_pattern, fetch_limit))
""", (*params, fetch_limit))
rows = cursor.fetchall()
debug(f"Found {len(rows)} filename matches in DB (before whole-word filter)")
# Compile regex for whole word matching
try:
word_regex = re.compile(r'\b' + re.escape(query_lower) + r'\b', re.IGNORECASE)
except Exception:
word_regex = None
# Compile regex for whole word matching (only if single term, otherwise skip)
word_regex = None
if len(terms) == 1:
term = terms[0]
# Check if term contains wildcard characters
has_wildcard = '*' in term or '?' in term
if has_wildcard:
# Use fnmatch for wildcard patterns (e.g., "sie*" matches "SiebeliebenWohl...")
try:
from fnmatch import translate
word_regex = re.compile(translate(term), re.IGNORECASE)
except Exception:
word_regex = None
else:
# Use word boundary for exact terms (backwards compatibility)
try:
word_regex = re.compile(r'\b' + re.escape(term) + r'\b', re.IGNORECASE)
except Exception:
word_regex = None
seen_files = set()
for file_id, file_path_str, size_bytes in rows:
if not file_path_str or file_path_str in seen_files:
continue
# Apply whole word filter on filename
# Apply whole word filter on filename if single term
if word_regex:
p = Path(file_path_str)
if not word_regex.search(p.name):
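# Standalone sketch (illustrative, not part of the commit) of the logic above:
# per-term LIKE conditions joined with AND, then the single-term
# wildcard/word-boundary post-filter applied to the rows.
import re
import sqlite3
from fnmatch import translate

terms = ["joe"]
conditions = " AND ".join("LOWER(path) LIKE ?" for _ in terms)
params = [f"%{t}%" for t in terms]

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE files (path TEXT)")
db.execute("INSERT INTO files VALUES ('media/Joe Mullan - intro.mp3')")
rows = db.execute(f"SELECT path FROM files WHERE {conditions}", params).fetchall()

term = terms[0]
pattern = translate(term) if ('*' in term or '?' in term) else r'\b' + re.escape(term) + r'\b'
word_regex = re.compile(pattern, re.IGNORECASE)
print([p for (p,) in rows if word_regex.search(p)])  # ['media/Joe Mullan - intro.mp3']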
@@ -332,8 +361,9 @@ class LocalStorageBackend(StorageBackend):
tags = [row[0] for row in cursor.fetchall()]
results.append({
"name": file_path.name,
"title": file_path.name,
"name": file_path.stem,
"title": file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": path_str,
"target": path_str,
"origin": "local",
@@ -343,6 +373,12 @@ class LocalStorageBackend(StorageBackend):
})
# Also search for simple tags (without namespace) containing the query
# Tag search keeps the previous single-pattern behavior: with multiple terms it
# falls back to matching the raw query as one substring (a tag containing
# "term1 term2" or "term1,term2"), so there is no AND logic across separate tags.
query_pattern = f"%{query_lower}%"
cursor.execute("""
SELECT DISTINCT f.id, f.file_path, f.file_size
FROM files f
@@ -371,8 +407,9 @@ class LocalStorageBackend(StorageBackend):
tags = [row[0] for row in cursor.fetchall()]
results.append({
"name": file_path.name,
"title": file_path.name,
"name": file_path.stem,
"title": file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": path_str,
"target": path_str,
"origin": "local",
@@ -409,8 +446,9 @@ class LocalStorageBackend(StorageBackend):
tags = [row[0] for row in cursor.fetchall()]
results.append({
"name": file_path.name,
"title": file_path.name,
"name": file_path.stem,
"title": file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": path_str,
"target": path_str,
"origin": "local",
@@ -434,6 +472,11 @@ class LocalStorageBackend(StorageBackend):
recursive = kwargs.get("recursive", True)
pattern = "**/*" if recursive else "*"
# Split query into terms for AND logic
terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
if not terms:
terms = [query_lower]
count = 0
for file_path in search_dir.glob(pattern):
if not file_path.is_file():
@@ -442,14 +485,26 @@ class LocalStorageBackend(StorageBackend):
if lower_name.endswith('.tags') or lower_name.endswith('.metadata') \
or lower_name.endswith('.notes') or lower_name.endswith('.tags.txt'):
continue
- if not (match_all or query_lower in lower_name):
- continue
+ if not match_all:
+ # Check if ALL terms are present in the filename
+ # For single terms with wildcards, use fnmatch; otherwise use substring matching
+ if len(terms) == 1 and ('*' in terms[0] or '?' in terms[0]):
+ # Wildcard pattern matching for single term
+ from fnmatch import fnmatch
+ if not fnmatch(lower_name, terms[0]):
+ continue
+ else:
+ # Substring matching for all terms (AND logic)
+ if not all(term in lower_name for term in terms):
+ continue
size_bytes = file_path.stat().st_size
path_str = str(file_path)
results.append({
"name": file_path.name,
"title": file_path.name,
"name": file_path.stem,
"title": file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": path_str,
"target": path_str,
"origin": "local",
@@ -562,7 +617,7 @@ class HydrusStorageBackend(StorageBackend):
raise Exception(f"Hydrus response missing file hash: {response}")
file_hash = hydrus_hash
log(f"✅ File uploaded to Hydrus: {file_hash}", file=sys.stderr)
log(f"Hydrus: {file_hash}", file=sys.stderr)
# Add tags if provided
if tags:
@@ -654,7 +709,8 @@ class HydrusStorageBackend(StorageBackend):
# Fetch metadata for the found files
results = []
query_lower = query.lower().strip()
- search_terms = set(query_lower.split()) # For substring matching
+ # Split by comma or space for AND logic
+ search_terms = set(query_lower.replace(',', ' ').split()) # For substring matching
if file_ids:
metadata = client.fetch_file_metadata(file_ids=file_ids)
@@ -852,6 +908,11 @@ class DebridStorageBackend(StorageBackend):
# "*" means "match all" - include all magnets
match_all = query_lower == "*"
# Split query into terms for AND logic
terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
if not terms:
terms = [query_lower]
for magnet in magnets:
filename = magnet.get('filename', '').lower()
status_code = magnet.get('statusCode', 0)
@@ -862,8 +923,9 @@ class DebridStorageBackend(StorageBackend):
continue
# Match query against filename (or match all if query is "*")
- if not match_all and query_lower not in filename:
- continue
+ if not match_all:
+ if not all(term in filename for term in terms):
+ continue
matching_magnet_ids.append(magnet_id)
magnet_info_map[magnet_id] = magnet
@@ -952,6 +1014,102 @@ class DebridStorageBackend(StorageBackend):
return result
class MatrixStorageBackend(StorageBackend):
"""File storage backend for Matrix (Element) chat rooms."""
def get_name(self) -> str:
return "matrix"
def upload(self, file_path: Path, **kwargs: Any) -> str:
"""Upload file to Matrix room.
Requires 'config' in kwargs with 'storage.matrix' settings:
- homeserver: URL of homeserver (e.g. https://matrix.org)
- user_id: User ID (e.g. @user:matrix.org)
- access_token: Access token (preferred) OR password
- room_id: Room ID to upload to (e.g. !roomid:matrix.org)
"""
config = kwargs.get('config', {})
if not config:
raise ValueError("Config required for Matrix upload")
matrix_conf = config.get('storage', {}).get('matrix', {})
if not matrix_conf:
raise ValueError("Matrix storage not configured in config.json")
homeserver = matrix_conf.get('homeserver')
# user_id = matrix_conf.get('user_id') # Not strictly needed if we have token
access_token = matrix_conf.get('access_token')
room_id = matrix_conf.get('room_id')
if not homeserver or not room_id:
raise ValueError("Matrix homeserver and room_id required")
# Ensure homeserver has protocol
if not homeserver.startswith('http'):
homeserver = f"https://{homeserver}"
# Login if no access token (optional implementation, for now assume token)
if not access_token:
raise ValueError("Matrix access_token required (login not yet implemented)")
# 1. Upload Media (v3 media API; "r3" is not a valid Matrix API version)
upload_url = f"{homeserver}/_matrix/media/v3/upload"
headers = {
"Authorization": f"Bearer {access_token}",
"Content-Type": "application/octet-stream" # Or guess mime type
}
import mimetypes
mime_type, _ = mimetypes.guess_type(file_path)
if mime_type:
headers["Content-Type"] = mime_type
filename = file_path.name
try:
with open(file_path, 'rb') as f:
resp = requests.post(upload_url, headers=headers, data=f, params={"filename": filename})
if resp.status_code != 200:
raise Exception(f"Matrix upload failed: {resp.text}")
content_uri = resp.json().get('content_uri')
if not content_uri:
raise Exception("No content_uri returned from Matrix upload")
# 2. Send Message
send_url = f"{homeserver}/_matrix/client/r0/rooms/{room_id}/send/m.room.message"
# Determine msgtype
msgtype = "m.file"
if mime_type:
if mime_type.startswith("image/"): msgtype = "m.image"
elif mime_type.startswith("video/"): msgtype = "m.video"
elif mime_type.startswith("audio/"): msgtype = "m.audio"
payload = {
"msgtype": msgtype,
"body": filename,
"url": content_uri,
"info": {
"mimetype": mime_type,
"size": file_path.stat().st_size
}
}
resp = requests.post(send_url, headers=headers, json=payload)
if resp.status_code != 200:
raise Exception(f"Matrix send message failed: {resp.text}")
event_id = resp.json().get('event_id')
return f"matrix://{room_id}/{event_id}"
except Exception as e:
log(f"❌ Matrix upload error: {e}", file=sys.stderr)
raise
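# Usage sketch (illustrative, not part of this commit): calling the backend
# directly with a minimal config dict shaped like the fragment above.
backend = MatrixStorageBackend()
config = {"storage": {"matrix": {
    "homeserver": "https://matrix.org",
    "access_token": "syt_xxx_replace_me",
    "room_id": "!abcdef:matrix.org",
}}}
try:
    ref = backend.upload(Path("song.mp3"), config=config)
    print(ref)  # e.g. matrix://!abcdef:matrix.org/$eventid
except Exception as exc:
    print(f"Matrix upload failed: {exc}")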
class FileStorage:
"""Unified file storage interface supporting multiple backend services.
@@ -997,6 +1155,9 @@ class FileStorage:
# Include Debrid backend (API key optional - will raise on use if not provided)
if debrid_api_key:
self._backends["debrid"] = DebridStorageBackend(api_key=debrid_api_key)
# Include Matrix backend
self._backends["matrix"] = MatrixStorageBackend()
def __getitem__(self, backend_name: str) -> StorageBackend:
"""Get a storage backend by name.

View File

@@ -1411,7 +1411,7 @@ def get_client(config: dict[str, Any]) -> HydrusClient:
cached_client = _hydrus_client_cache[cache_key]
# If cached client has a session key, reuse it (don't re-acquire)
if hasattr(cached_client, '_session_key') and cached_client._session_key:
debug(f"Reusing cached session key for {hydrus_url}")
# debug(f"Reusing cached session key for {hydrus_url}")
return cached_client
# If no session key in cache, try to get one
try:

View File

@@ -230,6 +230,16 @@ class LocalLibraryDB:
FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS playlists (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT UNIQUE NOT NULL,
items TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
# Worker tracking tables (drop legacy workers table if still present)
self._ensure_worker_tables(cursor)
@@ -1386,6 +1396,104 @@ class LocalLibrarySearchOptimizer:
"""Fast tag-based search using database."""
if not self.db:
return []
try:
cursor = self.db.connection.cursor()
cursor.execute("""
SELECT f.file_path
FROM files f
JOIN tags t ON f.id = t.file_id
WHERE t.tag LIKE ?
LIMIT ?
""", (f"%{tag}%", limit))
return [Path(row[0]) for row in cursor.fetchall()]
except Exception as e:
logger.error(f"Tag search failed: {e}")
return []
def save_playlist(self, name: str, items: List[Dict[str, Any]]) -> bool:
"""Save a playlist to the database."""
if not self.db:
return False
try:
cursor = self.db.connection.cursor()
items_json = json.dumps(items)
cursor.execute("""
INSERT INTO playlists (name, items, updated_at)
VALUES (?, ?, CURRENT_TIMESTAMP)
ON CONFLICT(name) DO UPDATE SET
items = excluded.items,
updated_at = CURRENT_TIMESTAMP
""", (name, items_json))
self.db.connection.commit()
return True
except Exception as e:
logger.error(f"Failed to save playlist {name}: {e}")
return False
def get_playlists(self) -> List[Dict[str, Any]]:
"""Get all saved playlists."""
if not self.db:
return []
try:
cursor = self.db.connection.cursor()
cursor.execute("SELECT id, name, items, updated_at FROM playlists ORDER BY updated_at DESC")
results = []
for row in cursor.fetchall():
try:
items = json.loads(row['items'])
except json.JSONDecodeError:
items = []
results.append({
'id': row['id'],
'name': row['name'],
'items': items,
'updated_at': row['updated_at']
})
return results
except Exception as e:
logger.error(f"Failed to get playlists: {e}")
return []
def get_playlist(self, name: str) -> Optional[List[Dict[str, Any]]]:
"""Get a specific playlist by name."""
if not self.db:
return None
try:
cursor = self.db.connection.cursor()
cursor.execute("SELECT items FROM playlists WHERE name = ?", (name,))
row = cursor.fetchone()
if row:
try:
return json.loads(row['items'])
except json.JSONDecodeError:
return []
return None
except Exception as e:
logger.error(f"Failed to get playlist {name}: {e}")
return None
def get_playlist_by_id(self, playlist_id: int) -> Optional[Tuple[str, List[Dict[str, Any]]]]:
"""Get a specific playlist by ID. Returns (name, items)."""
if not self.db:
return None
try:
cursor = self.db.connection.cursor()
cursor.execute("SELECT name, items FROM playlists WHERE id = ?", (playlist_id,))
row = cursor.fetchone()
if row:
try:
items = json.loads(row['items'])
return (row['name'], items)
except json.JSONDecodeError:
return (row['name'], [])
return None
except Exception as e:
logger.error(f"Failed to get playlist ID {playlist_id}: {e}")
return None
if not self.db:
return []
return self.db.search_by_tag(tag, limit)
def search_by_hash(self, file_hash: str) -> Optional[Path]:

helper/mpv_ipc.py (new file, 290 lines)
View File

@@ -0,0 +1,290 @@
"""MPV IPC client for cross-platform communication.
This module provides a cross-platform interface to communicate with mpv
using either named pipes (Windows) or Unix domain sockets (Linux/macOS).
This is the central hub for all Python-mpv IPC communication. The Lua script
should use the Python CLI, which uses this module to manage mpv connections.
"""
import json
import os
import platform
import socket
import time as _time
from typing import Any, Dict, Optional, List
from helper.logger import debug
# Fixed pipe name for persistent MPV connection across all Python sessions
FIXED_IPC_PIPE_NAME = "mpv-medeia-macina"
class MPVIPCError(Exception):
"""Raised when MPV IPC communication fails."""
pass
def get_ipc_pipe_path() -> str:
"""Get the fixed IPC pipe/socket path for persistent MPV connection.
Uses a fixed name so all playback sessions connect to the same MPV
window/process instead of creating new instances.
Returns:
Path to IPC pipe (Windows) or socket (Linux/macOS)
"""
system = platform.system()
if system == "Windows":
return f"\\\\.\\pipe\\{FIXED_IPC_PIPE_NAME}"
elif system == "Darwin": # macOS
return f"/tmp/{FIXED_IPC_PIPE_NAME}.sock"
else: # Linux and others
return f"/tmp/{FIXED_IPC_PIPE_NAME}.sock"
class MPVIPCClient:
"""Client for communicating with mpv via IPC socket/pipe.
This is the unified interface for all Python code to communicate with mpv.
It handles platform-specific differences (Windows named pipes vs Unix sockets).
"""
def __init__(self, socket_path: Optional[str] = None, timeout: float = 5.0):
"""Initialize MPV IPC client.
Args:
socket_path: Path to IPC socket/pipe. If None, uses the fixed persistent path.
timeout: Socket timeout in seconds.
"""
self.timeout = timeout
self.socket_path = socket_path or get_ipc_pipe_path()
self.sock = None
self.is_windows = platform.system() == "Windows"
def connect(self) -> bool:
"""Connect to mpv IPC socket.
Returns:
True if connection successful, False otherwise.
"""
try:
if self.is_windows:
# Windows named pipes
try:
# Try to open the named pipe
self.sock = open(self.socket_path, 'r+b', buffering=0)
return True
except (OSError, IOError) as exc:
debug(f"Failed to connect to MPV named pipe: {exc}")
return False
else:
# Unix domain socket (Linux, macOS)
if not os.path.exists(self.socket_path):
debug(f"IPC socket not found: {self.socket_path}")
return False
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.settimeout(self.timeout)
self.sock.connect(self.socket_path)
return True
except Exception as exc:
debug(f"Failed to connect to MPV IPC: {exc}")
self.sock = None
return False
def send_command(self, command_data: Dict[str, Any] | List[Any]) -> Optional[Dict[str, Any]]:
"""Send a command to mpv and get response.
Args:
command_data: Command dict (e.g. {"command": [...]}) or list (e.g. ["loadfile", ...])
Returns:
Response dict with 'error' key (value 'success' on success), or None on error.
"""
if not self.sock:
if not self.connect():
return None
try:
# Format command as JSON (mpv IPC protocol)
if isinstance(command_data, list):
request = {"command": command_data}
else:
request = command_data
# Add request_id if not present to match response
if "request_id" not in request:
request["request_id"] = int(_time.time() * 1000) % 100000
payload = json.dumps(request) + "\n"
# Send command
if self.is_windows:
self.sock.write(payload.encode('utf-8'))
self.sock.flush()
else:
self.sock.sendall(payload.encode('utf-8'))
# Receive response
# We need to read lines until we find the one with matching request_id
# or until timeout/error. MPV might send events in between.
start_time = _time.time()
while _time.time() - start_time < self.timeout:
response_data = b""
if self.is_windows:
try:
response_data = self.sock.readline()
except (OSError, IOError):
return None
else:
try:
# This is simplistic for Unix socket (might not get full line)
# But for now assuming MPV sends line-buffered JSON
chunk = self.sock.recv(4096)
if not chunk:
break
response_data = chunk
# TODO: Handle partial lines if needed
except socket.timeout:
return None
if not response_data:
break
try:
lines = response_data.decode('utf-8').strip().split('\n')
for line in lines:
if not line: continue
resp = json.loads(line)
# Check if this is the response to our request
if resp.get("request_id") == request.get("request_id"):
return resp
# If it's an error without request_id (shouldn't happen for commands)
if "error" in resp and "request_id" not in resp:
# Might be an event or async error
pass
except json.JSONDecodeError:
pass
return None
except Exception as exc:
debug(f"Error sending command to MPV: {exc}")
self.disconnect()
return None
def disconnect(self) -> None:
"""Disconnect from mpv IPC socket."""
if self.sock:
try:
self.sock.close()
except Exception:
pass
self.sock = None
def __del__(self) -> None:
"""Cleanup on object destruction."""
self.disconnect()
def __enter__(self):
"""Context manager entry."""
self.connect()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Context manager exit."""
self.disconnect()
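# Usage sketch: query a property from a running mpv through the context
# manager ("pause" is a standard mpv property; illustrative only).
if __name__ == "__main__":
    with MPVIPCClient() as client:
        resp = client.send_command(["get_property", "pause"])
        if resp and resp.get("error") == "success":
            print(f"paused: {resp.get('data')}")
        else:
            print("no running mpv instance (or request failed)")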
def send_to_mpv(file_url: str, title: str, headers: Optional[Dict[str, str]] = None,
append: bool = True) -> bool:
"""Send a file to be played in the existing MPV instance via IPC.
This attempts to send to an existing MPV instance. If it fails, the calling
code should start a new MPV instance with the IPC pipe.
Args:
file_url: URL or path to file to play
title: Display title for the file
headers: Optional HTTP headers (dict)
append: If True, append to playlist; if False, replace
Returns:
True if successfully sent to existing MPV, False if pipe unavailable.
"""
# Try to connect using the robust client
client = get_mpv_client()
if not client:
return False
try:
# Command 1: Set headers if provided
if headers:
header_str = ",".join([f"{k}: {v}" for k, v in headers.items()])
cmd_headers = {
"command": ["set_property", "http-header-fields", header_str],
"request_id": 0
}
client.send_command(cmd_headers)
# Command 2: Load file
# Use memory:// M3U to preserve title in playlist if provided
# This is required for YouTube URLs and proper playlist display
if title:
# Sanitize title for M3U (remove newlines)
safe_title = title.replace("\n", " ").replace("\r", "")
# M3U format: #EXTM3U\n#EXTINF:-1,Title\nURL
m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{file_url}\n"
target = f"memory://{m3u_content}"
else:
target = file_url
load_mode = "append-play" if append else "replace"
cmd_load = {
"command": ["loadfile", target, load_mode],
"request_id": 1
}
resp = client.send_command(cmd_load)
if not resp or resp.get('error') != 'success':
debug(f"MPV loadfile failed: {resp}")
return False
# Command 3: Set title (metadata for display) - still useful for window title
if title:
safe_title_prop = title.replace('"', '\\"')
cmd_title = {
"command": ["set_property", "force-media-title", safe_title_prop],
"request_id": 2
}
client.send_command(cmd_title)
debug(f"Sent to existing MPV: {title}")
return True
except Exception as e:
debug(f"Error in send_to_mpv: {e}")
return False
finally:
client.disconnect()
def get_mpv_client(socket_path: Optional[str] = None) -> Optional[MPVIPCClient]:
"""Get an MPV IPC client, attempting to connect.
Args:
socket_path: Custom socket path (uses default if None)
Returns:
Connected MPVIPCClient or None if connection fails.
"""
client = MPVIPCClient(socket_path=socket_path)
if client.connect():
return client
return None
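# End-to-end sketch of the intended calling pattern (illustrative): try the
# running instance first; otherwise spawn mpv on the fixed pipe and retry once
# it has had a moment to create the socket.
import subprocess
import time

def play(file_url: str, title: str) -> None:
    if send_to_mpv(file_url, title):
        return
    subprocess.Popen(["mpv", f"--input-ipc-server={get_ipc_pipe_path()}", "--idle=yes"])
    time.sleep(1.0)  # crude wait for the IPC pipe to appear
    if not send_to_mpv(file_url, title):
        debug("Could not reach mpv even after spawning it")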

View File

@@ -1660,7 +1660,7 @@ class FileProvider(ABC):
self.name = self.__class__.__name__.replace("FileProvider", "").lower()
@abstractmethod
def upload(self, file_path: str) -> str:
def upload(self, file_path: str, **kwargs: Any) -> str:
"""Upload a file and return the URL."""
pass
@@ -1677,7 +1677,7 @@ class ZeroXZeroFileProvider(FileProvider):
self.name = "0x0"
self.base_url = "https://0x0.st"
def upload(self, file_path: str) -> str:
def upload(self, file_path: str, **kwargs: Any) -> str:
"""Upload file to 0x0.st."""
from helper.http_client import HTTPClient
import os
@@ -1707,9 +1707,137 @@ class ZeroXZeroFileProvider(FileProvider):
return True
class MatrixFileProvider(FileProvider):
"""File provider for Matrix (Element) chat rooms."""
def __init__(self, config: Optional[Dict[str, Any]] = None):
super().__init__(config)
self.name = "matrix"
def validate(self) -> bool:
"""Check if Matrix is configured."""
if not self.config: return False
matrix_conf = self.config.get('storage', {}).get('matrix', {})
return bool(matrix_conf.get('homeserver') and matrix_conf.get('room_id') and (matrix_conf.get('access_token') or matrix_conf.get('password')))
def upload(self, file_path: str, **kwargs: Any) -> str:
"""Upload file to Matrix room."""
import requests
import mimetypes
from pathlib import Path
import json
debug(f"[Matrix] Starting upload for: {file_path}")
debug(f"[Matrix] kwargs: {kwargs}")
path = Path(file_path)
if not path.exists():
raise FileNotFoundError(f"File not found: {file_path}")
matrix_conf = self.config.get('storage', {}).get('matrix', {})
homeserver = matrix_conf.get('homeserver')
access_token = matrix_conf.get('access_token')
room_id = matrix_conf.get('room_id')
# Guard against missing config (upload may be called without validate())
if not homeserver or not room_id or not access_token:
raise ValueError("Matrix homeserver, room_id and access_token required")
if not homeserver.startswith('http'):
homeserver = f"https://{homeserver}"
# 1. Upload Media
# Use v3 API
upload_url = f"{homeserver}/_matrix/media/v3/upload"
headers = {
"Authorization": f"Bearer {access_token}",
"Content-Type": "application/octet-stream"
}
mime_type, _ = mimetypes.guess_type(path)
if mime_type:
headers["Content-Type"] = mime_type
filename = path.name
debug(f"[Matrix] Uploading media to {upload_url} with mime_type: {mime_type}")
with open(path, 'rb') as f:
resp = requests.post(upload_url, headers=headers, data=f, params={"filename": filename})
if resp.status_code != 200:
raise Exception(f"Matrix upload failed: {resp.text}")
content_uri = resp.json().get('content_uri')
if not content_uri:
raise Exception("No content_uri returned from Matrix upload")
debug(f"[Matrix] Media uploaded, content_uri: {content_uri}")
# 2. Send Message
# Use v3 API
send_url = f"{homeserver}/_matrix/client/v3/rooms/{room_id}/send/m.room.message"
# Determine msgtype with better fallback for audio
msgtype = "m.file"
ext = path.suffix.lower()
# Explicit check for common audio extensions to force m.audio
# This prevents audio files being treated as generic files or video
AUDIO_EXTS = {'.mp3', '.flac', '.wav', '.m4a', '.aac', '.ogg', '.opus', '.wma', '.mka', '.alac'}
VIDEO_EXTS = {'.mp4', '.mkv', '.webm', '.mov', '.avi', '.flv', '.mpg', '.mpeg', '.ts', '.m4v', '.wmv'}
IMAGE_EXTS = {'.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff'}
if ext in AUDIO_EXTS:
msgtype = "m.audio"
elif ext in VIDEO_EXTS:
msgtype = "m.video"
elif ext in IMAGE_EXTS:
msgtype = "m.image"
elif mime_type:
if mime_type.startswith("audio/"): msgtype = "m.audio"
elif mime_type.startswith("video/"): msgtype = "m.video"
elif mime_type.startswith("image/"): msgtype = "m.image"
debug(f"[Matrix] Determined msgtype: {msgtype} (ext: {ext}, mime: {mime_type})")
info = {
"mimetype": mime_type,
"size": path.stat().st_size
}
# Try to get duration for audio/video
if msgtype in ("m.audio", "m.video"):
try:
# Try mutagen first (lightweight)
# Use dynamic import to avoid top-level dependency if not installed
# Note: mutagen.File is available at package level at runtime but type checkers might miss it
import mutagen # type: ignore
m = mutagen.File(str(path)) # type: ignore
if m and m.info and hasattr(m.info, 'length'):
duration_ms = int(m.info.length * 1000)
info['duration'] = duration_ms
debug(f"[Matrix] Extracted duration: {duration_ms}ms")
except Exception as e:
debug(f"[Matrix] Failed to extract duration: {e}")
payload = {
"msgtype": msgtype,
"body": filename,
"url": content_uri,
"info": info
}
debug(f"[Matrix] Sending message payload: {json.dumps(payload, indent=2)}")
resp = requests.post(send_url, headers=headers, json=payload)
if resp.status_code != 200:
raise Exception(f"Matrix send message failed: {resp.text}")
event_id = resp.json().get('event_id')
return f"https://matrix.to/#/{room_id}/{event_id}"
# File provider registry
_FILE_PROVIDERS = {
"0x0": ZeroXZeroFileProvider,
"matrix": MatrixFileProvider,
}
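# Usage sketch for the registry (illustrative; assumes providers accept a
# config dict in __init__ as MatrixFileProvider does):
def upload_via(provider_name: str, file_path: str, config: dict) -> str:
    provider_cls = _FILE_PROVIDERS.get(provider_name)
    if provider_cls is None:
        raise KeyError(f"Unknown file provider: {provider_name}")
    provider = provider_cls(config)
    if not provider.validate():
        raise ValueError(f"Provider {provider_name} is not configured")
    return provider.upload(file_path)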