d
This commit is contained in:
39
cmdnats/__init__.py
Normal file
39
cmdnats/__init__.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from importlib import import_module
|
||||
from typing import Any, Callable, Dict, Sequence
|
||||
|
||||
CmdletFn = Callable[[Any, Sequence[str], Dict[str, Any]], int]
|
||||
|
||||
|
||||
def _register_cmdlet_object(cmdlet_obj, registry: Dict[str, CmdletFn]) -> None:
|
||||
run_fn = getattr(cmdlet_obj, "exec", None) if hasattr(cmdlet_obj, "exec") else None
|
||||
if not callable(run_fn):
|
||||
return
|
||||
|
||||
if hasattr(cmdlet_obj, "name") and cmdlet_obj.name:
|
||||
registry[cmdlet_obj.name.replace("_", "-").lower()] = run_fn
|
||||
|
||||
if hasattr(cmdlet_obj, "aliases") and getattr(cmdlet_obj, "aliases"):
|
||||
for alias in cmdlet_obj.aliases:
|
||||
registry[alias.replace("_", "-").lower()] = run_fn
|
||||
|
||||
|
||||
def register_native_commands(registry: Dict[str, CmdletFn]) -> None:
    """Import native command modules and register their CMDLET exec functions.

    Scans this package's directory for ``*.py`` modules (skipping private
    modules, which also covers ``__init__.py``), imports each one relative to
    this package, and registers its module-level ``CMDLET`` object if present.
    Import failures are reported on stderr and skipped so one broken command
    module cannot break the whole set.

    Args:
        registry: Mapping of command name -> exec function, mutated in place.
    """
    import sys  # only needed for error reporting

    base_dir = os.path.dirname(__file__)
    for filename in os.listdir(base_dir):
        # Positive condition instead of the original double-negated filter;
        # startswith("_") already excludes "__init__.py".
        if not filename.endswith(".py") or filename.startswith("_"):
            continue

        mod_name = filename[:-3]
        try:
            module = import_module(f".{mod_name}", __name__)
            cmdlet_obj = getattr(module, "CMDLET", None)
            if cmdlet_obj:
                _register_cmdlet_object(cmdlet_obj, registry)
        except Exception as exc:
            # Best-effort plugin loading: report and move on.
            print(f"Error importing native command '{mod_name}': {exc}", file=sys.stderr)
|
||||
148
cmdnats/adjective.py
Normal file
148
cmdnats/adjective.py
Normal file
@@ -0,0 +1,148 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import List, Dict, Any, Optional, Sequence
|
||||
from cmdlets._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
from helper.logger import log
|
||||
from result_table import ResultTable
|
||||
import pipeline as ctx
|
||||
|
||||
ADJECTIVE_FILE = os.path.join(os.path.dirname(os.path.dirname(__file__)), "helper", "adjective.json")
|
||||
|
||||
def _load_adjectives() -> Dict[str, List[str]]:
    """Read the adjective category->tags mapping from disk; {} on any failure."""
    try:
        if not os.path.exists(ADJECTIVE_FILE):
            return {}
        with open(ADJECTIVE_FILE, 'r', encoding='utf-8') as handle:
            return json.load(handle)
    except Exception as exc:
        log(f"Error loading adjectives: {exc}", file=sys.stderr)
        return {}
|
||||
|
||||
def _save_adjectives(data: Dict[str, List[str]]) -> bool:
    """Persist the adjective mapping to disk; True on success, False otherwise."""
    try:
        with open(ADJECTIVE_FILE, 'w', encoding='utf-8') as handle:
            json.dump(data, handle, indent=2)
    except Exception as exc:
        log(f"Error saving adjectives: {exc}", file=sys.stderr)
        return False
    return True
|
||||
|
||||
def _tag_near_flag(tokens: List[str], flag: str) -> Optional[str]:
    """Return the tag adjacent to *flag*: the token after it, else before it."""
    idx = tokens.index(flag)
    if idx + 1 < len(tokens):
        return tokens[idx + 1]
    if idx > 0:
        return tokens[idx - 1]
    return None


def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Entry point for .adjective: list categories/tags, add or delete tags.

    Usage patterns:
      .adjective                      -> table of categories
      .adjective <category>           -> table of tags (creates category if new)
      .adjective <category> -add tag  -> add a tag (creates category if new)
      .adjective <category> -delete tag -> remove a tag

    Returns 0 on success, 1 on a missing-tag error.
    """
    data = _load_adjectives()

    # No args: list all categories.
    if not args:
        table = ResultTable("Adjective Categories")
        for i, (category, tags) in enumerate(data.items()):
            row = table.add_row()
            row.add_column("#", str(i + 1))
            row.add_column("Category", category)
            row.add_column("Tag Amount", str(len(tags)))
            # Selection expands to: .adjective "Category Name"
            table.set_row_selection_args(i, [category])
        table.set_source_command(".adjective")
        ctx.set_last_result_table_overlay(table, list(data.keys()))
        ctx.set_current_stage_table(table)
        print(table)
        return 0

    # First positional arg is the category.
    category = args[0]

    if category not in data:
        if len(args) == 1:
            # Bare unknown category: create and persist it immediately.
            data[category] = []
            _save_adjectives(data)
            log(f"Created new category: {category}")
        elif "-add" in args:
            # Adding to a non-existent category implicitly creates it;
            # persisted below by the -add branch.
            data[category] = []

    remaining_args = list(args[1:])

    if "-add" in remaining_args:
        # .adjective category -add tag  OR  .adjective category tag -add
        tag = _tag_near_flag(remaining_args, "-add")
        if not tag:
            log("Error: No tag specified to add")
            return 1
        # data[category] is guaranteed: either it pre-existed or the
        # implicit-create branch above ran (it triggers whenever -add is
        # present in args and the category was unknown).
        if tag not in data[category]:
            data[category].append(tag)
            _save_adjectives(data)
            log(f"Added '{tag}' to '{category}'")
        else:
            log(f"Tag '{tag}' already exists in '{category}'")

    elif "-delete" in remaining_args:
        # .adjective category -delete tag  OR  .adjective category tag -delete
        tag = _tag_near_flag(remaining_args, "-delete")
        if not tag:
            log("Error: No tag specified to delete")
            return 1
        # BUG FIX: guard against an unknown category — the original indexed
        # data[category] directly here and raised KeyError for
        # ".adjective <new-category> <tag> -delete".
        if tag in data.get(category, []):
            data[category].remove(tag)
            _save_adjectives(data)
            log(f"Deleted '{tag}' from '{category}'")
        else:
            log(f"Tag '{tag}' not found in '{category}'")

    # List tags in category (default action; also runs after a modification).
    tags = data.get(category, [])
    table = ResultTable(f"Tags in '{category}'")
    for i, tag in enumerate(tags):
        row = table.add_row()
        row.add_column("#", str(i + 1))
        row.add_column("Tag", tag)
        # Selection expands to: .adjective "Category" "Tag",
        # which allows typing @N -delete to delete it.
        table.set_row_selection_args(i, [category, tag])
    table.set_source_command(".adjective")
    ctx.set_last_result_table_overlay(table, tags)
    ctx.set_current_stage_table(table)
    print(table)

    return 0
|
||||
|
||||
# Cmdlet descriptor picked up by the package loader (register_native_commands
# scans for a module-level CMDLET); its `exec` callable is registered under
# the normalised name and each alias.
CMDLET = Cmdlet(
    name=".adjective",
    aliases=["adj"],
    summary="Manage adjective categories and tags",
    usage=".adjective [category] [-add tag] [-delete tag]",
    args=[
        # Optional positionals; _run parses them manually.
        CmdletArg(name="category", type="string", description="Category name", required=False),
        CmdletArg(name="tag", type="string", description="Tag name", required=False),
        # Operation flags; _run checks -add before -delete, so -add wins if both given.
        CmdletArg(name="add", type="flag", description="Add tag"),
        CmdletArg(name="delete", type="flag", description="Delete tag"),
    ],
    exec=_run
)
|
||||
103
cmdnats/matrix.py
Normal file
103
cmdnats/matrix.py
Normal file
@@ -0,0 +1,103 @@
|
||||
from typing import Any, Dict, Sequence, List
|
||||
import sys
|
||||
from cmdlets._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
from helper.logger import log, debug
|
||||
from result_table import ResultTable
|
||||
from helper.file_storage import MatrixStorageBackend
|
||||
from config import save_config, load_config
|
||||
import pipeline as ctx
|
||||
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """List joined Matrix rooms and optionally set the default room.

    With no selection, prints a table of joined rooms (the current default
    marked '*'). With a selection — a 1-based index or a literal room ID —
    persists that room as storage.matrix.room_id in the on-disk config.
    Returns 0 on success, 1 when no rooms are found or the selection is invalid.
    """
    parsed = parse_cmdlet_args(args, CMDLET)

    # Initialize backend
    backend = MatrixStorageBackend()

    # Get current default room from the runtime config (may be absent).
    matrix_conf = config.get('storage', {}).get('matrix', {})
    current_room_id = matrix_conf.get('room_id')

    # Fetch rooms
    debug("Fetching joined rooms from Matrix...")
    rooms = backend.list_rooms(config)

    if not rooms:
        debug("No joined rooms found or Matrix not configured.")
        return 1

    # Handle selection if provided
    selection = parsed.get("selection")
    if selection:
        new_room_id = None
        selected_room_name = None

        # Try as index (1-based)
        try:
            idx = int(selection) - 1
            if 0 <= idx < len(rooms):
                selected_room = rooms[idx]
                new_room_id = selected_room['id']
                selected_room_name = selected_room['name']
        except ValueError:
            # Not an integer: try as a literal Room ID instead.
            for room in rooms:
                if room['id'] == selection:
                    new_room_id = selection
                    selected_room_name = room['name']
                    break

        if new_room_id:
            # Update config
            # Load fresh config from disk to avoid saving runtime objects (like WorkerManager)
            disk_config = load_config()

            # Ensure the nested storage.matrix section exists before writing.
            if 'storage' not in disk_config: disk_config['storage'] = {}
            if 'matrix' not in disk_config['storage']: disk_config['storage']['matrix'] = {}

            disk_config['storage']['matrix']['room_id'] = new_room_id
            save_config(disk_config)

            debug(f"Default Matrix room set to: {selected_room_name} ({new_room_id})")
            # Update the local view so the table below marks the new default.
            current_room_id = new_room_id
        else:
            # Neither a valid index nor a known room ID.
            debug(f"Invalid selection: {selection}")
            return 1

    # Display table
    table = ResultTable("Matrix Rooms")
    for i, room in enumerate(rooms):
        is_default = (room['id'] == current_room_id)

        row = table.add_row()
        row.add_column("Default", "*" if is_default else "")
        row.add_column("Name", room['name'])
        row.add_column("ID", room['id'])

        # Set selection args so user can type @N to select
        # This will run .matrix N
        table.set_row_selection_args(i, [str(i + 1)])

    table.set_source_command(".matrix")

    # Register results
    ctx.set_last_result_table_overlay(table, rooms)
    ctx.set_current_stage_table(table)

    print(table)
    return 0
|
||||
|
||||
# Cmdlet descriptor picked up by the package loader; `exec` dispatches to _run.
CMDLET = Cmdlet(
    name=".matrix",
    aliases=["matrix", "rooms"],
    summary="List and select default Matrix room",
    usage=".matrix [selection]",
    args=[
        CmdletArg(
            name="selection",
            type="string",
            # _run accepts either a 1-based table index or a literal room ID.
            description="Index or ID of the room to set as default",
            required=False
        )
    ],
    exec=_run
)
|
||||
866
cmdnats/pipe.py
Normal file
866
cmdnats/pipe.py
Normal file
@@ -0,0 +1,866 @@
|
||||
from typing import Any, Dict, Sequence, List, Optional
|
||||
import sys
|
||||
import json
|
||||
import platform
|
||||
import socket
|
||||
import re
|
||||
import subprocess
|
||||
from urllib.parse import urlparse
|
||||
from pathlib import Path
|
||||
from cmdlets._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
from helper.logger import log, debug
|
||||
from result_table import ResultTable
|
||||
from helper.mpv_ipc import get_ipc_pipe_path, MPVIPCClient
|
||||
import pipeline as ctx
|
||||
from helper.download import is_url_supported_by_ytdlp
|
||||
|
||||
from helper.local_library import LocalLibrarySearchOptimizer
|
||||
from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url
|
||||
from hydrus_health_check import get_cookies_file_path
|
||||
|
||||
def _send_ipc_command(command: Dict[str, Any], silent: bool = False) -> Optional[Any]:
    """Deliver *command* to MPV over its IPC pipe and return MPV's reply.

    Returns None when MPV is unreachable or any IPC error occurs; errors are
    reported via debug() unless *silent* is set.
    """
    try:
        client = MPVIPCClient(socket_path=get_ipc_pipe_path())
        if not client.connect():
            # MPV not running
            return None
        reply = client.send_command(command)
        client.disconnect()
        return reply
    except Exception as exc:
        if not silent:
            debug(f"IPC Error: {exc}", file=sys.stderr)
        return None
|
||||
|
||||
def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
    """Fetch MPV's current playlist; None means MPV is not running."""
    response = _send_ipc_command(
        {"command": ["get_property", "playlist"], "request_id": 100},
        silent=silent,
    )
    if response is None:
        return None
    if response.get("error") != "success":
        return []
    return response.get("data", [])
|
||||
|
||||
def _extract_title_from_item(item: Dict[str, Any]) -> str:
|
||||
"""Extract a clean title from an MPV playlist item, handling memory:// M3U hacks."""
|
||||
title = item.get("title")
|
||||
filename = item.get("filename") or ""
|
||||
|
||||
# Special handling for memory:// M3U playlists (used to pass titles via IPC)
|
||||
if "memory://" in filename and "#EXTINF:" in filename:
|
||||
try:
|
||||
# Extract title from #EXTINF:-1,Title
|
||||
# Use regex to find title between #EXTINF:-1, and newline
|
||||
match = re.search(r"#EXTINF:-1,(.*?)(?:\n|\r|$)", filename)
|
||||
if match:
|
||||
extracted_title = match.group(1).strip()
|
||||
if not title or title == "memory://":
|
||||
title = extracted_title
|
||||
|
||||
# If we still don't have a title, try to find the URL in the M3U content
|
||||
if not title:
|
||||
lines = filename.splitlines()
|
||||
for line in lines:
|
||||
line = line.strip()
|
||||
if line and not line.startswith('#') and not line.startswith('memory://'):
|
||||
# Found the URL, use it as title
|
||||
return line
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return title or filename or "Unknown"
|
||||
|
||||
|
||||
def _extract_target_from_memory_uri(text: str) -> Optional[str]:
|
||||
"""Extract the real target URL/path from a memory:// M3U payload."""
|
||||
if not isinstance(text, str) or not text.startswith("memory://"):
|
||||
return None
|
||||
for line in text.splitlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith('#') or line.startswith('memory://'):
|
||||
continue
|
||||
return line
|
||||
return None
|
||||
|
||||
|
||||
def _infer_store_from_playlist_item(item: Dict[str, Any]) -> str:
    """Infer a friendly store label ("youtube", "local", "hydrus", ...) from an MPV playlist entry."""
    raw = item.get("filename") if isinstance(item, dict) else None
    target = str(raw or "")

    # Unwrap the memory:// M3U wrapper if present.
    unwrapped = _extract_target_from_memory_uri(target)
    if unwrapped:
        target = unwrapped

    lowered = target.lower()
    if lowered.startswith("magnet:"):
        return "magnet"
    if lowered.startswith("hydrus://"):
        return "hydrus"

    # Windows drive letters, UNC shares and file:// URLs are all local.
    is_windows_path = bool(re.match(r"^[a-z]:[\\/]", target, flags=re.IGNORECASE))
    if is_windows_path or target.startswith("\\\\") or lowered.startswith("file://"):
        return "local"

    parsed = urlparse(target)
    host = (parsed.netloc or "").lower()
    path = parsed.path or ""
    if not host:
        return ""

    bare_host = host.split(":", 1)[0]
    if bare_host.startswith("www."):
        bare_host = bare_host[4:]

    if "youtube" in bare_host or "youtu.be" in lowered:
        return "youtube"
    if "soundcloud" in bare_host:
        return "soundcloud"
    if "bandcamp" in bare_host:
        return "bandcamp"
    if "get_files" in path or bare_host in {"127.0.0.1", "localhost"}:
        return "hydrus"
    if re.match(r"^\d+\.\d+\.\d+\.\d+$", bare_host) and "get_files" in path:
        return "hydrus"

    # Fall back to the registrable part of the domain (e.g. "example" from
    # "example.com"), or the host itself for single-label hosts.
    labels = bare_host.split('.')
    if len(labels) >= 2:
        return labels[-2] or bare_host
    return bare_host
|
||||
|
||||
|
||||
def _format_playlist_location(name: str, max_len: int = 48) -> str:
    """Shorten a playlist filename/URL for display; the backend value is untouched."""
    display = name or ""
    unwrapped = _extract_target_from_memory_uri(display)
    if unwrapped:
        display = unwrapped

    lowered = display.lower()
    is_local = re.match(r"^[a-z]:[\\/]", display, flags=re.IGNORECASE) or display.startswith("\\\\")
    if is_local:
        # Local paths: basename only.
        display = Path(display).name
    elif lowered.startswith("file://"):
        parsed = urlparse(display)
        display = Path(parsed.path or "").name or display
    else:
        # Remote URLs: "host/last-path-segment" (www. and port stripped).
        parsed = urlparse(display)
        host = parsed.netloc or ""
        if host:
            bare_host = host.split(":", 1)[0]
            if bare_host.startswith("www."):
                bare_host = bare_host[4:]
            tail = parsed.path.split('/')[-1] if parsed.path else ""
            display = f"{bare_host}/{tail}" if tail else bare_host

    if len(display) > max_len:
        return display[: max_len - 3] + "..."
    return display
|
||||
|
||||
|
||||
def _build_hydrus_header(config: Dict[str, Any]) -> Optional[str]:
    """Return the Hydrus auth header string, or None when no key is configured."""
    try:
        access_key = get_hydrus_access_key(config)
    except Exception:
        access_key = None
    if not access_key:
        return None
    return f"Hydrus-Client-API-Access-Key: {access_key}"
|
||||
|
||||
|
||||
def _build_ytdl_options(config: Optional[Dict[str, Any]], hydrus_header: Optional[str]) -> Optional[str]:
    """Compose a ytdl-raw-options string including cookies and optional Hydrus header.

    Args:
        config: Runtime config (currently unused here; kept for interface parity).
        hydrus_header: Pre-built Hydrus auth header, if any.

    Returns:
        Comma-joined option string for MPV's ytdl-raw-options property, or
        None when there are no options (cannot happen in practice since a
        cookies option is always appended).
    """
    opts: List[str] = []
    try:
        cookies_path = get_cookies_file_path()
    except Exception:
        cookies_path = None
    if cookies_path:
        # BUG FIX: the replace() was originally inline in the f-string
        # expression; a backslash inside an f-string expression is a
        # SyntaxError on Python < 3.12, so hoist it out.
        normalized_path = cookies_path.replace('\\', '/')
        opts.append(f"cookies={normalized_path}")
    else:
        # No cookies file configured: fall back to browser cookies.
        opts.append("cookies-from-browser=chrome")
    if hydrus_header:
        opts.append(f"add-header={hydrus_header}")
    return ",".join(opts) if opts else None
|
||||
|
||||
|
||||
def _is_hydrus_target(target: str, hydrus_url: Optional[str]) -> bool:
|
||||
if not target:
|
||||
return False
|
||||
lower = target.lower()
|
||||
if "hydrus://" in lower:
|
||||
return True
|
||||
parsed = urlparse(target)
|
||||
host = (parsed.netloc or "").lower()
|
||||
path = parsed.path or ""
|
||||
if hydrus_url:
|
||||
try:
|
||||
hydrus_host = urlparse(hydrus_url).netloc.lower()
|
||||
if hydrus_host and hydrus_host in host:
|
||||
return True
|
||||
except Exception:
|
||||
pass
|
||||
if "get_files" in path or "file?hash=" in path:
|
||||
return True
|
||||
if re.match(r"^\d+\.\d+\.\d+\.\d+$", host) and "get_files" in path:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _ensure_ytdl_cookies() -> None:
    """Verify the configured yt-dlp cookies file exists and log its status."""
    from pathlib import Path

    cookies_path = get_cookies_file_path()
    if not cookies_path:
        debug("No cookies file configured")
        return

    # Forward slashes purely for consistent log output.
    check_path = cookies_path.replace('\\', '/')
    cookie_file = Path(cookies_path)
    if not cookie_file.exists():
        debug(f"WARNING: Cookies file does not exist: {check_path}", file=sys.stderr)
        return

    file_size = cookie_file.stat().st_size
    debug(f"Cookies file verified: {check_path} ({file_size} bytes)")
|
||||
|
||||
def _monitor_mpv_logs(duration: float = 3.0) -> None:
    """Monitor MPV logs for a short duration to capture errors.

    Connects a second IPC client, subscribes to warn-level log messages, and
    polls for up to *duration* seconds, echoing ytdl/error lines to stderr.
    Best-effort: any failure (MPV gone, bad JSON) is swallowed silently.
    """
    try:
        client = MPVIPCClient()
        if not client.connect():
            debug("Failed to connect to MPV for log monitoring", file=sys.stderr)
            return

        # Request log messages (warn level and above) as IPC events.
        client.send_command({"command": ["request_log_messages", "warn"]})

        import time
        start_time = time.time()
        while time.time() - start_time < duration:
            # We need to read raw lines from the socket, bypassing the
            # request/response helper, because log messages arrive as events.
            if client.is_windows:
                try:
                    line = client.sock.readline()
                    if line:
                        try:
                            msg = json.loads(line)
                            if msg.get("event") == "log-message":
                                text = msg.get("text", "").strip()
                                prefix = msg.get("prefix", "")
                                level = msg.get("level", "")
                                # Only surface yt-dlp chatter and hard errors.
                                if "ytdl" in prefix or level == "error":
                                    debug(f"[MPV {prefix}] {text}", file=sys.stderr)
                        except json.JSONDecodeError:
                            # Partial or non-JSON line; ignore and keep polling.
                            pass
                except Exception:
                    break
            else:
                # Unix socket handling (simplified): not implemented, bail out.
                break
            time.sleep(0.05)

        client.disconnect()
    except Exception:
        # Log monitoring is advisory only; never let it raise.
        pass
|
||||
def _queue_items(items: List[Any], clear_first: bool = False, config: Optional[Dict[str, Any]] = None) -> bool:
    """Queue items to MPV, starting it if necessary.

    Args:
        items: List of items to queue (dicts, objects with .target, or plain strings)
        clear_first: If True, the first item will replace the current playlist
        config: Runtime config used for Hydrus auth and yt-dlp options

    Returns:
        True if MPV was started, False if items were queued via IPC.
    """
    # Just verify cookies are configured, don't try to set via IPC
    _ensure_ytdl_cookies()

    # Prepare Hydrus auth and yt-dlp options once, reused for every item.
    hydrus_header = _build_hydrus_header(config or {})
    ytdl_opts = _build_ytdl_options(config, hydrus_header)
    hydrus_url = None
    try:
        hydrus_url = get_hydrus_url(config) if config is not None else None
    except Exception:
        hydrus_url = None

    for i, item in enumerate(items):
        # Extract URL/Path and optional display title from heterogeneous inputs.
        target = None
        title = None

        if isinstance(item, dict):
            target = item.get("target") or item.get("url") or item.get("path") or item.get("filename")
            title = item.get("title") or item.get("name")
        elif hasattr(item, "target"):
            target = item.target
            title = getattr(item, "title", None)
        elif isinstance(item, str):
            target = item

        if target:
            # Check if it's a yt-dlp supported URL
            is_ytdlp = False
            if target.startswith("http") and is_url_supported_by_ytdlp(target):
                is_ytdlp = True

            # Use memory:// M3U hack to pass title to MPV
            # Skip for yt-dlp URLs to ensure proper handling
            if title and not is_ytdlp:
                # Sanitize title for M3U (remove newlines)
                safe_title = title.replace('\n', ' ').replace('\r', '')
                m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"
                target_to_send = f"memory://{m3u_content}"
            else:
                target_to_send = target

            # Only the first item may replace the playlist; the rest append.
            mode = "append"
            if clear_first and i == 0:
                mode = "replace"

            # If this is a Hydrus target, set header property and yt-dlp headers before loading
            if hydrus_header and _is_hydrus_target(target_to_send, hydrus_url):
                header_cmd = {"command": ["set_property", "http-header-fields", hydrus_header], "request_id": 199}
                _send_ipc_command(header_cmd, silent=True)
                if ytdl_opts:
                    ytdl_cmd = {"command": ["set_property", "ytdl-raw-options", ytdl_opts], "request_id": 197}
                    _send_ipc_command(ytdl_cmd, silent=True)

            cmd = {"command": ["loadfile", target_to_send, mode], "request_id": 200}
            resp = _send_ipc_command(cmd)

            if resp is None:
                # MPV not running (or died)
                # Start MPV with remaining items
                _start_mpv(items[i:], config=config)
                return True
            elif resp.get("error") == "success":
                # Also set property for good measure
                if title:
                    title_cmd = {"command": ["set_property", "force-media-title", title], "request_id": 201}
                    _send_ipc_command(title_cmd)
                debug(f"Queued: {title or target}")
            else:
                # Queue failure for this item: report and continue with the rest.
                error_msg = str(resp.get('error'))
                debug(f"Failed to queue item: {error_msg}", file=sys.stderr)
    return False
|
||||
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"""Manage and play items in the MPV playlist via IPC."""
|
||||
|
||||
parsed = parse_cmdlet_args(args, CMDLET)
|
||||
|
||||
# Initialize mpv_started flag
|
||||
mpv_started = False
|
||||
|
||||
# Handle positional index argument if provided
|
||||
index_arg = parsed.get("index")
|
||||
url_arg = parsed.get("url")
|
||||
|
||||
# If index_arg is provided but is not an integer, treat it as a URL
|
||||
# This allows .pipe "http://..." without -url flag
|
||||
if index_arg is not None:
|
||||
try:
|
||||
int(index_arg)
|
||||
except ValueError:
|
||||
# Not an integer, treat as URL if url_arg is not set
|
||||
if not url_arg:
|
||||
url_arg = index_arg
|
||||
index_arg = None
|
||||
|
||||
clear_mode = parsed.get("clear")
|
||||
list_mode = parsed.get("list")
|
||||
play_mode = parsed.get("play")
|
||||
pause_mode = parsed.get("pause")
|
||||
save_mode = parsed.get("save")
|
||||
load_mode = parsed.get("load")
|
||||
current_mode = parsed.get("current")
|
||||
|
||||
# Handle --current flag: emit currently playing item to pipeline
|
||||
if current_mode:
|
||||
items = _get_playlist()
|
||||
if items is None:
|
||||
debug("MPV is not running or not accessible.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Find the currently playing item
|
||||
current_item = None
|
||||
for item in items:
|
||||
if item.get("current", False):
|
||||
current_item = item
|
||||
break
|
||||
|
||||
if current_item is None:
|
||||
debug("No item is currently playing.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Build result object with file info
|
||||
title = _extract_title_from_item(current_item)
|
||||
filename = current_item.get("filename", "")
|
||||
|
||||
# Emit the current item to pipeline
|
||||
result_obj = {
|
||||
'file_path': filename,
|
||||
'title': title,
|
||||
'cmdlet_name': '.pipe',
|
||||
'source': 'pipe',
|
||||
'__pipe_index': items.index(current_item),
|
||||
}
|
||||
|
||||
ctx.emit(result_obj)
|
||||
debug(f"Emitted current item: {title}")
|
||||
return 0
|
||||
|
||||
# Handle URL queuing
|
||||
mpv_started = False
|
||||
if url_arg:
|
||||
mpv_started = _queue_items([url_arg])
|
||||
# Auto-play the URL when it's queued via .pipe "url" (without explicit flags)
|
||||
# unless other flags are present
|
||||
if not (clear_mode or play_mode or pause_mode or save_mode or load_mode):
|
||||
if mpv_started:
|
||||
# MPV was just started, wait a moment for it to be ready, then play first item
|
||||
import time
|
||||
time.sleep(0.5)
|
||||
index_arg = "1" # 1-based index for first item
|
||||
play_mode = True
|
||||
else:
|
||||
# MPV was already running, get playlist and play the newly added item
|
||||
playlist = _get_playlist(silent=True)
|
||||
if playlist and len(playlist) > 0:
|
||||
# Auto-play the last item in the playlist (the one we just added)
|
||||
# Use 1-based indexing
|
||||
index_arg = str(len(playlist))
|
||||
play_mode = True
|
||||
else:
|
||||
# Fallback: just list the playlist if we can't determine index
|
||||
list_mode = True
|
||||
|
||||
# Handle Save Playlist
|
||||
if save_mode:
|
||||
playlist_name = index_arg or f"Playlist {subprocess.check_output(['date', '/t'], shell=True).decode().strip()}"
|
||||
# If index_arg was used for name, clear it so it doesn't trigger index logic
|
||||
if index_arg:
|
||||
index_arg = None
|
||||
|
||||
items = _get_playlist()
|
||||
if not items:
|
||||
debug("Cannot save: MPV playlist is empty or MPV is not running.")
|
||||
return 1
|
||||
|
||||
# Clean up items for saving (remove current flag, etc)
|
||||
clean_items = []
|
||||
for item in items:
|
||||
# If title was extracted from memory://, we should probably save the original filename
|
||||
# if it's a URL, or reconstruct a clean object.
|
||||
# Actually, _extract_title_from_item handles the display title.
|
||||
# But for playback, we need the 'filename' (which might be memory://...)
|
||||
# If we save 'memory://...', it will work when loaded back.
|
||||
clean_items.append(item)
|
||||
|
||||
# Use config from context or load it
|
||||
config_data = config if config else {}
|
||||
|
||||
storage_path = get_local_storage_path(config_data)
|
||||
if not storage_path:
|
||||
debug("Local storage path not configured.")
|
||||
return 1
|
||||
|
||||
with LocalLibrarySearchOptimizer(storage_path) as db:
|
||||
if db.save_playlist(playlist_name, clean_items):
|
||||
debug(f"Playlist saved as '{playlist_name}'")
|
||||
return 0
|
||||
else:
|
||||
debug(f"Failed to save playlist '{playlist_name}'")
|
||||
return 1
|
||||
|
||||
# Handle Load Playlist
|
||||
current_playlist_name = None
|
||||
if load_mode:
|
||||
# Use config from context or load it
|
||||
config_data = config if config else {}
|
||||
|
||||
storage_path = get_local_storage_path(config_data)
|
||||
if not storage_path:
|
||||
debug("Local storage path not configured.")
|
||||
return 1
|
||||
|
||||
with LocalLibrarySearchOptimizer(storage_path) as db:
|
||||
if index_arg:
|
||||
try:
|
||||
pl_id = int(index_arg)
|
||||
|
||||
# Handle Delete Playlist (if -clear is also passed)
|
||||
if clear_mode:
|
||||
if db.delete_playlist(pl_id):
|
||||
debug(f"Playlist ID {pl_id} deleted.")
|
||||
# Clear index_arg so we fall through to list mode and show updated list
|
||||
index_arg = None
|
||||
# Don't return, let it list the remaining playlists
|
||||
else:
|
||||
debug(f"Failed to delete playlist ID {pl_id}.")
|
||||
return 1
|
||||
else:
|
||||
# Handle Load Playlist
|
||||
result = db.get_playlist_by_id(pl_id)
|
||||
if result is None:
|
||||
debug(f"Playlist ID {pl_id} not found.")
|
||||
return 1
|
||||
|
||||
name, items = result
|
||||
current_playlist_name = name
|
||||
|
||||
# Queue items (replacing current playlist)
|
||||
if items:
|
||||
_queue_items(items, clear_first=True)
|
||||
else:
|
||||
# Empty playlist, just clear
|
||||
_send_ipc_command({"command": ["playlist-clear"]}, silent=True)
|
||||
|
||||
# Switch to list mode to show the result
|
||||
list_mode = True
|
||||
index_arg = None
|
||||
# Fall through to list logic
|
||||
|
||||
except ValueError:
|
||||
debug(f"Invalid playlist ID: {index_arg}")
|
||||
return 1
|
||||
|
||||
# If we deleted or didn't have an index, list playlists
|
||||
if not index_arg:
|
||||
playlists = db.get_playlists()
|
||||
|
||||
if not playlists:
|
||||
debug("No saved playlists found.")
|
||||
return 0
|
||||
|
||||
table = ResultTable("Saved Playlists")
|
||||
for i, pl in enumerate(playlists):
|
||||
item_count = len(pl.get('items', []))
|
||||
row = table.add_row()
|
||||
# row.add_column("ID", str(pl['id'])) # Hidden as per user request
|
||||
row.add_column("Name", pl['name'])
|
||||
row.add_column("Items", str(item_count))
|
||||
row.add_column("Updated", pl['updated_at'])
|
||||
|
||||
# Set the playlist items as the result object for this row
|
||||
# When user selects @N, they get the list of items
|
||||
# We also set the source command to .pipe -load <ID> so it loads it
|
||||
table.set_row_selection_args(i, ["-load", str(pl['id'])])
|
||||
|
||||
table.set_source_command(".pipe")
|
||||
|
||||
# Register results
|
||||
ctx.set_last_result_table_overlay(table, [p['items'] for p in playlists])
|
||||
ctx.set_current_stage_table(table)
|
||||
|
||||
print(table)
|
||||
return 0
|
||||
|
||||
# Handle Play/Pause commands (but skip if we have index_arg to play a specific item)
|
||||
if play_mode and index_arg is None:
|
||||
cmd = {"command": ["set_property", "pause", False], "request_id": 103}
|
||||
resp = _send_ipc_command(cmd)
|
||||
if resp and resp.get("error") == "success":
|
||||
debug("Resumed playback")
|
||||
return 0
|
||||
else:
|
||||
debug("Failed to resume playback (MPV not running?)", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if pause_mode:
|
||||
cmd = {"command": ["set_property", "pause", True], "request_id": 104}
|
||||
resp = _send_ipc_command(cmd)
|
||||
if resp and resp.get("error") == "success":
|
||||
debug("Paused playback")
|
||||
return 0
|
||||
else:
|
||||
debug("Failed to pause playback (MPV not running?)", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Handle Clear All command (no index provided)
|
||||
if clear_mode and index_arg is None:
|
||||
cmd = {"command": ["playlist-clear"], "request_id": 105}
|
||||
resp = _send_ipc_command(cmd)
|
||||
if resp and resp.get("error") == "success":
|
||||
debug("Playlist cleared")
|
||||
return 0
|
||||
else:
|
||||
debug("Failed to clear playlist (MPV not running?)", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Handle piped input (add to playlist)
|
||||
# Skip adding if -list is specified (user just wants to see current playlist)
|
||||
if result and not list_mode and not url_arg:
|
||||
# If result is a list of items, add them to playlist
|
||||
items_to_add = []
|
||||
if isinstance(result, list):
|
||||
items_to_add = result
|
||||
elif isinstance(result, dict):
|
||||
items_to_add = [result]
|
||||
|
||||
if _queue_items(items_to_add, config=config):
|
||||
mpv_started = True
|
||||
|
||||
if items_to_add:
|
||||
# If we added items, we might want to play the first one if nothing is playing?
|
||||
# For now, just list the playlist
|
||||
pass
|
||||
|
||||
# Get playlist from MPV
|
||||
items = _get_playlist()
|
||||
|
||||
if items is None:
|
||||
if mpv_started:
|
||||
# MPV was just started, retry getting playlist after a brief delay
|
||||
import time
|
||||
time.sleep(0.3)
|
||||
items = _get_playlist(silent=True)
|
||||
|
||||
if items is None:
|
||||
# Still can't connect, but MPV is starting
|
||||
debug("MPV is starting up...")
|
||||
return 0
|
||||
else:
|
||||
debug("MPV is not running. Starting new instance...")
|
||||
_start_mpv([], config=config)
|
||||
return 0
|
||||
|
||||
if not items:
|
||||
debug("MPV playlist is empty.")
|
||||
return 0
|
||||
|
||||
# If index is provided, perform action (Play or Clear)
|
||||
if index_arg is not None:
|
||||
try:
|
||||
# Handle 1-based index
|
||||
idx = int(index_arg) - 1
|
||||
|
||||
if idx < 0 or idx >= len(items):
|
||||
debug(f"Index {index_arg} out of range (1-{len(items)}).")
|
||||
return 1
|
||||
|
||||
item = items[idx]
|
||||
title = _extract_title_from_item(item)
|
||||
filename = item.get("filename", "") if isinstance(item, dict) else ""
|
||||
hydrus_header = _build_hydrus_header(config or {})
|
||||
hydrus_url = None
|
||||
try:
|
||||
hydrus_url = get_hydrus_url(config) if config is not None else None
|
||||
except Exception:
|
||||
hydrus_url = None
|
||||
|
||||
if clear_mode:
|
||||
# Remove item
|
||||
cmd = {"command": ["playlist-remove", idx], "request_id": 101}
|
||||
resp = _send_ipc_command(cmd)
|
||||
if resp and resp.get("error") == "success":
|
||||
debug(f"Removed: {title}")
|
||||
# Refresh items for listing
|
||||
items = _get_playlist() or []
|
||||
list_mode = True
|
||||
index_arg = None
|
||||
else:
|
||||
debug(f"Failed to remove item: {resp.get('error') if resp else 'No response'}")
|
||||
return 1
|
||||
else:
|
||||
# Play item
|
||||
if hydrus_header and _is_hydrus_target(filename, hydrus_url):
|
||||
header_cmd = {"command": ["set_property", "http-header-fields", hydrus_header], "request_id": 198}
|
||||
_send_ipc_command(header_cmd, silent=True)
|
||||
cmd = {"command": ["playlist-play-index", idx], "request_id": 102}
|
||||
resp = _send_ipc_command(cmd)
|
||||
if resp and resp.get("error") == "success":
|
||||
# Ensure playback starts (unpause)
|
||||
unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
|
||||
_send_ipc_command(unpause_cmd)
|
||||
|
||||
debug(f"Playing: {title}")
|
||||
|
||||
# Monitor logs briefly for errors (e.g. ytdl failures)
|
||||
_monitor_mpv_logs(3.0)
|
||||
return 0
|
||||
else:
|
||||
debug(f"Failed to play item: {resp.get('error') if resp else 'No response'}")
|
||||
return 1
|
||||
except ValueError:
|
||||
debug(f"Invalid index: {index_arg}")
|
||||
return 1
|
||||
|
||||
# List items (Default action or after clear)
|
||||
if list_mode or (index_arg is None and not url_arg):
|
||||
if not items:
|
||||
debug("MPV playlist is empty.")
|
||||
return 0
|
||||
|
||||
# Use the loaded playlist name if available, otherwise default
|
||||
# Note: current_playlist_name is defined in the load_mode block if a playlist was loaded
|
||||
try:
|
||||
table_title = current_playlist_name or "MPV Playlist"
|
||||
except NameError:
|
||||
table_title = "MPV Playlist"
|
||||
|
||||
table = ResultTable(table_title)
|
||||
|
||||
for i, item in enumerate(items):
|
||||
is_current = item.get("current", False)
|
||||
title = _extract_title_from_item(item)
|
||||
store = _infer_store_from_playlist_item(item)
|
||||
filename = item.get("filename", "") if isinstance(item, dict) else ""
|
||||
display_loc = _format_playlist_location(filename)
|
||||
|
||||
# Truncate if too long
|
||||
if len(title) > 80:
|
||||
title = title[:77] + "..."
|
||||
|
||||
row = table.add_row()
|
||||
row.add_column("Current", "*" if is_current else "")
|
||||
row.add_column("Store", store)
|
||||
row.add_column("Title", title)
|
||||
row.add_column("Filename", display_loc)
|
||||
|
||||
table.set_row_selection_args(i, [str(i + 1)])
|
||||
|
||||
table.set_source_command(".pipe")
|
||||
|
||||
# Register results with pipeline context so @N selection works
|
||||
ctx.set_last_result_table_overlay(table, items)
|
||||
ctx.set_current_stage_table(table)
|
||||
|
||||
print(table)
|
||||
|
||||
return 0
|
||||
|
||||
def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None) -> None:
    """Start a fresh MPV instance with an IPC server and queue *items* into it.

    Any existing MPV processes are killed first so the new instance owns the
    IPC pipe. Blocks briefly (up to ~4 s) waiting for the IPC socket to accept
    connections before queueing the items.

    Args:
        items: Playlist entries to queue once MPV is up (may be empty).
        config: Optional app config used for Hydrus headers / ytdl options.
    """
    import subprocess
    import time as _time_module

    # Kill any existing MPV processes to ensure a clean start.
    # NOTE(review): taskkill is Windows-only; on other platforms this call
    # fails and is deliberately swallowed by the except below.
    try:
        subprocess.run(['taskkill', '/IM', 'mpv.exe', '/F'],
                       stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL,
                       stderr=subprocess.DEVNULL, timeout=2)
        _time_module.sleep(0.5)  # Wait for process to die
    except Exception:
        pass

    ipc_pipe = get_ipc_pipe_path()

    # Start MPV in idle mode with IPC server.
    cmd = ['mpv', f'--input-ipc-server={ipc_pipe}', '--idle', '--force-window']
    cmd.append('--ytdl-format=bestvideo[height<=?1080]+bestaudio/best[height<=?1080]')

    hydrus_header = _build_hydrus_header(config or {})
    ytdl_opts = _build_ytdl_options(config, hydrus_header)

    cookies_path = get_cookies_file_path()
    if cookies_path:
        # Hoisted out of the f-string: backslashes are not allowed inside
        # f-string expressions before Python 3.12.
        cookies_display = cookies_path.replace('\\', '/')
        debug(f"Starting MPV with cookies file: {cookies_display}")
    else:
        debug("Starting MPV with browser cookies: chrome")

    if ytdl_opts:
        cmd.append(f'--ytdl-raw-options={ytdl_opts}')

    # Append the header flag BEFORE logging so the logged command matches
    # what is actually executed (previously it was added after the log line).
    if hydrus_header:
        cmd.append(f'--http-header-fields={hydrus_header}')

    try:
        kwargs = {}
        if platform.system() == 'Windows':
            kwargs['creationflags'] = 0x00000008  # DETACHED_PROCESS

        # Log the complete MPV command being executed.
        debug(f"DEBUG: Full MPV command: {' '.join(cmd)}")

        # Use DEVNULL for stdout/stderr: the Popen handle is discarded, so
        # nothing ever reads those streams — an unread PIPE can fill up and
        # block the detached MPV process.
        subprocess.Popen(cmd, stdin=subprocess.DEVNULL,
                         stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                         **kwargs)
        debug("Started MPV process")

        # Wait for the IPC pipe to be ready (20 retries * 0.2 s ≈ 4 s max).
        import time
        max_retries = 20
        for _ in range(max_retries):
            time.sleep(0.2)
            client = MPVIPCClient(socket_path=ipc_pipe)
            if client.connect():
                client.disconnect()
                break
        else:
            debug("Timed out waiting for MPV IPC connection", file=sys.stderr)
            return

        # Queue items via IPC.
        if items:
            _queue_items(items, config=config)

    except Exception as e:
        debug(f"Error starting MPV: {e}", file=sys.stderr)
||||
|
||||
# Argument schema for the .pipe cmdlet, kept as a separate list so the
# individual flags are easy to scan and extend.
_PIPE_ARGS = [
    # Accepts either a 1-based playlist index or a URL, hence type "string".
    CmdletArg(
        name="index",
        type="string",
        description="Index of item to play/clear, or URL to queue",
        required=False,
    ),
    CmdletArg(
        name="url",
        type="string",
        description="URL to queue",
        required=False,
    ),
    CmdletArg(
        name="clear",
        type="flag",
        description="Remove the selected item, or clear entire playlist if no index provided",
    ),
    CmdletArg(
        name="list",
        type="flag",
        description="List items (default)",
    ),
    CmdletArg(
        name="play",
        type="flag",
        description="Resume playback",
    ),
    CmdletArg(
        name="pause",
        type="flag",
        description="Pause playback",
    ),
    CmdletArg(
        name="save",
        type="flag",
        description="Save current playlist to database",
    ),
    CmdletArg(
        name="load",
        type="flag",
        description="List saved playlists",
    ),
    CmdletArg(
        name="current",
        type="flag",
        description="Emit the currently playing item to pipeline for further processing",
    ),
]

# Registry metadata for the .pipe cmdlet; exec points at _run above.
CMDLET = Cmdlet(
    name=".pipe",
    aliases=["pipe", "playlist", "queue", "ls-pipe"],
    summary="Manage and play items in the MPV playlist via IPC",
    usage=".pipe [index|url] [-current] [-clear] [-list] [-url URL]",
    args=_PIPE_ARGS,
    exec=_run,
)
|
||||
|
||||
320
cmdnats/worker.py
Normal file
320
cmdnats/worker.py
Normal file
@@ -0,0 +1,320 @@
|
||||
"""Worker cmdlet: Display workers table in ResultTable format."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence, List
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from cmdlets import register
|
||||
from cmdlets._shared import Cmdlet, CmdletArg
|
||||
import pipeline as ctx
|
||||
from helper.logger import log
|
||||
from config import get_local_storage_path
|
||||
|
||||
|
||||
# Registry metadata for the .worker cmdlet (consumed by the cmdlet loader
# and by the -?/--help handler in _run below).
CMDLET = Cmdlet(
    name=".worker",
    summary="Display workers table in result table format.",
    usage=".worker [status] [-limit N] [@N]",
    args=[
        CmdletArg("status", description="Filter by status: running, completed, error (default: all)"),
        CmdletArg("limit", type="integer", description="Limit results (default: 100)"),
        CmdletArg("@N", description="Select worker by index (1-based) and display full logs"),
    ],
    details=[
        "- Shows all background worker tasks and their output",
        "- Can filter by status: running, completed, error",
        "- Search result stdout is captured from each worker",
        "- Use @N to select a specific worker by index and display its full logs",
        "Examples:",
        ".worker                      # Show all workers",
        ".worker running              # Show running workers only",
        ".worker completed -limit 50  # Show 50 most recent completed workers",
        ".worker @3                   # Show full logs for the 3rd worker",
        ".worker running @2           # Show full logs for the 2nd running worker",
    ],
)
|
||||
|
||||
|
||||
@register([".worker", "worker", "workers"])
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"""Display workers table or show detailed logs for a specific worker."""
|
||||
args_list = [str(arg) for arg in (args or [])]
|
||||
selection_indices = ctx.get_last_selection()
|
||||
selection_requested = bool(selection_indices) and isinstance(result, list) and len(result) > 0
|
||||
|
||||
# Parse arguments for list view
|
||||
status_filter: str | None = None
|
||||
limit = 100
|
||||
clear_requested = False
|
||||
worker_id_arg: str | None = None
|
||||
i = 0
|
||||
while i < len(args_list):
|
||||
arg = args_list[i]
|
||||
low = arg.lower()
|
||||
if low in {"-limit", "--limit"} and i + 1 < len(args_list):
|
||||
try:
|
||||
limit = max(1, int(args_list[i + 1]))
|
||||
except ValueError:
|
||||
limit = 100
|
||||
i += 2
|
||||
elif low in {"-id", "--id"} and i + 1 < len(args_list):
|
||||
worker_id_arg = args_list[i + 1]
|
||||
i += 2
|
||||
elif low in {"-clear", "--clear"}:
|
||||
clear_requested = True
|
||||
i += 1
|
||||
elif low in {"running", "completed", "error", "cancelled"}:
|
||||
status_filter = low
|
||||
i += 1
|
||||
elif not arg.startswith("-"):
|
||||
status_filter = low
|
||||
i += 1
|
||||
else:
|
||||
i += 1
|
||||
|
||||
try:
|
||||
if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
|
||||
log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
|
||||
return 0
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
library_root = get_local_storage_path(config or {})
|
||||
if not library_root:
|
||||
log("No library root configured", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
try:
|
||||
from helper.local_library import LocalLibraryDB
|
||||
with LocalLibraryDB(library_root) as db:
|
||||
if clear_requested:
|
||||
count = db.clear_finished_workers()
|
||||
log(f"Cleared {count} finished workers.")
|
||||
return 0
|
||||
|
||||
if worker_id_arg:
|
||||
worker = db.get_worker(worker_id_arg)
|
||||
if worker:
|
||||
events = []
|
||||
try:
|
||||
wid = worker.get("worker_id")
|
||||
if wid and hasattr(db, "get_worker_events"):
|
||||
events = db.get_worker_events(wid)
|
||||
except Exception:
|
||||
pass
|
||||
_emit_worker_detail(worker, events)
|
||||
return 0
|
||||
else:
|
||||
log(f"Worker not found: {worker_id_arg}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if selection_requested:
|
||||
return _render_worker_selection(db, result)
|
||||
return _render_worker_list(db, status_filter, limit)
|
||||
except Exception as exc:
|
||||
log(f"Workers query failed: {exc}", file=sys.stderr)
|
||||
import traceback
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
return 1
|
||||
|
||||
|
||||
def _render_worker_list(db, status_filter: str | None, limit: int) -> int:
    """Emit one result-table row per worker, optionally filtered by status.

    Note: *limit* is applied by the DB query before the status filter, so a
    filtered listing may show fewer than *limit* rows.
    """
    workers = db.get_all_workers(limit=limit)
    if status_filter:
        workers = [w for w in workers if str(w.get("status", "")).lower() == status_filter]

    if not workers:
        log("No workers found", file=sys.stderr)
        return 0

    for w in workers:
        started_raw = w.get("started_at", "")
        ended_raw = w.get("completed_at", w.get("last_updated", ""))

        ctx.emit({
            "columns": [
                ("Status", w.get("status", "")),
                ("Pipe", _summarize_pipe(w.get("pipe"))),
                ("Date", _extract_date(started_raw)),
                ("Start Time", _format_event_timestamp(started_raw)),
                ("End Time", _format_event_timestamp(ended_raw)),
            ],
            # Full record kept so @N selection can recover the worker later.
            "__worker_metadata": w,
            "_selection_args": ["-id", w.get("worker_id")],
        })
    return 0
|
||||
|
||||
def _render_worker_selection(db, selected_items: Any) -> int:
    """Show detailed logs for each worker row the user selected via @N."""
    if not isinstance(selected_items, list):
        log("Selection payload missing", file=sys.stderr)
        return 1

    emitted = False
    for payload in selected_items:
        record = _resolve_worker_record(db, payload)
        if not record:
            continue
        # Events are best-effort; fall back to an empty list on any failure.
        try:
            events = db.get_worker_events(record.get("worker_id")) if hasattr(db, "get_worker_events") else []
        except Exception:
            events = []
        _emit_worker_detail(record, events)
        emitted = True

    if emitted:
        return 0
    log("Selected rows no longer exist", file=sys.stderr)
    return 1
|
||||
|
||||
def _resolve_worker_record(db, payload: Any) -> Dict[str, Any] | None:
|
||||
if not isinstance(payload, dict):
|
||||
return None
|
||||
worker_data = payload.get("__worker_metadata")
|
||||
worker_id = None
|
||||
if isinstance(worker_data, dict):
|
||||
worker_id = worker_data.get("worker_id")
|
||||
else:
|
||||
worker_id = payload.get("worker_id")
|
||||
worker_data = None
|
||||
if worker_id:
|
||||
fresh = db.get_worker(worker_id)
|
||||
if fresh:
|
||||
return fresh
|
||||
return worker_data if isinstance(worker_data, dict) else None
|
||||
|
||||
|
||||
def _emit_worker_detail(worker: Dict[str, Any], events: List[Dict[str, Any]]) -> None:
    """Emit one table row per stdout log line of *worker*.

    Lines matching the standard ``YYYY-MM-DD HH:MM:SS - name - level - message``
    logger format are split into (time, level, message); anything else is shown
    verbatim at INFO level. *events* is currently unused (stdout logs only).
    """
    stdout_text = worker.get("stdout", "") or ""

    for raw_line in stdout_text.splitlines():
        text = raw_line.strip()
        if not text:
            continue

        # Defaults used when the line doesn't match the logger format.
        ts_display, level, message = "", "INFO", text

        try:
            fields = text.split(" - ", 3)
            if len(fields) >= 4:
                # Full "ts - name - level - message" form; the name is dropped.
                ts_display = _format_event_timestamp(fields[0])
                level = fields[2]
                message = fields[3]
            elif len(fields) == 3:
                # Shorter form with name or level missing.
                ts_display = _format_event_timestamp(fields[0])
                level = fields[1]
                message = fields[2]
        except Exception:
            pass

        ctx.emit({
            "columns": [
                ("Time", ts_display),
                ("Level", level),
                ("Message", message),
            ]
        })

    # Worker events are intentionally not emitted yet — stdout logs only.
|
||||
|
||||
def _summarize_pipe(pipe_value: Any, limit: int = 60) -> str:
|
||||
text = str(pipe_value or "").strip()
|
||||
if not text:
|
||||
return "(none)"
|
||||
return text if len(text) <= limit else text[: limit - 3] + "..."
|
||||
|
||||
|
||||
def _format_event_timestamp(raw_timestamp: Any) -> str:
    """Return an ``HH:MM:SS`` display string for a raw timestamp value."""
    parsed = _parse_to_local(raw_timestamp)
    if parsed:
        return parsed.strftime("%H:%M:%S")

    # Fallback: slice the time portion straight out of the text.
    if not raw_timestamp:
        return "--:--:--"
    text = str(raw_timestamp)
    for sep in ("T", " "):
        if sep in text:
            text = text.split(sep, 1)[1]
            break
    return text[:8] if len(text) >= 8 else text
|
||||
|
||||
def _parse_to_local(timestamp_str: Any) -> datetime | None:
|
||||
if not timestamp_str:
|
||||
return None
|
||||
text = str(timestamp_str).strip()
|
||||
if not text:
|
||||
return None
|
||||
|
||||
try:
|
||||
# Check for T separator (Python isoformat - Local time)
|
||||
if 'T' in text:
|
||||
return datetime.fromisoformat(text)
|
||||
|
||||
# Check for space separator (SQLite CURRENT_TIMESTAMP - UTC)
|
||||
# Format: YYYY-MM-DD HH:MM:SS
|
||||
if ' ' in text:
|
||||
# Assume UTC
|
||||
dt = datetime.strptime(text, "%Y-%m-%d %H:%M:%S")
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt.astimezone() # Convert to local
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _extract_date(raw_timestamp: Any) -> str:
    """Return an ``MM-DD-YY`` date string for a raw timestamp value."""
    parsed = _parse_to_local(raw_timestamp)
    if parsed:
        return parsed.strftime("%m-%d-%y")

    # Fallback: slice the YYYY-MM-DD portion out of the text and reformat.
    if not raw_timestamp:
        return ""
    text = str(raw_timestamp)
    date_part = text
    for sep in ("T", " "):
        if sep in text:
            date_part = text.split(sep, 1)[0]
            break

    # Convert YYYY-MM-DD to MM-DD-YY; on any mismatch return the raw slice.
    try:
        pieces = date_part.split("-")
        if len(pieces) == 3:
            year, month, day = pieces
            return f"{month}-{day}-{year[2:]}"
    except Exception:
        pass
    return date_part
|
||||
Reference in New Issue
Block a user