df
This commit is contained in:
@@ -1,220 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Callable, Dict, List, Optional, Sequence
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.models import PipelineStageContext
|
||||
from SYS.rich_display import capture_rich_output
|
||||
|
||||
|
||||
CmdletCallable = Callable[[Any, Sequence[str], Dict[str, Any]], int]
|
||||
|
||||
|
||||
@dataclass(slots=True)
class CmdletRunResult:
    """Programmatic result for a single cmdlet invocation."""

    # Resolved/normalized cmdlet name (or the callable's __name__ fallback).
    name: str
    # Positional arguments the cmdlet was invoked with.
    args: Sequence[str]
    # Process-style exit status returned by the cmdlet; 0 means success.
    exit_code: int = 0
    # Items the cmdlet emitted through its PipelineStageContext.
    emitted: List[Any] = field(default_factory=list)

    # Best-effort: cmdlets can publish tables/items via pipeline state even when
    # they don't emit pipeline items.
    result_table: Optional[Any] = None
    result_items: List[Any] = field(default_factory=list)
    result_subject: Optional[Any] = None

    # Captured text output; populated when run_cmdlet(capture_output=True).
    stdout: str = ""
    stderr: str = ""
    # Human-readable failure description (unknown command or raised exception).
    error: Optional[str] = None
|
||||
|
||||
|
||||
def _normalize_cmd_name(name: str) -> str:
|
||||
return str(name or "").replace("_", "-").strip().lower()
|
||||
|
||||
|
||||
def resolve_cmdlet(cmd_name: str) -> Optional[CmdletCallable]:
    """Resolve a cmdlet callable by name from the registry (aliases supported)."""
    # Best-effort: make sure the registry has been populated before lookup.
    with contextlib.suppress(Exception):
        from SYS.cmdlet_catalog import ensure_registry_loaded

        ensure_registry_loaded()

    try:
        import cmdlet as registry

        return registry.get(cmd_name)
    except Exception:
        # Missing package or failed lookup both resolve to "unknown command".
        return None
|
||||
|
||||
|
||||
def run_cmdlet(
    cmd: str | CmdletCallable,
    args: Sequence[str] | None,
    config: Dict[str, Any],
    *,
    piped: Any = None,
    isolate: bool = True,
    capture_output: bool = True,
    stage_index: int = 0,
    total_stages: int = 1,
    pipe_index: Optional[int] = None,
    worker_id: Optional[str] = None,
) -> CmdletRunResult:
    """Run a single cmdlet programmatically and return structured results.

    This is intended for TUI/webapp consumers that want cmdlet behavior without
    going through the interactive CLI loop.

    Args:
        cmd: Cmdlet name (resolved via the registry) or a callable invoked directly.
        args: Positional arguments for the cmdlet; None is treated as empty.
        config: Configuration mapping passed through to the cmdlet unchanged.
        piped: Value handed to the cmdlet as its first (piped-input) argument.
        isolate: Run inside a fresh pipeline state so global state is untouched.
        capture_output: Capture stdout/stderr (plain and Rich) into the result.
        stage_index: Zero-based position of this stage in the pipeline.
        total_stages: Total number of stages in the pipeline.
        pipe_index: Optional pipe slot index forwarded to the stage context.
        worker_id: Optional worker identifier forwarded to the stage context.

    Returns:
        A CmdletRunResult; exit_code is 1 with `error` set when the command is
        unknown or the cmdlet raised.

    Notes:
    - When `isolate=True` (default) this runs inside `ctx.new_pipeline_state()` so
      global CLI pipeline state is not mutated.
    - Output capturing covers both normal `print()` and Rich output via
      `capture_rich_output()`.
    - All pipeline-state bookkeeping calls are wrapped in best-effort
      try/except blocks so state helpers can never break the run itself.
    """

    normalized_args: Sequence[str] = list(args or [])

    # Accept either a registry name or a ready-made callable.
    if isinstance(cmd, str):
        name = _normalize_cmd_name(cmd)
        cmd_fn = resolve_cmdlet(name)
    else:
        name = getattr(cmd, "__name__", "cmdlet")
        cmd_fn = cmd

    result = CmdletRunResult(name=name, args=normalized_args)

    # Unknown command: report via the result object instead of raising.
    if not callable(cmd_fn):
        result.exit_code = 1
        result.error = f"Unknown command: {name}"
        result.stderr = result.error
        return result

    stage_ctx = PipelineStageContext(
        stage_index=int(stage_index),
        total_stages=int(total_stages),
        pipe_index=pipe_index,
        worker_id=worker_id,
    )

    stdout_buffer = io.StringIO()
    stderr_buffer = io.StringIO()

    # Reconstructed command line, used for the stage/command text markers.
    stage_text = " ".join([name, *list(normalized_args)]).strip()

    # Isolation swaps in a private pipeline state for the duration of the run.
    state_cm = ctx.new_pipeline_state() if isolate else contextlib.nullcontext()

    with state_cm:
        # Keep behavior predictable: start from a clean slate.
        try:
            ctx.reset()
        except Exception:
            pass

        # Publish stage placement so the cmdlet sees correct pipeline metadata.
        try:
            ctx.set_stage_context(stage_ctx)
        except Exception:
            pass

        try:
            ctx.set_current_cmdlet_name(name)
        except Exception:
            pass

        try:
            ctx.set_current_stage_text(stage_text)
        except Exception:
            pass

        try:
            ctx.set_current_command_text(stage_text)
        except Exception:
            pass

        try:
            # Rich writes through its own console; capture_rich_output routes it
            # into the same buffers as the plain stdout/stderr redirection below.
            run_cm = (
                capture_rich_output(stdout=stdout_buffer, stderr=stderr_buffer)
                if capture_output
                else contextlib.nullcontext()
            )
            with run_cm:
                with (
                    contextlib.redirect_stdout(stdout_buffer)
                    if capture_output
                    else contextlib.nullcontext()
                ):
                    with (
                        contextlib.redirect_stderr(stderr_buffer)
                        if capture_output
                        else contextlib.nullcontext()
                    ):
                        # Cmdlet protocol: (piped, args, config) -> int exit code.
                        result.exit_code = int(cmd_fn(piped, list(normalized_args), config))
        except Exception as exc:
            # Convert any cmdlet failure into a structured error result.
            result.exit_code = 1
            result.error = f"{type(exc).__name__}: {exc}"
        finally:
            # Snapshot captured text even when the cmdlet raised.
            result.stdout = stdout_buffer.getvalue()
            result.stderr = stderr_buffer.getvalue()

        # Prefer cmdlet emits (pipeline semantics).
        try:
            result.emitted = list(stage_ctx.emits or [])
        except Exception:
            result.emitted = []

        # Mirror CLI behavior: if cmdlet emitted items and there is no overlay table,
        # make emitted items the last result items for downstream consumers.
        try:
            has_overlay = bool(ctx.get_display_table())
        except Exception:
            has_overlay = False

        if result.emitted and not has_overlay:
            try:
                ctx.set_last_result_items_only(list(result.emitted))
            except Exception:
                pass

        # Best-effort snapshot of visible results.
        try:
            result.result_table = (
                ctx.get_display_table() or ctx.get_current_stage_table() or ctx.get_last_result_table()
            )
        except Exception:
            result.result_table = None

        try:
            result.result_items = list(ctx.get_last_result_items() or [])
        except Exception:
            result.result_items = []

        try:
            result.result_subject = ctx.get_last_result_subject()
        except Exception:
            result.result_subject = None

        # Cleanup stage-local markers.
        try:
            ctx.clear_current_stage_text()
        except Exception:
            pass
        try:
            ctx.clear_current_cmdlet_name()
        except Exception:
            pass
        try:
            ctx.clear_current_command_text()
        except Exception:
            pass
        try:
            ctx.set_stage_context(None)
        except Exception:
            pass

    return result
|
||||
1116
SYS/download.py
1116
SYS/download.py
File diff suppressed because it is too large
Load Diff
1819
SYS/metadata.py
1819
SYS/metadata.py
File diff suppressed because it is too large
Load Diff
234
SYS/tasks.py
234
SYS/tasks.py
@@ -1,234 +0,0 @@
|
||||
"""Background task handling and IPC helpers for mpv integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from SYS.logger import log
|
||||
import threading
|
||||
import time
|
||||
from typing import IO, Iterable
|
||||
|
||||
|
||||
def connect_ipc(path: str, timeout: float = 5.0) -> IO[bytes] | None:
    """Connect to the mpv IPC server located at *path*.

    Retries every 50 ms until the endpoint is reachable or *timeout* seconds
    elapse.  Returns an unbuffered binary read/write file object on success,
    or None when *path* is empty or the deadline expires.

    On Windows the path is a named pipe opened as a file; elsewhere it is a
    Unix domain socket.
    """
    if not path:
        return None
    # monotonic() is immune to wall-clock adjustments, keeping the deadline stable.
    deadline = time.monotonic() + timeout
    if os.name == "nt":
        # mpv exposes a named pipe on Windows. Keep retrying until it is ready.
        while True:
            try:
                return open(path, "r+b", buffering=0)
            except FileNotFoundError:
                if time.monotonic() > deadline:
                    return None
                time.sleep(0.05)
            except OSError as exc:  # Pipe busy
                # Windows named pipes can intermittently raise EINVAL while the pipe exists
                # but is not ready/accepting connections yet.
                if exc.errno not in (errno.ENOENT,
                                     errno.EPIPE,
                                     errno.EBUSY,
                                     errno.EINVAL):
                    raise
                if time.monotonic() > deadline:
                    return None
                time.sleep(0.05)
    else:
        sock = socket.socket(socket.AF_UNIX)
        connected = False
        try:
            while True:
                try:
                    sock.connect(path)
                    connected = True
                    return sock.makefile("r+b", buffering=0)
                except FileNotFoundError:
                    if time.monotonic() > deadline:
                        return None
                    time.sleep(0.05)
                except OSError as exc:
                    if exc.errno not in (errno.ENOENT, errno.ECONNREFUSED):
                        raise
                    if time.monotonic() > deadline:
                        return None
                    time.sleep(0.05)
        finally:
            # Bug fix: the original leaked the socket fd on timeout/raise paths.
            # On success the makefile() wrapper keeps the socket alive.
            if not connected:
                sock.close()
|
||||
|
||||
|
||||
def ipc_sender(ipc: IO[bytes] | None):
    """Create a helper function for sending script messages via IPC.

    When *ipc* is None the returned callable silently discards events.
    Otherwise it serializes ``["script-message", event, payload-json]`` as a
    newline-terminated UTF-8 JSON line and writes it under a lock so multiple
    threads can share one pipe; write failures are swallowed.
    """
    if ipc is None:

        def _discard(_event: str, _payload: dict) -> None:
            return None

        return _discard

    write_lock = threading.Lock()

    def _emit(event: str, payload: dict) -> None:
        # The payload is double-encoded: mpv script-message args are strings.
        envelope = {"command": ["script-message", event, json.dumps(payload)]}
        wire = json.dumps(envelope, ensure_ascii=False).encode("utf-8") + b"\n"
        with write_lock:
            try:
                ipc.write(wire)
                ipc.flush()
            except OSError:
                # A dead pipe is non-fatal; drop the notification.
                pass

    return _emit
|
||||
|
||||
|
||||
def iter_stream(stream: Iterable[str]) -> Iterable[str]:
    """Yield each line of *stream* with trailing CR/LF characters stripped."""
    yield from (line.rstrip("\r\n") for line in stream)
|
||||
|
||||
|
||||
def _run_task(args, parser) -> int:
    """Run a background command, streaming its output as mpv IPC task events.

    Spawns `args.command` as a subprocess, pumps its stdout/stderr line by line
    to the mpv IPC endpoint (`downlow-task-event` script messages tagged with
    `args.task_id`), and mirrors aggregated output to this process's own
    stdout/stderr when done.  Returns the child's exit code, or 1 on setup
    failure.

    NOTE(review): `args` is presumably an argparse Namespace with `command`,
    `env`, `ipc`, `ipc_timeout`, `task_id`, `cwd` attributes, and `parser` an
    argparse parser whose `.error()` exits — confirm against the caller.
    """
    if not args.command:
        parser.error(
            'run-task requires a command to execute (use "--" before the command).'
        )
    # Child environment: inherit ours, then apply KEY=VALUE overrides.
    env = os.environ.copy()
    for entry in args.env:
        key, sep, value = entry.partition("=")
        if not sep:
            parser.error(f"Invalid environment variable definition: {entry!r}")
        env[key] = value
    command = list(args.command)
    # argparse may leave the "--" separator as the first token; drop it.
    if command and command[0] == "--":
        command.pop(0)
    # notifier is a no-op when the IPC connection could not be established.
    notifier = ipc_sender(connect_ipc(args.ipc, timeout=args.ipc_timeout))
    if not command:
        notifier(
            "downlow-task-event",
            {
                "id": args.task_id,
                "event": "error",
                "message": "No command provided after separator",
            },
        )
        log("[downlow.py] No command provided for run-task", file=sys.stderr)
        return 1
    # Map generic "python" invocations to the exact interpreter running us.
    if command and isinstance(command[0], str) and sys.executable:
        first = command[0].lower()
        if first in {"python",
                     "python3",
                     "py",
                     "python.exe",
                     "python3.exe",
                     "py.exe"}:
            command[0] = sys.executable
    if os.environ.get("DOWNLOW_DEBUG"):
        log(f"Launching command: {command}", file=sys.stderr)
    notifier(
        "downlow-task-event",
        {
            "id": args.task_id,
            "event": "start",
            "command": command,
            "cwd": args.cwd or os.getcwd(),
        },
    )

    popen_kwargs = {}
    if os.name == "nt":
        # Avoid flashing a console window when spawning console-subsystem executables.
        flags = 0
        try:
            flags |= int(getattr(subprocess, "CREATE_NO_WINDOW", 0x08000000))
        except Exception:
            # Fall back to the raw Windows constant if the attribute is missing.
            flags |= 0x08000000
        popen_kwargs["creationflags"] = flags
        try:
            si = subprocess.STARTUPINFO()
            si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            si.wShowWindow = subprocess.SW_HIDE
            popen_kwargs["startupinfo"] = si
        except Exception:
            pass
    try:
        # text=True + bufsize=1 gives line-buffered text pipes for the pumps.
        process = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=args.cwd or None,
            env=env,
            text=True,
            bufsize=1,
            universal_newlines=True,
            **popen_kwargs,
        )
    except FileNotFoundError as exc:
        notifier(
            "downlow-task-event",
            {
                "id": args.task_id,
                "event": "error",
                "message": f"Executable not found: {exc.filename}",
            },
        )
        log(f"{exc}", file=sys.stderr)
        return 1
    stdout_lines: list[str] = []
    stderr_lines: list[str] = []

    def pump(stream: IO[str], label: str, sink: list[str]) -> None:
        # Forward each child output line to IPC and keep a copy for the mirror.
        for line in iter_stream(stream):
            sink.append(line)
            notifier(
                "downlow-task-event",
                {
                    "id": args.task_id,
                    "event": label,
                    "line": line,
                },
            )

    # One daemon thread per pipe so stdout/stderr are drained concurrently
    # (prevents the child blocking on a full pipe buffer).
    threads = []
    if process.stdout:
        t_out = threading.Thread(
            target=pump,
            args=(process.stdout,
                  "stdout",
                  stdout_lines),
            daemon=True
        )
        t_out.start()
        threads.append(t_out)
    if process.stderr:
        t_err = threading.Thread(
            target=pump,
            args=(process.stderr,
                  "stderr",
                  stderr_lines),
            daemon=True
        )
        t_err.start()
        threads.append(t_err)
    return_code = process.wait()
    # Short join only: the pumps are daemons and exit with the process anyway.
    for t in threads:
        t.join(timeout=0.1)
    notifier(
        "downlow-task-event",
        {
            "id": args.task_id,
            "event": "exit",
            "returncode": return_code,
            "success": return_code == 0,
        },
    )
    # Also mirror aggregated output to stdout/stderr for compatibility when IPC is unavailable.
    if stdout_lines:
        log("\n".join(stdout_lines))
    if stderr_lines:
        log("\n".join(stderr_lines), file=sys.stderr)
    return return_code
|
||||
Reference in New Issue
Block a user