"""CLI REPL for Medeia-Macina with autocomplete support."""

from __future__ import annotations

import atexit
import io
import json
import re
import sys
import threading
import time
import uuid
from copy import deepcopy
from importlib import import_module
from pathlib import Path
from typing import (
    Any,
    Callable,
    Dict,
    List,
    Optional,
    Sequence,
    Set,
    TextIO,
    TYPE_CHECKING,
    cast,
)

from SYS.logger import debug

try:
    import typer
except ImportError:
    typer = None

try:
    from result_table import ResultTable, format_result

    RESULT_TABLE_AVAILABLE = True
except ImportError:
    RESULT_TABLE_AVAILABLE = False
    ResultTable = None  # type: ignore
    format_result = None  # type: ignore

try:
    from prompt_toolkit import PromptSession
    from prompt_toolkit.completion import Completer, Completion
    from prompt_toolkit.document import Document
    from prompt_toolkit.lexers import Lexer
    from prompt_toolkit.styles import Style

    PROMPT_TOOLKIT_AVAILABLE = True
except ImportError:  # pragma: no cover - optional dependency
    PromptSession = None  # type: ignore
    Completer = None  # type: ignore
    Completion = None  # type: ignore
    Document = None  # type: ignore
    Lexer = None  # type: ignore
    Style = None  # type: ignore
    PROMPT_TOOLKIT_AVAILABLE = False

try:
    from SYS.worker_manager import WorkerManager
except ImportError:  # pragma: no cover - optional dependency
    WorkerManager = None  # type: ignore

try:
    from SYS.background_notifier import ensure_background_notifier
except ImportError:  # pragma: no cover - optional dependency
    ensure_background_notifier = lambda *_, **__: None  # type: ignore

if TYPE_CHECKING:  # pragma: no cover - typing helper
    from SYS.worker_manager import WorkerManager as WorkerManagerType
else:
    WorkerManagerType = Any


# Global toolbar updater callback for prompt_toolkit integration.
# (Callable is imported from typing at the top of the module so this
# annotation resolves when the module is loaded.)
_TOOLBAR_UPDATER: Optional[Callable[[str], None]] = None

from config import get_local_storage_path, load_config
from cmdlet_catalog import (
    import_cmd_module as _catalog_import_cmd_module,
    list_cmdlet_metadata as _catalog_list_cmdlet_metadata,
    list_cmdlet_names as _catalog_list_cmdlet_names,
    get_cmdlet_arg_flags as _catalog_get_cmdlet_arg_flags,
    get_cmdlet_arg_choices as _catalog_get_cmdlet_arg_choices,
    get_cmdlet_metadata as _catalog_get_cmdlet_metadata,
)

class _WorkerOutputMirror(io.TextIOBase):
    """Mirror stdout/stderr to worker manager while preserving console output."""

    def __init__(self, original: TextIO, manager: WorkerManagerType, worker_id: str, channel: str):
        self._original = original
        self._manager = manager
        self._worker_id = worker_id
        self._channel = channel
        self._pending: str = ""

    def write(self, data: str) -> int:
        if not data:
            return 0
        self._original.write(data)
        self._buffer_text(data)
        return len(data)

    def flush(self) -> None:
        self._original.flush()
        self._flush_pending(force=True)

    def isatty(self) -> bool:  # pragma: no cover - passthrough
        return bool(getattr(self._original, "isatty", lambda: False)())

    def _buffer_text(self, data: str) -> None:
        combined = self._pending + data
        lines = combined.splitlines(keepends=True)
        if not lines:
            self._pending = combined
            return
        if lines[-1].endswith(("\n", "\r")):
            complete = lines
            self._pending = ""
        else:
            complete = lines[:-1]
            self._pending = lines[-1]
        for chunk in complete:
            self._emit(chunk)

    def _flush_pending(self, force: bool = False) -> None:
        if self._pending and force:
            self._emit(self._pending)
            self._pending = ""

    def _emit(self, text: str) -> None:
        if not text:
            return
        try:
            self._manager.append_stdout(self._worker_id, text, channel=self._channel)
        except Exception:
            pass

    @property
    def encoding(self) -> str:  # type: ignore[override]
        return getattr(self._original, "encoding", "utf-8")

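# Illustrative usage sketch (not executed here; `manager` and the worker id
# are hypothetical stand-ins for a live WorkerManager and a tracked worker):
#
#   mirror = _WorkerOutputMirror(sys.stdout, manager, "cli_demo_abc123", "stdout")
#   sys.stdout = mirror   # the console still receives every write...
#   print("hello")        # ...and the worker log gets the completed line "hello\n"
#   mirror.flush()        # pushes any partial (un-terminated) line through
#   sys.stdout = mirror._original
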
class _WorkerStageSession:
    """Lifecycle helper for wrapping a CLI cmdlet execution in a worker record."""

    def __init__(
        self,
        manager: WorkerManagerType,
        worker_id: str,
        orig_stdout: TextIO,
        orig_stderr: TextIO,
        stdout_proxy: _WorkerOutputMirror,
        stderr_proxy: _WorkerOutputMirror,
        config: Optional[Dict[str, Any]],
        logging_enabled: bool,
        completion_label: str,
        error_label: str,
    ) -> None:
        self.manager = manager
        self.worker_id = worker_id
        self.orig_stdout = orig_stdout
        self.orig_stderr = orig_stderr
        self.stdout_proxy = stdout_proxy
        self.stderr_proxy = stderr_proxy
        self.config = config
        self.logging_enabled = logging_enabled
        self.closed = False
        self._completion_label = completion_label
        self._error_label = error_label

    def close(self, status: str = "completed", error_msg: str = "") -> None:
        if self.closed:
            return
        try:
            self.stdout_proxy.flush()
            self.stderr_proxy.flush()
        except Exception:
            pass
        sys.stdout = self.orig_stdout
        sys.stderr = self.orig_stderr
        if self.logging_enabled:
            try:
                self.manager.disable_logging_for_worker(self.worker_id)
            except Exception:
                pass
        try:
            if status == "completed":
                self.manager.log_step(self.worker_id, self._completion_label)
            else:
                self.manager.log_step(self.worker_id, f"{self._error_label}: {error_msg or status}")
        except Exception:
            pass
        try:
            self.manager.finish_worker(self.worker_id, result=status or "completed", error_msg=error_msg or "")
        except Exception:
            pass
        if self.config and self.config.get('_current_worker_id') == self.worker_id:
            self.config.pop('_current_worker_id', None)
        self.closed = True

_CLI_WORKER_MANAGER: Optional[WorkerManagerType] = None
_CLI_ORPHAN_CLEANUP_DONE = False
CLI_ROOT = Path(__file__).resolve().parent


def _load_cli_config() -> Dict[str, Any]:
    """Load config.conf relative to the CLI script location."""
    try:
        return deepcopy(load_config(config_dir=CLI_ROOT))
    except Exception:
        return {}

def _get_table_title_for_command(
    cmd_name: str,
    emitted_items: Optional[List[Any]] = None,
    cmd_args: Optional[List[str]] = None,
) -> str:
    """Generate a dynamic table title based on the command and emitted items.

    Args:
        cmd_name: The command name (e.g., 'search-file', 'get-tag', 'get-file')
        emitted_items: The items being displayed
        cmd_args: Arguments passed to the command (when available)

    Returns:
        A descriptive title for the result table
    """
    # Prefer argument-aware titles where possible so table history is self-describing.
    if cmd_name in ('search-provider', 'search_provider') and cmd_args:
        # Support both positional form:
        #   search-provider <provider> <query>
        # and flag form:
        #   search-provider -provider <provider> <query>
        provider: str = ""
        query: str = ""
        tokens = [str(a) for a in (cmd_args or [])]
        pos: List[str] = []
        i = 0
        while i < len(tokens):
            low = tokens[i].lower()
            if low in {"-provider", "--provider"} and i + 1 < len(tokens):
                provider = str(tokens[i + 1]).strip()
                i += 2
                continue
            if low in {"-query", "--query"} and i + 1 < len(tokens):
                query = str(tokens[i + 1]).strip()
                i += 2
                continue
            if low in {"-limit", "--limit"} and i + 1 < len(tokens):
                i += 2
                continue
            if not str(tokens[i]).startswith("-"):
                pos.append(str(tokens[i]))
            i += 1

        if not provider and pos:
            provider = str(pos[0]).strip()
            pos = pos[1:]
        if not query and pos:
            query = " ".join(pos).strip()

        if not provider or not query:
            # Fall back to generic mapping below.
            provider = ""
            query = ""

        provider_lower = provider.lower()
        if provider_lower == 'youtube':
            provider_label = 'Youtube'
        elif provider_lower == 'openlibrary':
            provider_label = 'OpenLibrary'
        else:
            provider_label = provider[:1].upper() + provider[1:] if provider else 'Provider'
        if provider and query:
            return f"{provider_label}: {query}".strip().rstrip(':')

    # Mapping of commands to title templates
    title_map = {
        'search-file': 'Results',
        'search_file': 'Results',
        'download-data': 'Downloads',
        'download_data': 'Downloads',
        'get-tag': 'Tags',
        'get_tag': 'Tags',
        'get-file': 'Results',
        'get_file': 'Results',
        'add-tags': 'Results',
        'add_tags': 'Results',
        'delete-tag': 'Results',
        'delete_tag': 'Results',
        'add-url': 'Results',
        'add_url': 'Results',
        'get-url': 'url',
        'get_url': 'url',
        'delete-url': 'Results',
        'delete_url': 'Results',
        'get-note': 'Notes',
        'get_note': 'Notes',
        'add-note': 'Results',
        'add_note': 'Results',
        'delete-note': 'Results',
        'delete_note': 'Results',
        'get-relationship': 'Relationships',
        'get_relationship': 'Relationships',
        'add-relationship': 'Results',
        'add_relationship': 'Results',
        'add-file': 'Results',
        'add_file': 'Results',
        'delete-file': 'Results',
        'delete_file': 'Results',
        'get-metadata': None,
        'get_metadata': None,
    }

    mapped = title_map.get(cmd_name, 'Results')
    if mapped is not None:
        return mapped

    # For metadata, derive title from first item if available
    if emitted_items:
        first = emitted_items[0]
        try:
            if isinstance(first, dict) and first.get('title'):
                return str(first.get('title'))
            if hasattr(first, 'title') and getattr(first, 'title'):
                return str(getattr(first, 'title'))
        except Exception:
            pass
    return 'Results'

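# Example (illustrative): _get_table_title_for_command("search-provider",
# cmd_args=["-provider", "youtube", "cooking"]) returns "Youtube: cooking";
# any command missing from title_map falls back to the generic "Results"
# title, and get-metadata derives its title from the first emitted item.
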
def _close_cli_worker_manager() -> None:
    global _CLI_WORKER_MANAGER
    if _CLI_WORKER_MANAGER:
        try:
            # print("[CLI] Closing worker manager...", file=sys.stderr)
            _CLI_WORKER_MANAGER.close()
        except Exception:
            pass
        _CLI_WORKER_MANAGER = None


atexit.register(_close_cli_worker_manager)

def _ensure_worker_manager(config: Dict[str, Any]) -> Optional[WorkerManagerType]:
    """Attach a WorkerManager to the CLI config for cmdlet execution."""
    global _CLI_WORKER_MANAGER, _CLI_ORPHAN_CLEANUP_DONE
    if WorkerManager is None:
        return None
    if not isinstance(config, dict):
        return None
    existing = config.get('_worker_manager')
    if isinstance(existing, WorkerManager):
        return existing
    library_root = get_local_storage_path(config)
    if not library_root:
        return None
    try:
        resolved_root = Path(library_root).resolve()
    except Exception:
        resolved_root = Path(library_root)
    try:
        if not _CLI_WORKER_MANAGER or Path(getattr(_CLI_WORKER_MANAGER, 'library_root', '')) != resolved_root:
            if _CLI_WORKER_MANAGER:
                try:
                    _CLI_WORKER_MANAGER.close()
                except Exception:
                    pass
            _CLI_WORKER_MANAGER = WorkerManager(resolved_root, auto_refresh_interval=0.5)
        manager = _CLI_WORKER_MANAGER
        config['_worker_manager'] = manager
        # Do NOT attach notifier here - it will be attached when we have session worker IDs
        if manager and not _CLI_ORPHAN_CLEANUP_DONE:
            try:
                manager.expire_running_workers(
                    older_than_seconds=120,
                    worker_id_prefix="cli_%",
                    reason="CLI session ended unexpectedly; marking worker as failed",
                )
            except Exception:
                pass
            else:
                _CLI_ORPHAN_CLEANUP_DONE = True
        return manager
    except Exception as exc:
        print(f"[worker] Could not initialize worker manager: {exc}", file=sys.stderr)
        return None

def _start_worker_session(
    worker_manager: Optional[WorkerManagerType],
    *,
    worker_type: str,
    title: str,
    description: str,
    pipe_text: str,
    config: Optional[Dict[str, Any]],
    completion_label: str,
    error_label: str,
    skip_logging_for: Optional[Set[str]] = None,
    session_worker_ids: Optional[Set[str]] = None,
) -> Optional[_WorkerStageSession]:
    """Create a worker session wrapper and mirror stdout/stderr.

    Args:
        worker_manager: The worker manager
        worker_type: Type of worker (e.g., 'pipeline', 'search-file')
        title: Human-readable title
        description: Worker description
        pipe_text: Pipeline/command text
        config: CLI configuration dict
        completion_label: Label for successful completion
        error_label: Label for errors
        skip_logging_for: Set of worker types to skip logging for
        session_worker_ids: Optional set to register this worker's ID in (for filtering notifications)
    """
    if worker_manager is None:
        return None
    if skip_logging_for and worker_type in skip_logging_for:
        return None
    safe_type = worker_type or "cmd"
    worker_id = f"cli_{safe_type[:8]}_{uuid.uuid4().hex[:6]}"
    try:
        tracked = worker_manager.track_worker(
            worker_id,
            worker_type=worker_type,
            title=title,
            description=description or "(no args)",
            pipe=pipe_text,
        )
        if not tracked:
            return None
    except Exception as exc:
        print(f"[worker] Failed to track {worker_type}: {exc}", file=sys.stderr)
        return None

    # Register this worker ID with the session if provided
    if session_worker_ids is not None:
        session_worker_ids.add(worker_id)

    logging_enabled = False
    try:
        handler = worker_manager.enable_logging_for_worker(worker_id)
        logging_enabled = handler is not None
    except Exception:
        logging_enabled = False
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr
    stdout_proxy = _WorkerOutputMirror(orig_stdout, worker_manager, worker_id, 'stdout')
    stderr_proxy = _WorkerOutputMirror(orig_stderr, worker_manager, worker_id, 'stderr')
    sys.stdout = stdout_proxy
    sys.stderr = stderr_proxy
    if isinstance(config, dict):
        config['_current_worker_id'] = worker_id
    try:
        worker_manager.log_step(worker_id, f"Started {worker_type}")
    except Exception:
        pass
    return _WorkerStageSession(
        manager=worker_manager,
        worker_id=worker_id,
        orig_stdout=orig_stdout,
        orig_stderr=orig_stderr,
        stdout_proxy=stdout_proxy,
        stderr_proxy=stderr_proxy,
        config=config,
        logging_enabled=logging_enabled,
        completion_label=completion_label,
        error_label=error_label,
    )

def _begin_worker_stage(
    worker_manager: Optional[WorkerManagerType],
    cmd_name: str,
    stage_tokens: Sequence[str],
    config: Optional[Dict[str, Any]],
    command_text: str,
) -> Optional[_WorkerStageSession]:
    """Start a worker entry for an individual CLI stage.

    If a session_worker_ids set exists in config, register this stage with it.
    """
    description = " ".join(stage_tokens[1:]) if len(stage_tokens) > 1 else "(no args)"
    session_worker_ids = None
    if isinstance(config, dict):
        session_worker_ids = config.get('_session_worker_ids')

    return _start_worker_session(
        worker_manager,
        worker_type=cmd_name,
        title=f"{cmd_name} stage",
        description=description,
        pipe_text=command_text,
        config=config,
        completion_label="Stage completed",
        error_label="Stage error",
        skip_logging_for={".worker", "worker", "workers"},
        session_worker_ids=session_worker_ids,
    )

def _begin_pipeline_worker(
    worker_manager: Optional[WorkerManagerType],
    pipeline_text: str,
    config: Optional[Dict[str, Any]],
) -> Optional[_WorkerStageSession]:
    """Start a worker that represents the entire pipeline execution.

    Also initializes a session_worker_ids set in config for tracking pipeline workers.
    """
    # Create a session ID set for this pipeline execution
    session_worker_ids: Set[str] = set()
    if isinstance(config, dict):
        config['_session_worker_ids'] = session_worker_ids

    return _start_worker_session(
        worker_manager,
        worker_type="pipeline",
        title="Pipeline run",
        description=pipeline_text,
        pipe_text=pipeline_text,
        config=config,
        completion_label="Pipeline completed",
        error_label="Pipeline error",
        session_worker_ids=session_worker_ids,
    )

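# Illustrative flow (the pipeline text below is hypothetical): for a run like
#
#   search-file tool | add-tags -tag demo
#
# _begin_pipeline_worker creates one "pipeline" worker for the whole run and
# seeds config['_session_worker_ids']; _begin_worker_stage then adds one
# worker per stage ("search-file", "add-tags") to that same set so background
# notifications can be filtered to the current session.
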
def _get_cmdlet_names() -> List[str]:
    """Get list of all available cmdlet names."""
    try:
        return _catalog_list_cmdlet_names()
    except Exception:
        return []

def _import_cmd_module(mod_name: str):
    """Import a cmdlet/native module from cmdlet or cmdnat packages."""
    try:
        return _catalog_import_cmd_module(mod_name)
    except Exception:
        return None

def _get_cmdlet_args(cmd_name: str) -> List[str]:
    """Get list of argument flags for a cmdlet (with - and -- prefixes)."""
    try:
        return _catalog_get_cmdlet_arg_flags(cmd_name)
    except Exception:
        return []

def get_store_choices() -> List[str]:
    """Return configured store backend names.

    This is the same list used for REPL/Typer autocomplete for `-store`.
    """
    try:
        from Store import Store

        storage = Store(_load_cli_config(), suppress_debug=True)
        backends = storage.list_backends()
        return list(backends or [])
    except Exception:
        return []

def _get_arg_choices(cmd_name: str, arg_name: str) -> List[str]:
    """Get list of valid choices for a specific cmdlet argument."""
    try:
        mod_name = cmd_name.replace("-", "_")
        normalized_arg = arg_name.lstrip("-")

        # Dynamic storage backends: use current config to enumerate available storages.
        # Support both "storage" and "store" argument names.
        if normalized_arg in ("storage", "store"):
            backends = get_store_choices()
            if backends:
                return backends

        # Dynamic search providers
        if normalized_arg == "provider":
            try:
                canonical_cmd = (cmd_name or "").replace("_", "-").lower()

                # cmdlet-aware provider choices:
                # - search-provider: search providers
                # - add-file: file providers (0x0, matrix)
                if canonical_cmd in {"search-provider"}:
                    from ProviderCore.registry import list_search_providers
                    providers = list_search_providers(_load_cli_config())
                    available = [name for name, is_ready in providers.items() if is_ready]
                    return sorted(available) if available else sorted(providers.keys())

                if canonical_cmd in {"add-file"}:
                    from ProviderCore.registry import list_file_providers
                    providers = list_file_providers(_load_cli_config())
                    available = [name for name, is_ready in providers.items() if is_ready]
                    return sorted(available) if available else sorted(providers.keys())

                # Default behavior (legacy): merge search providers and metadata providers.
                from ProviderCore.registry import list_search_providers
                providers = list_search_providers(_load_cli_config())
                available = [name for name, is_ready in providers.items() if is_ready]
                provider_choices = sorted(available) if available else sorted(providers.keys())
            except Exception:
                provider_choices = []

            try:
                from Provider.metadata_provider import list_metadata_providers
                meta_providers = list_metadata_providers(_load_cli_config())
                meta_available = [n for n, ready in meta_providers.items() if ready]
                meta_choices = sorted(meta_available) if meta_available else sorted(meta_providers.keys())
            except Exception:
                meta_choices = []

            merged = sorted(set(provider_choices + meta_choices))
            if merged:
                return merged

        if normalized_arg == "scrape":
            try:
                from Provider.metadata_provider import list_metadata_providers
                meta_providers = list_metadata_providers(_load_cli_config())
                if meta_providers:
                    return sorted(meta_providers.keys())
            except Exception:
                pass

        choices = _catalog_get_cmdlet_arg_choices(cmd_name, arg_name)
        return choices or []
    except Exception:
        return []

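# Example (illustrative; store names are hypothetical): with a config defining
# backends "home" and "nas", _get_arg_choices("get-file", "-store") returns
# ["home", "nas"], so typing `get-file -store <TAB>` completes to those names.
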
if (
    PROMPT_TOOLKIT_AVAILABLE
    and PromptSession is not None
    and Completion is not None
    and Completer is not None
    and Document is not None
    and Lexer is not None
):
    CompletionType = cast(Any, Completion)

    class CmdletCompleter(Completer):
        """Custom completer for cmdlet REPL with autocomplete tied to cmdlet metadata."""

        def __init__(self):
            self.cmdlet_names = _get_cmdlet_names()

        def get_completions(self, document: Document, complete_event):  # type: ignore[override]
            """Generate completions for the current input."""
            text = document.text_before_cursor
            tokens = text.split()
            ends_with_space = bool(text) and text[-1].isspace()

            # Respect pipeline stages: only use tokens after the last '|'
            last_pipe = -1
            for idx, tok in enumerate(tokens):
                if tok == "|":
                    last_pipe = idx
            stage_tokens = tokens[last_pipe + 1:] if last_pipe >= 0 else tokens

            if not stage_tokens:
                for cmd in self.cmdlet_names:
                    yield CompletionType(cmd, start_position=0)
                return

            # Single token at this stage -> suggest command names/keywords
            if len(stage_tokens) == 1:
                current = stage_tokens[0].lower()

                # If the user has finished typing the command and added a space,
                # complete that command's flags (or sub-choices) instead of command names.
                if ends_with_space:
                    cmd_name = current.replace("_", "-")
                    if cmd_name in {"help"}:
                        for cmd in self.cmdlet_names:
                            yield CompletionType(cmd, start_position=0)
                        return

                    arg_names = _get_cmdlet_args(cmd_name)
                    logical_seen: Set[str] = set()
                    for arg in arg_names:
                        arg_low = arg.lower()
                        if arg_low.startswith("--"):
                            continue
                        logical = arg.lstrip("-").lower()
                        if logical in logical_seen:
                            continue
                        yield CompletionType(arg, start_position=0)
                        logical_seen.add(logical)

                    yield CompletionType("-help", start_position=0)
                    return

                for cmd in self.cmdlet_names:
                    if cmd.startswith(current):
                        yield CompletionType(cmd, start_position=-len(current))
                for keyword in ["help", "exit", "quit"]:
                    if keyword.startswith(current):
                        yield CompletionType(keyword, start_position=-len(current))
                return

            # Otherwise treat first token of stage as command and complete its args
            cmd_name = stage_tokens[0].replace("_", "-").lower()
            if ends_with_space:
                current_token = ""
                prev_token = stage_tokens[-1].lower()
            else:
                current_token = stage_tokens[-1].lower()
                prev_token = stage_tokens[-2].lower() if len(stage_tokens) > 1 else ""

            choices = _get_arg_choices(cmd_name, prev_token)
            if choices:
                for choice in choices:
                    if choice.lower().startswith(current_token):
                        yield CompletionType(choice, start_position=-len(current_token))
                return

            arg_names = _get_cmdlet_args(cmd_name)
            logical_seen: Set[str] = set()
            for arg in arg_names:
                arg_low = arg.lower()

                prefer_single_dash = current_token in {"", "-"}

                # If the user has only typed '-', prefer single-dash flags (e.g. -url)
                # and avoid suggesting both -name and --name for the same logical arg.
                if prefer_single_dash and arg_low.startswith("--"):
                    continue

                logical = arg.lstrip("-").lower()
                if prefer_single_dash and logical in logical_seen:
                    continue

                if arg_low.startswith(current_token):
                    yield CompletionType(arg, start_position=-len(current_token))
                    if prefer_single_dash:
                        logical_seen.add(logical)

            # Help completion: prefer -help unless user explicitly starts '--'
            if current_token.startswith("--"):
                if "--help".startswith(current_token):
                    yield CompletionType("--help", start_position=-len(current_token))
            else:
                if "-help".startswith(current_token):
                    yield CompletionType("-help", start_position=-len(current_token))

        async def get_completions_async(self, document: Document, complete_event):  # type: ignore[override]
            for completion in self.get_completions(document, complete_event):
                yield completion

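    # Illustrative completion walk-through (cmdlet and flag names are
    # hypothetical): for the input "search-file -store " the completer sets
    # prev_token="-store", asks _get_arg_choices() for its values, and yields
    # one completion per configured store; for "search-file -" it yields the
    # cmdlet's single-dash flags plus "-help".
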
    class MedeiaLexer(Lexer):
        def lex_document(self, document):
            def get_line(lineno):
                line = document.lines[lineno]
                tokens = []

                # Match: whitespace, pipe, quoted string, or word
                # (uses the module-level `re` import).
                pattern = re.compile(r'''
                    (\s+) |                                   # 1. Whitespace
                    (\|) |                                    # 2. Pipe
                    ("(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*') |   # 3. Quoted string
                    ([^\s\|]+)                                # 4. Word
                ''', re.VERBOSE)

                is_cmdlet = True

                for match in pattern.finditer(line):
                    ws, pipe, quote, word = match.groups()

                    if ws:
                        tokens.append(('', ws))
                    elif pipe:
                        tokens.append(('class:pipe', pipe))
                        is_cmdlet = True
                    elif quote:
                        tokens.append(('class:string', quote))
                        is_cmdlet = False
                    elif word:
                        if is_cmdlet:
                            tokens.append(('class:cmdlet', word))
                            is_cmdlet = False
                        elif word.startswith('-'):
                            tokens.append(('class:argument', word))
                        else:
                            tokens.append(('class:value', word))

                return tokens
            return get_line
else:  # pragma: no cover - prompt toolkit unavailable
    CmdletCompleter = None  # type: ignore[assignment]

def _create_cmdlet_cli():
    """Create Typer CLI app for cmdlet-based commands."""
    if typer is None:
        return None

    app = typer.Typer(help="Medeia-Macina CLI")

    def _complete_search_provider(ctx, param, incomplete: str):  # pragma: no cover
        """Shell completion for --provider values on the Typer search-provider command."""
        try:
            import click
            from click.shell_completion import CompletionItem
        except Exception:
            return []

        try:
            from ProviderCore.registry import list_search_providers
            providers = list_search_providers(_load_cli_config())
            available = [n for n, ok in (providers or {}).items() if ok]
            choices = sorted(available) if available else sorted((providers or {}).keys())
        except Exception:
            choices = []

        inc = (incomplete or "").lower()
        out = []
        for name in choices:
            if not name:
                continue
            if name.lower().startswith(inc):
                out.append(CompletionItem(name))
        return out

    @app.command("search-provider")
    def search_provider(
        provider: str = typer.Option(
            ..., "--provider", "-p",
            help="Provider name (bandcamp, libgen, soulseek, youtube)",
            shell_complete=_complete_search_provider,
        ),
        query: str = typer.Argument(..., help="Search query (quote for spaces)"),
        limit: int = typer.Option(36, "--limit", "-l", help="Maximum results to return"),
    ):
        """Search external providers (Typer wrapper around the cmdlet)."""
        # Delegate to the existing cmdlet so behavior stays consistent.
        _execute_cmdlet("search-provider", ["-provider", provider, query, "-limit", str(limit)])

    @app.command("pipeline")
    def pipeline(
        command: str = typer.Option(..., "--pipeline", "-p", help="Pipeline command string to execute"),
        seeds_json: Optional[str] = typer.Option(None, "--seeds-json", "-s", help="JSON string of seed items")
    ):
        """Execute a pipeline command non-interactively."""
        import shlex
        import pipeline as ctx

        # Load config
        config = _load_cli_config()

        # Initialize debug logging if enabled
        if config:
            from SYS.logger import set_debug
            debug_enabled = config.get("debug", False)
            set_debug(debug_enabled)

            # Also configure standard logging for libraries that use it (like local_library.py)
            if debug_enabled:
                import logging
                logging.basicConfig(
                    level=logging.DEBUG,
                    format='[%(name)s] %(levelname)s: %(message)s',
                    stream=sys.stderr
                )

                # httpx/httpcore can be extremely verbose and will drown useful debug output,
                # especially when invoked from MPV (where console output is truncated).
                for noisy in ("httpx", "httpcore", "httpcore.http11", "httpcore.connection"):
                    try:
                        logging.getLogger(noisy).setLevel(logging.WARNING)
                    except Exception:
                        pass

        # Handle seeds if provided
        if seeds_json:
            try:
                seeds = json.loads(seeds_json)
                # If seeds is a list, use it directly. If it is a single item, wrap it in a list.
                if not isinstance(seeds, list):
                    seeds = [seeds]

                # Set seeds as the result of a "virtual" previous stage.
                # This allows the first command in the pipeline to receive them as input.
                ctx.set_last_result_items_only(seeds)
            except Exception as e:
                print(f"Error parsing seeds JSON: {e}")
                return

        try:
            from cli_syntax import validate_pipeline_text
            syntax_error = validate_pipeline_text(command)
            if syntax_error:
                print(syntax_error.message, file=sys.stderr)
                return
        except Exception:
            # Best-effort only; if the validator can't load, fall back to shlex handling below.
            pass

        try:
            tokens = shlex.split(command)
        except ValueError as exc:
            print(f"Syntax error: {exc}", file=sys.stderr)
            return

        if not tokens:
            return

        # Execute
        _execute_pipeline(tokens)

    @app.command("repl")
    def repl():
        """Start interactive REPL for cmdlet with autocomplete."""
        banner = """
Medeia-Macina
=====================
|123456789|ABCDEFGHI|
|246813579|JKLMNOPQR|
|369369369|STUVWXYZ0|
|483726159|ABCDEFGHI|
|=========+=========|
|516273849|JKLMNOPQR|
|639639639|STUVWXYZ0|
|753186429|ABCDEFGHI|
|876543219|JKLMNOPQR|
|999999999|STUVWXYZ0|
=====================
"""
        print(banner)

        # Configurable prompt
        prompt_text = "🜂🜄🜁🜃|"

        # Prepare startup table (always attempt; fall back gracefully if import fails)
        startup_table = None
        if RESULT_TABLE_AVAILABLE and ResultTable is not None:
            startup_table = ResultTable(
                "*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"
            )
        if startup_table:
            startup_table.set_no_choice(True).set_preserve_order(True)

        def _add_startup_check(
            status: str,
            name: str,
            *,
            provider: str = "",
            store: str = "",
            files: int | str | None = None,
            detail: str = "",
        ) -> None:
            if startup_table is None:
                return
            row = startup_table.add_row()
            row.add_column("Status", status)
            row.add_column("Name", name)
            row.add_column("Provider", provider or "")
            row.add_column("Store", store or "")
            row.add_column("Files", "" if files is None else str(files))
            row.add_column("Detail", detail or "")

        def _has_store_subtype(cfg: dict, subtype: str) -> bool:
            store_cfg = cfg.get("store")
            if not isinstance(store_cfg, dict):
                return False
            bucket = store_cfg.get(subtype)
            if not isinstance(bucket, dict):
                return False
            return any(isinstance(v, dict) and bool(v) for v in bucket.values())

        def _has_provider(cfg: dict, name: str) -> bool:
            provider_cfg = cfg.get("provider")
            if not isinstance(provider_cfg, dict):
                return False
            block = provider_cfg.get(str(name).strip().lower())
            return isinstance(block, dict) and bool(block)

        def _ping_url(url: str, timeout: float = 3.0) -> tuple[bool, str]:
            try:
                from API.HTTP import HTTPClient

                with HTTPClient(timeout=timeout, retries=1) as client:
                    resp = client.get(url, allow_redirects=True)
                    code = int(getattr(resp, "status_code", 0) or 0)
                    ok = 200 <= code < 500
                    return ok, f"{url} (HTTP {code})"
            except Exception as exc:
                return False, f"{url} ({type(exc).__name__})"

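        # Example (illustrative): _ping_url("https://openlibrary.org") yields
        # (True, "https://openlibrary.org (HTTP 200)") when reachable, or
        # (False, "https://openlibrary.org (<ExceptionName>)") on failure,
        # which becomes the one-line Detail cell in the startup table.
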
        # Load config and initialize debug logging
        config = {}
        try:
            config = _load_cli_config()
        except Exception:
            config = {}

        try:
            if config:
                from SYS.logger import set_debug, debug
                debug_enabled = config.get("debug", False)
                set_debug(debug_enabled)
                if debug_enabled:
                    debug("✓ Debug logging enabled")

                try:
                    from API.HydrusNetwork import get_client
                    # get_client(config)  # Pre-acquire and cache session key
                    # debug("✓ Hydrus session key acquired")
                except RuntimeError:
                    # Hydrus is not available - expected sometimes; continue
                    pass
                except Exception as e:
                    debug(f"⚠ Could not pre-acquire Hydrus session key: {e}")

            # Run startup checks and render table
            try:
                # MPV availability is validated by MPV.MPV.__init__.
                try:
                    from MPV.mpv_ipc import MPV

                    MPV()
                    try:
                        import shutil

                        mpv_path = shutil.which("mpv")
                    except Exception:
                        mpv_path = None

                    _add_startup_check("ENABLED", "MPV", detail=mpv_path or "Available")
                except Exception as exc:
                    _add_startup_check("DISABLED", "MPV", detail=str(exc))

                store_registry = None

                if config:
                    # Instantiate store registry once; store __init__ performs its own validation.
                    try:
                        from Store import Store as StoreRegistry

                        store_registry = StoreRegistry(config=config, suppress_debug=True)
                    except Exception:
                        store_registry = None

                # Only show checks that are configured in config.conf
                if _has_store_subtype(config, "hydrusnetwork"):
                    # HydrusNetwork self-validates in its __init__. We derive instance status from
                    # store instantiation rather than a separate Hydrus-specific health check.
                    store_cfg = config.get("store")
                    hydrus_cfg = store_cfg.get("hydrusnetwork", {}) if isinstance(store_cfg, dict) else {}
                    if isinstance(hydrus_cfg, dict):
                        for instance_name, instance_cfg in hydrus_cfg.items():
                            if not isinstance(instance_cfg, dict):
                                continue
                            name_key = str(instance_cfg.get("NAME") or instance_name)
                            url_val = str(instance_cfg.get("URL") or "").strip()

                            ok = bool(store_registry and store_registry.is_available(name_key))
                            status = "ENABLED" if ok else "DISABLED"
                            if ok:
                                total = None
                                try:
                                    if store_registry:
                                        backend = store_registry[name_key]
                                        total = getattr(backend, "total_count", None)
                                        if total is None:
                                            getter = getattr(backend, "get_total_count", None)
                                            if callable(getter):
                                                total = getter()
                                except Exception:
                                    total = None

                                detail = url_val
                                files = total if isinstance(total, int) and total >= 0 else None
                            else:
                                err = None
                                if store_registry:
                                    err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
                                detail = (url_val + (" - " if url_val else "")) + (err or "Unavailable")
                                files = None
                            _add_startup_check(status, name_key, store="hydrusnetwork", files=files, detail=detail)

                # Configured providers (dynamic): show any [provider=...] blocks.
                # This complements store checks and avoids hardcoding per-provider rows.
                provider_cfg = config.get("provider") if isinstance(config, dict) else None
                if isinstance(provider_cfg, dict) and provider_cfg:
                    try:
                        from ProviderCore.registry import (
                            list_search_providers,
                            list_file_providers,
                        )
                    except Exception:
                        list_search_providers = None  # type: ignore
                        list_file_providers = None  # type: ignore

                    try:
                        from Provider.metadata_provider import list_metadata_providers
                    except Exception:
                        list_metadata_providers = None  # type: ignore

                    search_availability = {}
                    file_availability = {}
                    meta_availability = {}

                    try:
                        if list_search_providers is not None:
                            search_availability = list_search_providers(config) or {}
                    except Exception:
                        search_availability = {}

                    try:
                        if list_file_providers is not None:
                            file_availability = list_file_providers(config) or {}
                    except Exception:
                        file_availability = {}

                    try:
                        if list_metadata_providers is not None:
                            meta_availability = list_metadata_providers(config) or {}
                    except Exception:
                        meta_availability = {}

                    def _provider_display_name(key: str) -> str:
                        k = (key or "").strip()
                        low = k.lower()
                        if low == "openlibrary":
                            return "OpenLibrary"
                        if low == "alldebrid":
                            return "AllDebrid"
                        if low == "youtube":
                            return "YouTube"
                        return k[:1].upper() + k[1:] if k else "Provider"

                    # Avoid duplicating the existing Matrix row.
                    already_checked = {"matrix"}

                    def _default_provider_ping_targets(provider_key: str) -> list[str]:
                        prov = (provider_key or "").strip().lower()
                        if prov == "openlibrary":
                            return ["https://openlibrary.org"]
                        if prov == "youtube":
                            return ["https://www.youtube.com"]
                        if prov == "bandcamp":
                            return ["https://bandcamp.com"]
                        if prov == "libgen":
                            try:
                                from Provider.libgen import MIRRORS

                                mirrors = [str(x).rstrip("/") for x in (MIRRORS or []) if str(x).strip()]
                                return [m + "/json.php" for m in mirrors]
                            except Exception:
                                return []
                        return []

                    def _ping_first(urls: list[str]) -> tuple[bool, str]:
                        for u in urls:
                            ok, detail = _ping_url(u)
                            if ok:
                                return True, detail
                        if urls:
                            ok, detail = _ping_url(urls[0])
                            return ok, detail
                        return False, "No ping target"

                    for provider_name in provider_cfg.keys():
                        prov = str(provider_name or "").strip().lower()
                        if not prov or prov in already_checked:
                            continue

                        display = _provider_display_name(prov)

                        # Special-case AllDebrid to show a richer detail and validate connectivity.
                        if prov == "alldebrid":
                            try:
                                from Provider.alldebrid import _get_debrid_api_key  # type: ignore

                                api_key = _get_debrid_api_key(config)
                                if not api_key:
                                    _add_startup_check("DISABLED", display, provider=prov, detail="Not configured")
                                else:
                                    from API.alldebrid import AllDebridClient

                                    client = AllDebridClient(api_key)
                                    base_url = str(getattr(client, "base_url", "") or "").strip()
                                    _add_startup_check("ENABLED", display, provider=prov, detail=base_url or "Connected")
                            except Exception as exc:
                                _add_startup_check("DISABLED", display, provider=prov, detail=str(exc))
                            continue

                        is_known = False
                        ok = None

                        if prov in search_availability:
                            is_known = True
                            ok = bool(search_availability.get(prov))
                        elif prov in file_availability:
                            is_known = True
                            ok = bool(file_availability.get(prov))
                        elif prov in meta_availability:
                            is_known = True
                            ok = bool(meta_availability.get(prov))

                        if not is_known:
                            _add_startup_check("UNKNOWN", display, provider=prov, detail="Not registered")
                        else:
                            # For non-login providers, include a lightweight URL reachability check.
                            detail = "Configured" if ok else "Not configured"
                            ping_targets = _default_provider_ping_targets(prov)
                            if ping_targets:
                                ping_ok, ping_detail = _ping_first(ping_targets)
                                if ok:
                                    detail = ping_detail
                                else:
                                    detail = (detail + " | " + ping_detail) if ping_detail else detail
                            _add_startup_check("ENABLED" if ok else "DISABLED", display, provider=prov, detail=detail)

                        already_checked.add(prov)

                    # Also show default non-login providers even if they aren't configured.
                    # This helps users know what's available/reachable out of the box.
                    default_search_providers = ["openlibrary", "libgen", "youtube", "bandcamp"]
                    for prov in default_search_providers:
                        if prov in already_checked:
                            continue
                        display = _provider_display_name(prov)
                        ok = bool(search_availability.get(prov)) if prov in search_availability else False
                        ping_targets = _default_provider_ping_targets(prov)
                        ping_ok, ping_detail = _ping_first(ping_targets) if ping_targets else (False, "No ping target")
                        detail = ping_detail if ping_detail else ("Available" if ok else "Unavailable")
                        # If the provider isn't even import/dep available, show that first.
                        if not ok:
                            detail = ("Unavailable" + (f" | {ping_detail}" if ping_detail else ""))
                        _add_startup_check("ENABLED" if (ok and ping_ok) else "DISABLED", display, provider=prov, detail=detail)
                        already_checked.add(prov)

                    # Default file providers (no login): 0x0
                    if "0x0" not in already_checked:
                        ok = bool(file_availability.get("0x0")) if "0x0" in file_availability else False
                        ping_ok, ping_detail = _ping_url("https://0x0.st")
                        detail = ping_detail
                        if not ok:
                            detail = ("Unavailable" + (f" | {ping_detail}" if ping_detail else ""))
                        _add_startup_check("ENABLED" if (ok and ping_ok) else "DISABLED", "0x0", provider="0x0", detail=detail)
                        already_checked.add("0x0")

                if _has_provider(config, "matrix"):
                    # Matrix availability is validated by Provider.matrix.Matrix.__init__.
                    try:
                        from Provider.matrix import Matrix

                        provider = Matrix(config)
                        matrix_conf = config.get("provider", {}).get("matrix", {}) if isinstance(config, dict) else {}
                        homeserver = str(matrix_conf.get("homeserver") or "").strip()
                        room_id = str(matrix_conf.get("room_id") or "").strip()
                        if homeserver and not homeserver.startswith("http"):
                            homeserver = f"https://{homeserver}"
                        target = homeserver.rstrip("/")
                        if room_id:
                            target = (target + (" " if target else "")) + f"room:{room_id}"

                        if provider.validate():
                            _add_startup_check("ENABLED", "Matrix", provider="matrix", detail=target or "Connected")
                        else:
                            missing: list[str] = []
                            if not homeserver:
                                missing.append("homeserver")
                            if not room_id:
                                missing.append("room_id")
                            if not (matrix_conf.get("access_token") or matrix_conf.get("password")):
                                missing.append("access_token/password")
                            detail = "Not configured" + (f" ({', '.join(missing)})" if missing else "")
                            _add_startup_check("DISABLED", "Matrix", provider="matrix", detail=detail)
                    except Exception as exc:
                        _add_startup_check("DISABLED", "Matrix", provider="matrix", detail=str(exc))

                if _has_store_subtype(config, "folder"):
                    # Folder local scan/index is performed by Store.Folder.__init__.
                    store_cfg = config.get("store")
                    folder_cfg = store_cfg.get("folder", {}) if isinstance(store_cfg, dict) else {}
                    if isinstance(folder_cfg, dict) and folder_cfg:
                        for instance_name, instance_cfg in folder_cfg.items():
                            if not isinstance(instance_cfg, dict):
                                continue
                            name_key = str(instance_cfg.get("NAME") or instance_name)
                            path_val = str(instance_cfg.get("PATH") or instance_cfg.get("path") or "").strip()

                            ok = bool(store_registry and store_registry.is_available(name_key))
                            if ok and store_registry:
                                backend = store_registry[name_key]
                                scan_ok = bool(getattr(backend, "scan_ok", True))
                                scan_detail = str(getattr(backend, "scan_detail", "") or "")
                                stats = getattr(backend, "scan_stats", None)
                                files = None
                                if isinstance(stats, dict):
                                    try:
                                        total_db = stats.get("files_total_db")
                                        if isinstance(total_db, (int, float)):
                                            files = int(total_db)
                                    except Exception:
                                        files = None
                                status = "SCANNED" if scan_ok else "ERROR"
                                detail = (path_val + (" - " if path_val else "")) + (scan_detail or "Up to date")
                                _add_startup_check(status, name_key, store="folder", files=files, detail=detail)
                            else:
                                err = None
                                if store_registry:
                                    err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
                                detail = (path_val + (" - " if path_val else "")) + (err or "Unavailable")
                                _add_startup_check("ERROR", name_key, store="folder", detail=detail)
                    else:
                        _add_startup_check("SKIPPED", "Folder", store="folder", detail="No folder stores configured")

                if _has_store_subtype(config, "debrid"):
                    # Debrid availability is validated by API.alldebrid.AllDebridClient.__init__.
                    try:
                        from config import get_debrid_api_key

                        api_key = get_debrid_api_key(config)
                        if not api_key:
                            _add_startup_check("DISABLED", "Debrid", store="debrid", detail="Not configured")
                        else:
                            from API.alldebrid import AllDebridClient

                            client = AllDebridClient(api_key)
                            base_url = str(getattr(client, "base_url", "") or "").strip()
                            _add_startup_check("ENABLED", "Debrid", store="debrid", detail=base_url or "Connected")
                    except Exception as exc:
                        _add_startup_check("DISABLED", "Debrid", store="debrid", detail=str(exc))

# Cookies are used by yt-dlp; keep this centralized utility.
|
|
|
|
|
try:
|
2025-12-16 23:23:43 -08:00
|
|
|
from tool.ytdlp import YtDlpTool
|
|
|
|
|
|
|
|
|
|
cookiefile = YtDlpTool(config).resolve_cookiefile()
|
|
|
|
|
if cookiefile is not None:
|
2025-12-17 03:16:41 -08:00
|
|
|
_add_startup_check("FOUND", "Cookies", detail=str(cookiefile))
|
2025-12-16 23:23:43 -08:00
|
|
|
else:
|
2025-12-17 03:16:41 -08:00
|
|
|
_add_startup_check("MISSING", "Cookies", detail="Not found")
|
2025-12-13 12:09:50 -08:00
|
|
|
except Exception as exc:
|
2025-12-17 03:16:41 -08:00
|
|
|
_add_startup_check("ERROR", "Cookies", detail=str(exc))
|
2025-12-11 12:47:30 -08:00
|
|
|
|
|
|
|
|
if startup_table is not None and startup_table.rows:
|
|
|
|
|
print()
|
|
|
|
|
print(startup_table.format_plain())
|
|
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
if config:
|
2025-12-11 19:04:02 -08:00
|
|
|
from SYS.logger import debug # local import to avoid failing when debug disabled
|
2025-11-27 10:59:01 -08:00
|
|
|
debug(f"⚠ Could not check service availability: {e}")
|
2025-11-25 20:09:33 -08:00
|
|
|
except Exception:
|
|
|
|
|
pass # Silently ignore if config loading fails
|
|
|
|
|
|
2025-12-19 02:29:42 -08:00
|
|
|
        if PROMPT_TOOLKIT_AVAILABLE and PromptSession is not None and CmdletCompleter is not None and Style is not None:
            completer = CmdletCompleter()

            # Define the style for syntax highlighting
            style = Style.from_dict({
                'cmdlet': '#ffffff',            # white
                'argument': '#3b8eea',          # blue-ish
                'value': '#9a3209',             # red-ish
                'string': '#6d0d93',            # purple
                'pipe': '#4caf50',              # green
                'bottom-toolbar': 'noreverse',  # blend in with the default background
            })

            # Toolbar state for background notifications
            class ToolbarState:
                text = ""
                last_update_time: float = 0.0
                clear_timer: Optional[threading.Timer] = None

            toolbar_state = ToolbarState()

            def get_toolbar():
                # Only show the toolbar if there's text AND it's within the 3-second window
                if not toolbar_state.text or not toolbar_state.text.strip():
                    return None  # None completely hides the toolbar
                elapsed = time.time() - toolbar_state.last_update_time
                if elapsed > 3:
                    toolbar_state.text = ""
                    return None
                return toolbar_state.text
            def update_toolbar(text: str):
                text = text.strip()
                toolbar_state.text = text
                toolbar_state.last_update_time = time.time()

                # Cancel any pending clear timer
                if toolbar_state.clear_timer:
                    toolbar_state.clear_timer.cancel()
                    toolbar_state.clear_timer = None

                # Schedule an auto-clear in 3 seconds
                if text:
                    def clear_toolbar():
                        toolbar_state.text = ""
                        toolbar_state.clear_timer = None
                        # The prompt session is created further down and may not
                        # exist yet when the timer fires; NameError covers that window.
                        try:
                            if session and hasattr(session, 'app') and session.app.is_running:
                                session.app.invalidate()
                        except NameError:
                            pass

                    toolbar_state.clear_timer = threading.Timer(3.0, clear_toolbar)
                    toolbar_state.clear_timer.daemon = True
                    toolbar_state.clear_timer.start()

                # Force a redraw if the prompt is active
                try:
                    if session and hasattr(session, 'app') and session.app.is_running:
                        session.app.invalidate()
                except NameError:
                    pass

            # Register the global updater
            global _TOOLBAR_UPDATER
            _TOOLBAR_UPDATER = update_toolbar
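            # Illustrative use of the registered updater (hypothetical caller shown
            # for clarity; the real producer is the background notifier wired up in
            # _execute_pipeline):
            #     if _TOOLBAR_UPDATER is not None:
            #         _TOOLBAR_UPDATER("download-media: worker finished")
            # The text appears in the bottom toolbar and auto-clears after ~3 seconds.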
            session = PromptSession(
                completer=cast(Any, completer),
                lexer=MedeiaLexer(),
                style=style,
                bottom_toolbar=get_toolbar,
                refresh_interval=0.5,  # refresh periodically so the toolbar can update
            )

            def get_input(prompt: str = prompt_text) -> str:
                return session.prompt(prompt)

        else:
            def get_input(prompt: str = prompt_text) -> str:
                return input(prompt)
        while True:
            try:
                user_input = get_input(prompt_text).strip()
            except (EOFError, KeyboardInterrupt):
                print("He who is victorious through deceit is defeated by the truth.")
                break

            if not user_input:
                continue

            low = user_input.lower()
            if low in {"exit", "quit", "q"}:
                print("He who is victorious through deceit is defeated by the truth.")
                break

            if low in {"help", "?"}:
                _show_cmdlet_list()
                continue

            pipeline_ctx_ref = None
            try:
                import pipeline as ctx  # noqa: F401
                ctx.set_current_command_text(user_input)
                pipeline_ctx_ref = ctx
            except Exception:
                pipeline_ctx_ref = None

            try:
                from cli_syntax import validate_pipeline_text
                syntax_error = validate_pipeline_text(user_input)
                if syntax_error:
                    print(syntax_error.message, file=sys.stderr)
                    continue
            except Exception:
                # Best-effort only; if the validator can't load, continue with shlex.
                pass

            try:
                import shlex
                tokens = shlex.split(user_input)
            except ValueError as exc:
                print(f"Syntax error: {exc}", file=sys.stderr)
                continue

            if not tokens:
                continue
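            # Illustrative history walk: after two searches have each printed a
            # table, `@..` re-displays the earlier table and `@,,` steps forward
            # again; both also reset the current stage table so a following `@N`
            # selects from whichever table is on screen.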
            # Handle the special @,, selector to restore the next result table (forward navigation)
            if len(tokens) == 1 and tokens[0] == "@,,":
                try:
                    import pipeline as ctx
                    if ctx.restore_next_result_table():
                        # Check for an overlay table first
                        if hasattr(ctx, 'get_display_table'):
                            last_table = ctx.get_display_table()
                        else:
                            last_table = None

                        if last_table is None:
                            last_table = ctx.get_last_result_table()

                        if last_table:
                            print()
                            # Also update the current stage table so @N expansion works correctly
                            ctx.set_current_stage_table(last_table)
                            print(last_table.format_plain())
                        else:
                            # Fall back to items if there is no table object
                            items = ctx.get_last_result_items()
                            if items:
                                # Clear the current stage table if we only have items
                                ctx.set_current_stage_table(None)
                                print(f"Restored {len(items)} items (no table format available)")
                    else:
                        print("No forward history available", file=sys.stderr)
                except Exception as exc:
                    print(f"Error restoring next table: {exc}", file=sys.stderr)
                continue

            # Handle the special @.. selector to restore the previous result table
            if len(tokens) == 1 and tokens[0] == "@..":
                try:
                    import pipeline as ctx
                    if ctx.restore_previous_result_table():
                        # Check for an overlay table first
                        if hasattr(ctx, 'get_display_table'):
                            last_table = ctx.get_display_table()
                        else:
                            last_table = None

                        if last_table is None:
                            last_table = ctx.get_last_result_table()

                        if last_table:
                            print()
                            # Also update the current stage table so @N expansion works correctly
                            ctx.set_current_stage_table(last_table)
                            print(last_table.format_plain())
                        else:
                            # Fall back to items if there is no table object
                            items = ctx.get_last_result_items()
                            if items:
                                # Clear the current stage table if we only have items
                                ctx.set_current_stage_table(None)
                                print(f"Restored {len(items)} items (no table format available)")
                            else:
                                print("No previous result table in history")
                    else:
                        print("Result table history is empty")
                except Exception as e:
                    print(f"Error restoring previous result table: {e}")
                continue
            # Check for pipe operators to support chaining: cmd1 arg1 | cmd2 arg2 | cmd3 arg3
            # Also treat selection commands (@1, @*, etc.) as pipelines so they can be expanded
            try:
                if '|' in tokens or (tokens and tokens[0].startswith('@')):
                    _execute_pipeline(tokens)
                else:
                    cmd_name = tokens[0].replace("_", "-").lower()
                    is_help = any(arg in {"-help", "--help", "-h"} for arg in tokens[1:])

                    if is_help:
                        _show_cmdlet_help(cmd_name)
                    else:
                        # Execute the cmdlet
                        _execute_cmdlet(cmd_name, tokens[1:])
            finally:
                if pipeline_ctx_ref:
                    pipeline_ctx_ref.clear_current_command_text()

    @app.callback(invoke_without_command=True)
    def main_callback(ctx: typer.Context):
        """
        Medeia-Macina CLI entry point.

        If no command is provided, starts the interactive REPL.
        """
        # ctx.invoked_subcommand is None when no subcommand was passed.
        if ctx.invoked_subcommand is None:
            repl()

    return app
def _execute_pipeline(tokens: list):
    """Execute a pipeline of cmdlets separated by pipes (|).

    Example: cmd1 arg1 arg2 | cmd2 arg2 | cmd3 arg3
    """
    try:
        from cmdlet import REGISTRY
        import json
        import pipeline as ctx

        def _resolve_items_for_selection(table_obj, items_list):
            """Return items in the same order as the displayed table rows.

            When a user sees row #2 in the table and selects @2, they get row #2.
            No mapping, no math - the displayed order IS the selection order.

            The table and items list should already be in sync after sorting.
            """
            # Simply return items as-is - they should match the table row order
            return items_list if items_list else []

        def _debug_selection(label, selection_indices, table_obj, items_list, resolved_list=None):
            """Print debug info for selection mapping when troubleshooting.

            Shows the correspondence between displayed row numbers, source indices,
            and the actual items being selected to help diagnose reordering issues.
            """
            try:
                debug(f"[debug] {label}: sel={selection_indices} rows={len(table_obj.rows) if table_obj and hasattr(table_obj, 'rows') else 'n/a'} items={len(items_list) if items_list is not None else 'n/a'}")
                if table_obj and hasattr(table_obj, 'rows') and items_list:
                    # Show correspondence: displayed row # -> source_index -> item hash/title
                    for i in selection_indices:
                        if 0 <= i < len(table_obj.rows):
                            row = table_obj.rows[i]
                            src_idx = getattr(row, 'source_index', None)
                            debug(f"[debug] @{i+1} -> row_index={i}, source_index={src_idx}", end='')
                            if src_idx is not None and 0 <= src_idx < len(items_list):
                                item = items_list[src_idx]
                                # Try to show hash/title for verification
                                if isinstance(item, dict):
                                    hash_val = item.get('hash', item.get('hash_hex', 'N/A'))
                                    title_val = item.get('title', 'N/A')
                                else:
                                    hash_val = getattr(item, 'hash', getattr(item, 'hash_hex', 'N/A'))
                                    title_val = getattr(item, 'title', 'N/A')
                                if hash_val != 'N/A':
                                    hash_display = str(hash_val)
                                    title_display = str(title_val)
                                    debug(f" -> hash:{hash_display}, title:{title_display}")
                                else:
                                    debug(f" -> title:{title_val}")
                            else:
                                debug(" -> [source_index out of range]")
                if resolved_list is not None:
                    debug(f"[debug] resolved_len={len(resolved_list)}")
            except Exception as e:
                debug(f"[debug] error in _debug_selection: {e}")
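        # Example output (illustrative, derived from the format strings above):
        #     [debug] first-stage: sel=[1] rows=10 items=10
        #     [debug] @2 -> row_index=1, source_index=1 -> hash:ab12..., title:Example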
        # Split tokens by the pipe operator
        stages = []
        current_stage = []

        for token in tokens:
            if token == '|':
                if current_stage:
                    stages.append(current_stage)
                    current_stage = []
            else:
                current_stage.append(token)

        if current_stage:
            stages.append(current_stage)

        if not stages:
            print("Invalid pipeline syntax\n")
            return
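        # Example split (illustrative): ['search-file', 'foo', '|', '@1']
        # yields stages == [['search-file', 'foo'], ['@1']].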
        # If a previous stage paused for selection, attach its remaining stages when the user runs only @N
        pending_tail = ctx.get_pending_pipeline_tail() if hasattr(ctx, 'get_pending_pipeline_tail') else []
        pending_source = ctx.get_pending_pipeline_source() if hasattr(ctx, 'get_pending_pipeline_source') else None
        # Ensure the current stage table is restored before checking source (helps selection-only resumes)
        if hasattr(ctx, 'get_current_stage_table') and not ctx.get_current_stage_table():
            display_table = ctx.get_display_table() if hasattr(ctx, 'get_display_table') else None
            if display_table:
                ctx.set_current_stage_table(display_table)
            else:
                last_table = ctx.get_last_result_table() if hasattr(ctx, 'get_last_result_table') else None
                if last_table:
                    ctx.set_current_stage_table(last_table)

        current_source = ctx.get_current_stage_table_source_command() if hasattr(ctx, 'get_current_stage_table_source_command') else None
        effective_source = current_source or (ctx.get_last_result_table_source_command() if hasattr(ctx, 'get_last_result_table_source_command') else None)
        selection_only = len(stages) == 1 and stages[0] and stages[0][0].startswith('@')
        if pending_tail and selection_only:
            if (pending_source is None) or (effective_source and pending_source == effective_source):
                stages.extend(pending_tail)
                if hasattr(ctx, 'clear_pending_pipeline_tail'):
                    ctx.clear_pending_pipeline_tail()
            elif hasattr(ctx, 'clear_pending_pipeline_tail'):
                ctx.clear_pending_pipeline_tail()
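        # Illustrative resume: if `download-media URL | add-file` paused at a
        # format table, a follow-up `@2` arrives here as the only stage and the
        # stashed tail [['add-file']] is re-attached after it, so the selection
        # continues the original pipeline.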
        # Load config relative to the CLI root
        config = _load_cli_config()
        if isinstance(config, dict):
            # Request terminal-only background updates for this pipeline session
            config['_quiet_background_output'] = True
        def _maybe_run_class_selector(selected_items: list, *, stage_is_last: bool) -> bool:
            """Allow providers/stores to override `@N` selection semantics."""
            if not stage_is_last:
                return False

            # Gather potential keys from the table + selected rows.
            candidates: list[str] = []
            seen: set[str] = set()

            def _add(value) -> None:
                try:
                    text = str(value or '').strip().lower()
                except Exception:
                    return
                if not text or text in seen:
                    return
                seen.add(text)
                candidates.append(text)

            try:
                current_table = ctx.get_current_stage_table() or ctx.get_last_result_table()
                _add(current_table.table if current_table and hasattr(current_table, 'table') else None)
            except Exception:
                pass

            for item in selected_items or []:
                if isinstance(item, dict):
                    _add(item.get('provider'))
                    _add(item.get('store'))
                    _add(item.get('table'))
                else:
                    _add(getattr(item, 'provider', None))
                    _add(getattr(item, 'store', None))
                    _add(getattr(item, 'table', None))

            # Provider selector
            try:
                from ProviderCore.registry import get_provider as _get_provider
            except Exception:
                _get_provider = None

            if _get_provider is not None:
                for key in candidates:
                    try:
                        provider = _get_provider(key, config)
                    except Exception:
                        continue
                    if provider is None:
                        # Key did not resolve to a provider; keep probing the rest.
                        continue
                    try:
                        handled = bool(provider.selector(selected_items, ctx=ctx, stage_is_last=True))
                    except TypeError:
                        # Backwards-compat: selector(selected_items)
                        handled = bool(provider.selector(selected_items))
                    except Exception as exc:
                        print(f"{key} selector failed: {exc}\n")
                        return True
                    if handled:
                        return True

            # Store selector
            store_keys: list[str] = []
            for item in selected_items or []:
                if isinstance(item, dict):
                    v = item.get('store')
                else:
                    v = getattr(item, 'store', None)
                try:
                    name = str(v or '').strip()
                except Exception:
                    name = ''
                if name:
                    store_keys.append(name)

            if store_keys:
                try:
                    from Store.registry import Store as _StoreRegistry
                    store_registry = _StoreRegistry(config, suppress_debug=True)
                    try:
                        _backend_names = list(store_registry.list_backends())
                    except Exception:
                        _backend_names = []
                    _backend_by_lower = {str(n).lower(): str(n) for n in _backend_names if str(n).strip()}
                    for name in store_keys:
                        resolved_name = name
                        if not store_registry.is_available(resolved_name):
                            try:
                                resolved_name = _backend_by_lower.get(str(name).lower(), name)
                            except Exception:
                                resolved_name = name
                        if not store_registry.is_available(resolved_name):
                            continue
                        backend = store_registry[resolved_name]
                        selector = getattr(backend, 'selector', None)
                        if selector is None:
                            continue
                        try:
                            handled = bool(selector(selected_items, ctx=ctx, stage_is_last=True))
                        except TypeError:
                            handled = bool(selector(selected_items))
                        if handled:
                            return True
                except Exception:
                    # Store init failure should not break normal selection.
                    pass

            return False
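        # Selector protocol sketch: a provider or store opts in by exposing
        # selector(items, ctx=..., stage_is_last=...); the TypeError fallback
        # above keeps legacy single-argument selector(items) implementations
        # working. Returning True claims the selection and halts normal
        # pipeline handling.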
        # Check if the first stage has an @ selection - if so, apply it before pipeline execution
        first_stage_tokens = stages[0] if stages else []
        first_stage_selection_indices = []
        first_stage_had_extra_args = False
        if first_stage_tokens:
            # Look for @N, @N-M, @{N,M} in the first stage args
            new_first_stage = []
            first_stage_select_all = False
            for token in first_stage_tokens:
                if token.startswith('@'):
                    selection = _parse_selection_syntax(token)
                    if selection is not None:
                        # This is selection syntax - apply it to get the initial piped_result
                        first_stage_selection_indices = sorted([i - 1 for i in selection])
                    elif token == "@*":
                        # Special case: select all items
                        first_stage_select_all = True
                    else:
                        # Not a valid selection, keep as an arg
                        new_first_stage.append(token)
                else:
                    new_first_stage.append(token)
            # Update the first stage - if it's now empty (only had @N), keep the selection for later processing
            if new_first_stage:
                stages[0] = new_first_stage
                # If we found selection indices but still have tokens, these are extra args
                if first_stage_selection_indices or first_stage_select_all:
                    first_stage_had_extra_args = True
            elif first_stage_selection_indices or first_stage_select_all:
                # First stage was ONLY a selection (@N or @*) - remove it and apply the selection to the next stage's input
                stages.pop(0)

            # Handle @* expansion by selecting all available items
            if first_stage_select_all:
                last_items = ctx.get_last_result_items()
                if last_items:
                    first_stage_selection_indices = list(range(len(last_items)))
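        # Worked example (illustrative): for `@2-4 foo | add-file` the first
        # stage keeps ['foo'] and first_stage_had_extra_args is True, while the
        # selection indices become [1, 2, 3]; a bare `@3` instead pops the first
        # stage and carries only index [2] forward.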
        # Execute each stage, threading results to the next
        piped_result = None
        worker_manager = _ensure_worker_manager(config)
        pipeline_text = " | ".join(" ".join(stage) for stage in stages)
        pipeline_session = _begin_pipeline_worker(worker_manager, pipeline_text, config)

        # Update the background notifier with session worker IDs so it only shows workers from this pipeline
        if pipeline_session and worker_manager and isinstance(config, dict):
            session_worker_ids = config.get('_session_worker_ids')
            if session_worker_ids:
                try:
                    # Use the toolbar updater if available
                    output_fn = _TOOLBAR_UPDATER

                    # If using the toolbar, we want continuous updates, not just terminal completion
                    quiet_mode = bool(config.get('_quiet_background_output'))
                    terminal_only = quiet_mode and not _TOOLBAR_UPDATER

                    kwargs = {
                        "session_worker_ids": session_worker_ids,
                        "only_terminal_updates": terminal_only,
                        "overlay_mode": bool(output_fn),
                    }
                    if output_fn:
                        kwargs["output"] = output_fn

                    ensure_background_notifier(worker_manager, **kwargs)
                except Exception:
                    pass
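        # Two delivery modes fall out of the kwargs above (illustrative): with a
        # toolbar updater registered, progress is overlaid in the bottom toolbar;
        # otherwise quiet mode limits the notifier to terminal completion messages.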
        pipeline_status = "completed"
        pipeline_error = ""

        # Apply the first-stage selection if present
        if first_stage_selection_indices:
            # Ensure we have a table context for expansion from the previous command
            if not ctx.get_current_stage_table_source_command():
                display_table = ctx.get_display_table() if hasattr(ctx, 'get_display_table') else None
                table_for_stage = display_table or ctx.get_last_result_table()
                if table_for_stage:
                    ctx.set_current_stage_table(table_for_stage)

            # Special check for table-specific behavior BEFORE command expansion.
            # For some provider tables, we prefer item-based selection over command expansion,
            # and may auto-append a sensible follow-up stage (e.g. YouTube -> download-media).
            source_cmd = ctx.get_current_stage_table_source_command()
            source_args = ctx.get_current_stage_table_source_args()

            # Check the table property
            current_table = ctx.get_current_stage_table()
            table_type = current_table.table if current_table and hasattr(current_table, 'table') else None

            # Logic based on table type
            if table_type in ('youtube', 'soulseek'):
                # Force fallback to item-based selection so we can auto-append a
                # follow-up stage; this skips the command expansion block below.
                command_expanded = False
            elif source_cmd == 'search-file' and source_args and 'youtube' in source_args:
                # Legacy check for youtube
                command_expanded = False
            else:
                # Try command-based expansion first if we have source command info
                command_expanded = False
                selected_row_args = []

                skip_pipe_expansion = source_cmd == '.pipe' and len(stages) > 0

                if source_cmd and not skip_pipe_expansion:
                    # Try to find row args for the selected indices
                    for idx in first_stage_selection_indices:
                        row_args = ctx.get_current_stage_table_row_selection_args(idx)
                        if row_args:
                            selected_row_args.extend(row_args)
                            break  # For now, take the first selected row's args

                    if selected_row_args:
                        # Success: reconstruct the command with the selection args.
                        # Handle the case where source_cmd might be a list (though it should be a string).
                        cmd_list = source_cmd if isinstance(source_cmd, list) else [source_cmd]
                        expanded_stage = cmd_list + source_args + selected_row_args

                        if first_stage_had_extra_args:
                            # Append extra args from the first stage (e.g. @3 arg1 arg2)
                            expanded_stage += stages[0]
                            stages[0] = expanded_stage
                        else:
                            # Insert the expanded command as the first stage (it was popped earlier if it was only @N)
                            stages.insert(0, expanded_stage)

                        if pipeline_session and worker_manager:
                            log_msg = f"@N expansion: {source_cmd} + {' '.join(str(x) for x in selected_row_args)}"
                            worker_manager.log_step(pipeline_session.worker_id, log_msg)

                        first_stage_selection_indices = []  # Clear, we've expanded it
                        command_expanded = True

            # If command-based expansion didn't work, fall back to item-based selection
            if not command_expanded and first_stage_selection_indices:
                # FALLBACK: item-based selection (filter piped items directly)
                last_piped_items = ctx.get_last_result_items()
                # Align to the displayed row order so @N matches what the user sees
                stage_table = ctx.get_current_stage_table()
                if not stage_table and hasattr(ctx, 'get_display_table'):
                    stage_table = ctx.get_display_table()
                if not stage_table:
                    stage_table = ctx.get_last_result_table()
                resolved_items = _resolve_items_for_selection(stage_table, last_piped_items)
                _debug_selection("first-stage", first_stage_selection_indices, stage_table, last_piped_items, resolved_items)
                if last_piped_items:
                    try:
                        filtered = [resolved_items[i] for i in first_stage_selection_indices if 0 <= i < len(resolved_items)]
                        if filtered:
                            # Allow providers/stores to override selection behavior (e.g., Matrix room picker).
                            if _maybe_run_class_selector(filtered, stage_is_last=(not stages)):
                                return

                            # Convert filtered items to PipeObjects for consistent pipeline handling
                            from cmdlet._shared import coerce_to_pipe_object
                            filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
                            piped_result = filtered_pipe_objs if len(filtered_pipe_objs) > 1 else filtered_pipe_objs[0]

                            if pipeline_session and worker_manager:
                                selection_parts = [f'@{i+1}' for i in first_stage_selection_indices]
                                log_msg = f"Applied @N selection {' | '.join(selection_parts)}"
                                worker_manager.log_step(pipeline_session.worker_id, log_msg)

                            # Special case for table-specific auto-piping.
                            # This handles the case where @N was the ONLY stage (e.g. the user typed "@1"):
                            # stages is then empty, and we are in this fallback because command_expanded is False.

                            # Check the table type
                            current_table = ctx.get_current_stage_table()
                            if not current_table:
                                current_table = ctx.get_last_result_table()

                            table_type = current_table.table if current_table and hasattr(current_table, 'table') else None

                            source_cmd = ctx.get_last_result_table_source_command()
                            source_args = ctx.get_last_result_table_source_args()

                            if not stages:
                                if table_type == 'youtube':
                                    print("Auto-running YouTube selection via download-media")
                                    stages.append(['download-media'])
                                elif table_type == 'soulseek':
                                    print("Auto-piping Soulseek selection to download-file")
                                    stages.append(['download-file'])
                                elif table_type == 'openlibrary':
                                    print("Auto-piping OpenLibrary selection to download-file")
                                    stages.append(['download-file'])
                                elif table_type == 'libgen':
                                    print("Auto-piping Libgen selection to download-file")
                                    stages.append(['download-file'])
                                elif source_cmd == 'search-file' and source_args and 'youtube' in source_args:
                                    # Legacy check
                                    print("Auto-running YouTube selection via download-media")
                                    stages.append(['download-media'])
                            else:
                                # If the user is piping a provider selection into additional stages (e.g. add-file),
                                # automatically insert the appropriate download stage so @N is "logical".
                                # This prevents add-file from receiving an unreachable provider path like "share\...".
                                first_cmd = stages[0][0] if stages and stages[0] else None
                                if table_type == 'soulseek' and first_cmd not in ('download-file', 'download-media', 'download_media', '.pipe'):
                                    print("Auto-inserting download-file after Soulseek selection")
                                    stages.insert(0, ['download-file'])
                                if table_type == 'youtube' and first_cmd not in ('download-media', 'download_media', 'download-file', '.pipe'):
                                    print("Auto-inserting download-media after YouTube selection")
                                    stages.insert(0, ['download-media'])
                                if table_type == 'libgen' and first_cmd not in ('download-file', 'download-media', 'download_media', '.pipe'):
                                    print("Auto-inserting download-file after Libgen selection")
                                    stages.insert(0, ['download-file'])
                        else:
                            print("No items matched selection in pipeline\n")
                            return
                    except (TypeError, IndexError) as e:
                        print(f"Error applying selection in pipeline: {e}\n")
                        return
                else:
                    print("No previous results to select from\n")
                    return
        try:
            for stage_index, stage_tokens in enumerate(stages):
                if not stage_tokens:
                    continue

                cmd_name = stage_tokens[0].replace("_", "-").lower()
                stage_args = stage_tokens[1:]

                # Bare '@' means "use the subject of the current result table" (e.g., the file whose tags/url are shown)
                if cmd_name == "@":
                    subject = ctx.get_last_result_subject()
                    if subject is None:
                        print("No current result context available for '@'\n")
                        pipeline_status = "failed"
                        pipeline_error = "No result subject for @"
                        return
                    piped_result = subject
                    # Normalize to a list for downstream expectations
                    try:
                        subject_items = subject if isinstance(subject, list) else [subject]
                        ctx.set_last_items(subject_items)
                    except Exception:
                        pass
                    if pipeline_session and worker_manager:
                        try:
                            worker_manager.log_step(pipeline_session.worker_id, "@ used current table subject")
                        except Exception:
                            pass
                    continue

                # Check if this is selection syntax (@N, @N-M, @{N,M,K}, @*, @3,5,7, @3-6,8) instead of a command
                if cmd_name.startswith('@'):
                    selection = _parse_selection_syntax(cmd_name)
                    is_select_all = (cmd_name == "@*")

                    if selection is not None or is_select_all:
                        # This is a selection stage.
                        # Check if we should expand it to a full command instead of just filtering.
                        should_expand_to_command = False

                        # Check if piped_result contains format objects and we have expansion info
                        source_cmd = ctx.get_current_stage_table_source_command()
                        source_args = ctx.get_current_stage_table_source_args()

                        # If selecting from a YouTube results table and this is the last stage,
                        # auto-run download-media instead of leaving a bare selection.
                        current_table = ctx.get_current_stage_table()
                        table_type = current_table.table if current_table and hasattr(current_table, 'table') else None
                        if table_type == 'youtube' and stage_index + 1 >= len(stages):
                            print("Auto-running YouTube selection via download-media")
                            stages.append(['download-media', *stage_args])
                            should_expand_to_command = False

                        if source_cmd in ('.pipe', '.adjective'):
                            should_expand_to_command = True
                        if source_cmd == '.pipe' and (stage_index + 1 < len(stages) or stage_args):
                            # When piping playlist rows to another cmdlet, prefer item-based selection
                            should_expand_to_command = False
                        elif source_cmd == 'search-file' and source_args and 'youtube' in source_args:
                            # Legacy behavior: a selection at the end should run a sensible follow-up.
                            if stage_index + 1 >= len(stages):
                                # Only auto-pipe if this is the last stage
                                print("Auto-running YouTube selection via download-media")
                                stages.append(['download-media'])
                                # Force should_expand_to_command to False so we fall through to filtering
                                should_expand_to_command = False

                        elif isinstance(piped_result, (list, tuple)):
                            first_item = piped_result[0] if piped_result else None
                            if isinstance(first_item, dict) and first_item.get('format_id') is not None:
                                # Format objects detected - check for a source command
                                if source_cmd:
                                    should_expand_to_command = True
                        elif isinstance(piped_result, dict) and piped_result.get('format_id') is not None:
                            # Single format object
                            if source_cmd:
                                should_expand_to_command = True

                        # If we have a source command but no piped data (paused for selection), expand to a command
                        if not should_expand_to_command and source_cmd and selection is not None and piped_result is None:
                            should_expand_to_command = True

                        # If expanding to a command, replace this stage and re-execute
                        if should_expand_to_command and selection is not None:
                            source_cmd = ctx.get_current_stage_table_source_command()
                            source_args = ctx.get_current_stage_table_source_args()
                            selection_indices = sorted([i - 1 for i in selection])

                            # Get row args for the first selected index
                            selected_row_args = []
                            for idx in selection_indices:
                                row_args = ctx.get_current_stage_table_row_selection_args(idx)
                                if row_args:
                                    selected_row_args.extend(row_args)
                                    break

                            if selected_row_args:
                                # Expand to a full command, including any arguments passed
                                # to the selection itself (e.g. @3 arg1 arg2)
                                extra_args = stage_tokens[1:]
                                expanded_stage = [source_cmd] + source_args + selected_row_args + extra_args
                                print(f"Expanding {cmd_name} to: {' '.join(expanded_stage)}")

                                # Replace the current stage and re-execute it
                                stages[stage_index] = expanded_stage
                                stage_tokens = expanded_stage
                                cmd_name = expanded_stage[0].replace("_", "-").lower()
                                stage_args = expanded_stage[1:]

                                # Clear piped_result so the expanded command doesn't receive the format objects
                                piped_result = None

                                # Don't continue - fall through to execute the expanded command
                        # If not expanding, use as a filter
                        if not should_expand_to_command:
                            # This is a selection stage - filter piped results.
                            # Prefer selecting from the active result context even when nothing is piped;
                            # some cmdlets present a selectable table and rely on @N afterwards.
                            if piped_result is None:
                                piped_result_list = ctx.get_last_result_items()
                                if not piped_result_list:
                                    print(f"No piped results to select from with {cmd_name}\n")
                                    pipeline_status = "failed"
                                    pipeline_error = f"Selection {cmd_name} without upstream results"
                                    return
                            else:
                                # Normalize piped_result to always be a list for indexing
                                if isinstance(piped_result, dict) or not isinstance(piped_result, (list, tuple)):
                                    piped_result_list = [piped_result]
                                else:
                                    piped_result_list = piped_result

                            # Get the indices to select
                            if is_select_all:
                                # @* means select all items
                                selection_indices = list(range(len(piped_result_list)))
                            elif selection is not None:
                                # Convert to 0-based indices
                                selection_indices = sorted([i - 1 for i in selection])
                            else:
                                selection_indices = []
                            # Align indices to the displayed row order
                            stage_table = ctx.get_current_stage_table()
                            if not stage_table and hasattr(ctx, 'get_display_table'):
                                stage_table = ctx.get_display_table()
                            if not stage_table:
                                stage_table = ctx.get_last_result_table()
                            # Prefer selecting from the displayed table's items if available.
                            # This matters when a cmdlet shows a selectable overlay table but does not emit
                            # items downstream (e.g., add-file -provider matrix shows rooms, but the piped
                            # value is still the original file).
                            selection_base = list(piped_result_list)
                            try:
                                table_rows = len(stage_table.rows) if stage_table and hasattr(stage_table, 'rows') and stage_table.rows else None
                                last_items = ctx.get_last_result_items()
                                if last_items and table_rows is not None and len(last_items) == table_rows:
                                    selection_base = list(last_items)
                            except Exception:
                                pass

                            resolved_list = _resolve_items_for_selection(stage_table, selection_base)
                            _debug_selection("pipeline-stage", selection_indices, stage_table, selection_base, resolved_list)

                            try:
                                filtered = [resolved_list[i] for i in selection_indices if 0 <= i < len(resolved_list)]
                                if filtered:
                                    # Allow providers/stores to override selection behavior (e.g., Matrix room picker).
                                    if _maybe_run_class_selector(filtered, stage_is_last=(stage_index + 1 >= len(stages))):
                                        return

                                    # Convert filtered items to PipeObjects for consistent pipeline handling
                                    from cmdlet._shared import coerce_to_pipe_object
                                    filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
                                    piped_result = filtered_pipe_objs if len(filtered_pipe_objs) > 1 else filtered_pipe_objs[0]
                                    print(f"Selected {len(filtered)} item(s) using {cmd_name}")

                                    # If selecting YouTube results and there are downstream stages,
                                    # insert download-media so subsequent cmdlets receive a local temp file.
                                    try:
                                        current_table = ctx.get_current_stage_table()
                                        table_type = current_table.table if current_table and hasattr(current_table, 'table') else None
                                    except Exception:
                                        table_type = None

                                    if table_type == 'youtube' and stage_index + 1 < len(stages):
                                        next_cmd = stages[stage_index + 1][0] if stages[stage_index + 1] else None
                                        if next_cmd not in ('download-media', 'download_media', 'download-file', '.pipe'):
                                            print("Auto-inserting download-media after YouTube selection")
                                            stages.insert(stage_index + 1, ['download-media'])

                                    if table_type == 'libgen' and stage_index + 1 < len(stages):
                                        next_cmd = stages[stage_index + 1][0] if stages[stage_index + 1] else None
                                        if next_cmd not in ('download-file', 'download-media', 'download_media', '.pipe'):
                                            print("Auto-inserting download-file after Libgen selection")
                                            stages.insert(stage_index + 1, ['download-file'])

                                    # If selection is the last stage and looks like a provider result,
                                    # auto-initiate the borrow/download flow.
                                    if stage_index + 1 >= len(stages):
                                        try:
                                            from ProviderCore.registry import get_search_provider as _get_search_provider
                                        except Exception:
                                            _get_search_provider = None

                                        if _get_search_provider is not None:
                                            selected_list = filtered_pipe_objs
                                            provider_table: Optional[str] = None
                                            try:
                                                for obj in selected_list:
                                                    extra = getattr(obj, "extra", None)
                                                    if isinstance(extra, dict) and extra.get("table"):
                                                        provider_table = str(extra.get("table"))
                                                        break
                                            except Exception:
                                                provider_table = None

                                            if provider_table:
                                                try:
                                                    provider = _get_search_provider(provider_table, config)
                                                except Exception:
                                                    provider = None
                                                if provider is not None:
                                                    print("Auto-downloading selection via download-file")
                                                    stages.append(["download-file"])
                                            else:
                                                # Fallback: if we know the current table type, prefer a sensible default.
                                                if table_type == 'libgen':
                                                    print("Auto-downloading Libgen selection via download-file")
                                                    stages.append(["download-file"])
                                    continue
                                else:
                                    print(f"No items matched selection {cmd_name}\n")
                                    pipeline_status = "failed"
                                    pipeline_error = f"Selection {cmd_name} matched nothing"
                                    return
                            except (TypeError, IndexError) as e:
                                print(f"Error applying selection {cmd_name}: {e}\n")
                                pipeline_status = "failed"
                                pipeline_error = f"Selection error: {e}"
                                return
                    # If parsing failed, treat it as a regular command name (it will fail below)

                # Get the cmdlet function
                cmd_fn = REGISTRY.get(cmd_name)
                if not cmd_fn:
                    print(f"Unknown command in pipeline: {cmd_name}\n")
                    pipeline_status = "failed"
                    pipeline_error = f"Unknown command {cmd_name}"
                    return

                # Prevent stale tables (e.g., a previous download-media format picker)
                # from leaking into subsequent stages and being displayed again.
                try:
                    ctx.set_current_stage_table(None)
                except Exception:
                    pass
debug(f"[pipeline] Stage {stage_index}: cmd_name={cmd_name}, cmd_fn type={type(cmd_fn)}, piped_result type={type(piped_result)}, stage_args={stage_args}")
|
2025-11-25 20:09:33 -08:00
|
|
|
|
|
|
|
|
# Execute the cmdlet with piped input
|
|
|
|
|
stage_session: Optional[_WorkerStageSession] = None
|
|
|
|
|
stage_status = "completed"
|
|
|
|
|
stage_error = ""
|
|
|
|
|
stage_label = f"[Stage {stage_index + 1}/{len(stages)}] {cmd_name}"
|
|
|
|
|
if pipeline_session and worker_manager:
|
|
|
|
|
try:
|
|
|
|
|
worker_manager.log_step(pipeline_session.worker_id, f"{stage_label} started")
|
|
|
|
|
except Exception:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
|
|
|
|
stage_session = _begin_worker_stage(
|
|
|
|
|
worker_manager=worker_manager,
|
|
|
|
|
cmd_name=cmd_name,
|
|
|
|
|
stage_tokens=stage_tokens,
|
|
|
|
|
config=config,
|
|
|
|
|
command_text=" ".join(stage_tokens),
|
|
|
|
|
)
|
2025-12-11 12:47:30 -08:00
|
|
|
|
|
|
|
|
# Create pipeline context for this stage with the worker ID
|
|
|
|
|
is_last_stage = (stage_index == len(stages) - 1)
|
|
|
|
|
stage_worker_id = stage_session.worker_id if stage_session else (pipeline_session.worker_id if pipeline_session else None)
|
|
|
|
|
pipeline_ctx = ctx.PipelineStageContext(stage_index=stage_index, total_stages=len(stages), worker_id=stage_worker_id)
|
|
|
|
|
ctx.set_stage_context(pipeline_ctx)
|
2025-11-25 20:09:33 -08:00
|
|
|
                try:
                    if isinstance(config, dict):
                        config['_pipeline_remaining_after_current'] = stages[stage_index + 1:]
                    debug(f"[pipeline] Calling cmd_fn({type(piped_result).__name__}, {stage_args}, config)")
                    ret_code = cmd_fn(piped_result, stage_args, config)
                    debug(f"[pipeline] cmd_fn returned: {ret_code} (type: {type(ret_code)})")

                    # Store emitted results for the next stage (or display if last stage)
                    if pipeline_ctx.emits:
                        if is_last_stage:
                            # Last stage - display results
                            if RESULT_TABLE_AVAILABLE and ResultTable is not None and pipeline_ctx.emits:
                                table_title = _get_table_title_for_command(cmd_name, pipeline_ctx.emits, stage_args)

                                # Only set source_command for search/filter commands (not display-only or action commands).
                                # This preserves context so @N refers to the original search, not intermediate results.
                                selectable_commands = {
                                    'search-file', 'download-data', 'download-media', 'search_file', 'download_data', 'download_media',
                                    '.config', '.worker'
                                }
                                # Display-only commands (just show data, don't modify or search)
                                display_only_commands = {
                                    'get-note', 'get_note',
                                    'get-relationship', 'get_relationship', 'get-file', 'get_file',
                                }
                                # Commands that manage their own table/history state (e.g. get-tag)
                                self_managing_commands = {
                                    'get-tag', 'get_tag', 'tags',
                                    'get-url', 'get_url',
                                    'search-file', 'search_file',
                                    'search-provider', 'search_provider',
                                    'search-store', 'search_store'
                                }
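                                # Classification effects (illustrative): selectable commands
                                # (e.g. search-file) save their table as @N selection context;
                                # display-only commands (e.g. get-note) print items but keep the
                                # prior search context; everything else is treated as an action
                                # command and overwrites nothing unless a display overlay exists.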
                                overlay_table = ctx.get_display_table() if hasattr(ctx, 'get_display_table') else None

                                if cmd_name in self_managing_commands:
                                    # The command has already set the table and history;
                                    # retrieve the table it set so we print the correct custom formatting.

                                    # Check for an overlay table first (e.g. get-tag)
                                    if hasattr(ctx, 'get_display_table'):
                                        table = ctx.get_display_table()
                                    else:
                                        table = None

                                    if table is None:
                                        table = ctx.get_last_result_table()

                                    if table is None:
                                        # Fallback if something went wrong
                                        table = ResultTable(table_title)
                                        for emitted in pipeline_ctx.emits:
                                            table.add_result(emitted)
                                else:
                                    if cmd_name in selectable_commands:
                                        table = ResultTable(table_title)

                                        # Detect the table type from items
                                        first_table = None
                                        consistent = True

                                        for emitted in pipeline_ctx.emits:
                                            table.add_result(emitted)

                                            # Check for a table property
                                            item_table = None
                                            if isinstance(emitted, dict):
                                                item_table = emitted.get('table')
                                            else:
                                                item_table = getattr(emitted, 'table', None)

                                            if item_table:
                                                if first_table is None:
                                                    first_table = item_table
                                                elif first_table != item_table:
                                                    consistent = False

                                        if consistent and first_table:
                                            table.set_table(first_table)

                                        table.set_source_command(cmd_name, stage_args)
                                        ctx.set_last_result_table(table, pipeline_ctx.emits)
                                    elif cmd_name in display_only_commands:
                                        table = ResultTable(table_title)
                                        for emitted in pipeline_ctx.emits:
                                            table.add_result(emitted)
                                        # Display-only: show the table but preserve search context
                                        ctx.set_last_result_items_only(pipeline_ctx.emits)
                                    else:
                                        # Action commands: avoid overwriting search history/table unless a display overlay exists
                                        if overlay_table is not None:
                                            table = overlay_table
                                        else:
                                            table = None

                                if table is not None:
                                    print()
                                    print(table.format_plain())
                            else:
                                for emitted in pipeline_ctx.emits:
                                    if isinstance(emitted, dict):
                                        print(json.dumps(emitted, indent=2))
                                    else:
                                        print(emitted)
                                # For display-only results, also preserve context by not calling set_last_result_table
                        else:
                            # Intermediate stage - thread results to the next stage
                            piped_result = pipeline_ctx.emits
                            ctx.set_last_result_table(None, pipeline_ctx.emits)
                    else:
                        # No output from this stage. If it presented a selectable table (e.g., a format list),
                        # pause and stash the remaining pipeline so @N can resume with the selection applied.
                        if is_last_stage:
                            # Last stage with no emitted items: only display a *current* selectable table set by
                            # the cmdlet (e.g., download-media format picker). Do NOT fall back to last_result_table,
                            # which may be stale from a previous command.
                            stage_table_source = ctx.get_current_stage_table_source_command()
                            row_has_selection = ctx.get_current_stage_table_row_selection_args(0) is not None
                            stage_table = ctx.get_current_stage_table()
                            if not stage_table and hasattr(ctx, 'get_display_table'):
                                stage_table = ctx.get_display_table()

                            if RESULT_TABLE_AVAILABLE and stage_table is not None and stage_table_source and row_has_selection:
                                try:
                                    print()
                                    print(stage_table.format_plain())
                                except Exception:
                                    pass
                            continue
                        if not is_last_stage:
                            stage_table_source = ctx.get_current_stage_table_source_command()
                            row_has_selection = ctx.get_current_stage_table_row_selection_args(0) is not None
                            stage_table = ctx.get_current_stage_table()

                            # Check if the next stage is an @N selection - if so, don't pause, let it process
                            next_stage = stages[stage_index + 1] if stage_index + 1 < len(stages) else None
                            next_is_selection = next_stage and next_stage[0] and next_stage[0][0].startswith('@')

                            debug(f"[pipeline] Stage {stage_index} pause check: source={stage_table_source}, has_selection={row_has_selection}, table={stage_table is not None}, next_is_selection={next_is_selection}")

                            if stage_table_source and row_has_selection and not next_is_selection:
                                # Display the table before pausing
                                if RESULT_TABLE_AVAILABLE and stage_table is not None:
                                    debug(f"[pipeline] Displaying stage table with {len(stage_table.rows) if hasattr(stage_table, 'rows') else 0} rows")
                                    print()
                                    print(stage_table.format_plain())
                                    print()

                                pending_tail = stages[stage_index + 1:]
                                if pending_tail and pending_tail[0] and pending_tail[0][0].startswith('@'):
                                    pending_tail = pending_tail[1:]
                                if hasattr(ctx, 'set_pending_pipeline_tail') and pending_tail:
                                    ctx.set_pending_pipeline_tail(pending_tail, stage_table_source)
                                elif hasattr(ctx, 'clear_pending_pipeline_tail'):
                                    ctx.clear_pending_pipeline_tail()
                                if pipeline_session and worker_manager:
                                    try:
                                        worker_manager.log_step(pipeline_session.worker_id, "Pipeline paused for @N selection")
                                    except Exception:
                                        pass
                                print("Pipeline paused: select a format with @N to continue remaining stages")
                                return

                    # If the stage requested a pipeline abort (e.g., queued async work), stop processing further stages
                    if getattr(pipeline_ctx, "abort_remaining", False):
                        if pipeline_session and worker_manager:
                            try:
                                worker_manager.log_step(
                                    pipeline_session.worker_id,
                                    f"{stage_label} queued background work; skipping remaining stages",
                                )
                            except Exception:
                                pass
                        return
                    if ret_code != 0:
                        stage_status = "failed"
                        stage_error = f"exit code {ret_code}"
                        # Only print exit code if it's an integer (not the cmdlet object)
                        if isinstance(ret_code, int):
                            print(f"[stage {stage_index} exit code: {ret_code}]\n")
                        else:
                            print(f"[stage {stage_index} failed]\n")
                        if pipeline_session:
                            pipeline_status = "failed"
                            pipeline_error = f"{stage_label} failed ({stage_error})"
                        return
                except Exception as e:
                    stage_status = "failed"
                    stage_error = f"{type(e).__name__}: {e}"
                    print(f"[error in stage {stage_index} ({cmd_name})]: {type(e).__name__}: {e}\n")
                    import traceback
                    traceback.print_exc()
                    if pipeline_session:
                        pipeline_status = "failed"
                        pipeline_error = f"{stage_label} error: {e}"
                    return
                finally:
                    if stage_session:
                        stage_session.close(status=stage_status, error_msg=stage_error)
                    elif pipeline_session and worker_manager:
                        try:
                            worker_manager.log_step(
                                pipeline_session.worker_id,
                                f"{stage_label} {'completed' if stage_status == 'completed' else 'failed'}",
                            )
                        except Exception:
                            pass
            # If we have a result but no stages left (e.g. pure selection @3 that didn't expand to a command), display it
            if not stages and piped_result is not None:
                if RESULT_TABLE_AVAILABLE and ResultTable is not None:
                    # Create a simple table for the result
                    table = ResultTable("Selection Result")

                    # Normalize to list
                    items = piped_result if isinstance(piped_result, list) else [piped_result]

                    for item in items:
                        table.add_result(item)

                    # Preserve context for further selection
                    ctx.set_last_result_items_only(items)

                    print()
                    print(table.format_plain())
                else:
                    print(piped_result)
        except Exception as e:
            pipeline_status = "failed"
            pipeline_error = str(e)
            print(f"[error] Failed to execute pipeline: {e}\n")
            import traceback
            traceback.print_exc()
        finally:
            if pipeline_session:
                pipeline_session.close(status=pipeline_status, error_msg=pipeline_error)
    except Exception as e:
        print(f"[error] Failed to execute pipeline: {e}\n")
        import traceback
        traceback.print_exc()


def _execute_cmdlet(cmd_name: str, args: list):
    """Execute a cmdlet with the given arguments.

    Supports @ selection syntax for filtering results from previous commands:
    - @2 - select row 2
    - @2-5 - select rows 2-5
    - @{1,3,5} - select rows 1, 3, 5
    """
    try:
        from cmdlet import REGISTRY
        import json
        import pipeline as ctx

        # Ensure native commands (cmdnat) are loaded
        try:
            from cmdlet_catalog import ensure_registry_loaded as _ensure_registry_loaded
            _ensure_registry_loaded()
        except Exception:
            pass

        # Get the cmdlet function
        cmd_fn = REGISTRY.get(cmd_name)
        if not cmd_fn:
            # Attempt lazy import of the module and retry
            from cmdlet_catalog import import_cmd_module as _catalog_import
            try:
                mod = _catalog_import(cmd_name)
                data = getattr(mod, "CMDLET", None) if mod else None
                if data and hasattr(data, "exec") and callable(getattr(data, "exec")):
                    run_fn = getattr(data, "exec")
                    REGISTRY[cmd_name] = run_fn
                    cmd_fn = run_fn
            except Exception:
                pass
        if not cmd_fn:
            print(f"Unknown command: {cmd_name}\n")
            return

        # Load config relative to CLI root
        config = _load_cli_config()
        # Check for @ selection syntax in arguments.
        # IMPORTANT: support using @N as a VALUE for a value-taking flag (e.g. add-relationship -king @1).
        # Only treat @ tokens as selection when they are NOT in a value position.
        filtered_args: list[str] = []
        selected_indices: list[int] = []
        select_all = False

        # Build a set of flag tokens that consume a value for this cmdlet.
        # We use cmdlet metadata so we don't break patterns like: get-tag -raw @1 (where -raw is a flag).
        value_flags: set[str] = set()
        try:
            meta = _catalog_get_cmdlet_metadata(cmd_name)
            raw = meta.get("raw") if isinstance(meta, dict) else None
            arg_specs = getattr(raw, "arg", None) if raw is not None else None
            if isinstance(arg_specs, list):
                for spec in arg_specs:
                    try:
                        spec_type = str(getattr(spec, "type", "string") or "string").strip().lower()
                        if spec_type == "flag":
                            continue
                        spec_name = str(getattr(spec, "name", "") or "")
                        canonical = spec_name.lstrip("-").strip()
                        if not canonical:
                            continue
                        value_flags.add(f"-{canonical}".lower())
                        value_flags.add(f"--{canonical}".lower())
                        alias = str(getattr(spec, "alias", "") or "").strip()
                        if alias:
                            value_flags.add(f"-{alias}".lower())
                    except Exception:
                        continue
        except Exception:
            value_flags = set()
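
        # Illustrative example (the spec values are hypothetical): an arg spec with
        # name="king", type="string", alias="k" yields value_flags containing
        # "-king", "--king" and "-k", so in `add-relationship -king @1` the "@1"
        # token is kept as the flag's value instead of being parsed as a selection.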
        for i, arg in enumerate(args):
            if isinstance(arg, str) and arg.startswith('@'):
                prev = str(args[i - 1]).lower() if i > 0 else ""
                # If this @ token is the value for a value-taking flag, keep it.
                if prev in value_flags:
                    filtered_args.append(arg)
                    continue

                # Special case: @"string" should be treated as "string" (stripping @)
                # This allows adding new items via @"New Item" syntax
                if len(arg) >= 2 and (arg[1] == '"' or arg[1] == "'"):
                    filtered_args.append(arg[1:].strip('"\''))
                    continue

                # Parse selection: @2, @2-5, @{1,3,5}, @3,5,7, @3-6,8, @*
                if arg.strip() == "@*":
                    select_all = True
                    continue

                selection = _parse_selection_syntax(arg)
                if selection is not None:
                    zero_based = sorted(n - 1 for n in selection if isinstance(n, int) and n > 0)
                    selected_indices.extend([idx for idx in zero_based if idx not in selected_indices])
                    continue

                # Not a valid selection, treat as regular arg
                filtered_args.append(arg)
            else:
                filtered_args.append(str(arg))
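
        # Illustrative example (hypothetical command): `add-tag @"New Tag"` passes the
        # literal string "New Tag" through as a regular argument, while `add-tag @2`
        # is consumed above as a row selection.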

        # Get piped items from previous command results
        piped_items = ctx.get_last_result_items()

        # Create result object - pass full list (or filtered list if @ selection used) to cmdlet
        result = None
        if piped_items:
            if select_all:
                result = piped_items
            elif selected_indices:
                # Filter to selected indices only
                result = [piped_items[idx] for idx in selected_indices if 0 <= idx < len(piped_items)]
            else:
                # No selection specified, pass all items (cmdlets handle lists via normalize_result_input)
                result = piped_items
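
        # Illustrative example (hypothetical values): with piped_items == ["a", "b", "c"]
        # and the user typing @2, selected_indices == [1] and result == ["b"];
        # with @*, result is the full ["a", "b", "c"].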

        worker_manager = _ensure_worker_manager(config)
        stage_session = _begin_worker_stage(
            worker_manager=worker_manager,
            cmd_name=cmd_name,
            stage_tokens=[cmd_name, *filtered_args],
            config=config,
            command_text=" ".join([cmd_name, *filtered_args]).strip() or cmd_name,
        )

        # Create pipeline context with the worker ID
        stage_worker_id = stage_session.worker_id if stage_session else None
        pipeline_ctx = ctx.PipelineStageContext(stage_index=0, total_stages=1, worker_id=stage_worker_id)
        ctx.set_stage_context(pipeline_ctx)
        stage_status = "completed"
        stage_error = ""

        # Execute the cmdlet
        ctx.set_last_selection(selected_indices)
        try:
            ret_code = cmd_fn(result, filtered_args, config)

            # Print emitted results using ResultTable for structured output
            if pipeline_ctx.emits:
                if RESULT_TABLE_AVAILABLE and ResultTable is not None:
                    # Check if these are format objects (from download-data format selection)
                    # Format objects have format_id and should not be displayed as a table
                    first_emit = pipeline_ctx.emits[0]
                    is_format_selection = isinstance(first_emit, dict) and 'format_id' in first_emit

                    # Skip table display for format selection - user will use @N to select
                    if is_format_selection:
                        # Store items for @N selection but don't display table
                        ctx.set_last_result_items_only(pipeline_ctx.emits)
                    else:
                        # Try to format as a table if we have search results
                        table_title = _get_table_title_for_command(cmd_name, pipeline_ctx.emits, filtered_args)

                        # Only set source_command for search/filter commands (not display-only or action commands)
                        # This preserves context so @N refers to the original search, not intermediate results
                        selectable_commands = {
                            'search-file', 'download-data', 'download-media', 'search_file', 'download_data', 'download_media',
                            '.config', '.worker'
                        }
                        # Display-only commands (excluding get-tag which manages its own table)
                        display_only_commands = {
                            'get-url', 'get_url', 'get-note', 'get_note',
                            'get-relationship', 'get_relationship', 'get-file', 'get_file',
                        }
                        # Commands that manage their own table/history state (e.g. get-tag)
                        self_managing_commands = {
                            'get-tag', 'get_tag', 'tags',
                            'search-file', 'search_file',
                            'search-provider', 'search_provider',
                            'search-store', 'search_store'
                        }

                        if cmd_name in self_managing_commands:
                            # Command has already set the table and history
                            # Retrieve the table it set so we print the correct custom formatting
                            table = ctx.get_last_result_table()
                            if table is None:
                                # Fallback if something went wrong
                                table = ResultTable(table_title)
                                for emitted in pipeline_ctx.emits:
                                    table.add_result(emitted)
                        else:
                            table = ResultTable(table_title)
                            for emitted in pipeline_ctx.emits:
                                table.add_result(emitted)

                        if cmd_name in selectable_commands:
                            table.set_source_command(cmd_name, filtered_args)
                            ctx.set_last_result_table(table, pipeline_ctx.emits)
                            # Clear any stale current_stage_table (e.g. from previous download-data formats)
                            # This ensures @N refers to these new results, not old format selections
                            ctx.set_current_stage_table(None)
                        elif cmd_name in display_only_commands:
                            # Display-only: show table but preserve search context
                            ctx.set_last_result_items_only(pipeline_ctx.emits)
                        else:
                            # Action commands: update items only without changing current table or history
                            ctx.set_last_result_items_only(pipeline_ctx.emits)

                        print()
                        print(table.format_plain())

                        # Special case: if this was a youtube search, print a hint about auto-piping
                        if cmd_name == 'search-file' and filtered_args and 'youtube' in filtered_args:
                            # print("\n[Hint] Type @N to play a video in MPV (e.g. @1)")
                            pass
                else:
                    # Fallback to raw output if ResultTable not available
                    for emitted in pipeline_ctx.emits:
                        if isinstance(emitted, dict):
                            print(json.dumps(emitted, indent=2))
                        else:
                            print(emitted)

                    # Store emitted items for @ selection
                    selectable_commands = {
                        'search-file', 'download-data', 'download-media', 'search_file', 'download_data', 'download_media',
                        '.config', '.worker'
                    }
                    display_only_commands = {
                        'get-url', 'get_url', 'get-note', 'get_note',
                        'get-relationship', 'get_relationship', 'get-file', 'get_file',
                    }
                    self_managing_commands = {
                        'get-tag', 'get_tag', 'tags',
                        'search-file', 'search_file'
                    }

                    if cmd_name in self_managing_commands:
                        pass  # Already handled by cmdlet
                    elif cmd_name in selectable_commands:
                        ctx.set_last_result_table(None, pipeline_ctx.emits)
                    elif cmd_name in display_only_commands:
                        ctx.set_last_result_items_only(pipeline_ctx.emits)
                    else:
                        # Action commands: items only, don't change table/history
                        ctx.set_last_result_items_only(pipeline_ctx.emits)

            if ret_code != 0:
                stage_status = "failed"
                stage_error = f"exit code {ret_code}"
                print(f"[exit code: {ret_code}]\n")
        except Exception as e:
            stage_status = "failed"
            stage_error = f"{type(e).__name__}: {e}"
            print(f"[error] {type(e).__name__}: {e}\n")
        finally:
            ctx.clear_last_selection()
            if stage_session:
                stage_session.close(status=stage_status, error_msg=stage_error)
    except Exception as e:
        print(f"[error] Failed to execute cmdlet: {e}\n")


def _show_cmdlet_list():
    """Display available cmdlets with full metadata: cmd:name alias:aliases args:args."""
    try:
        metadata = _catalog_list_cmdlet_metadata()
        print("\nAvailable cmdlets:")
        for cmd_name in sorted(metadata.keys()):
            info = metadata[cmd_name]
            aliases = info.get("aliases", [])
            args = info.get("args", [])

            display = f" cmd:{cmd_name}"
            if aliases:
                display += f" alias:{', '.join(aliases)}"
            if args:
                arg_names = [a.get("name") for a in args if a.get("name")]
                if arg_names:
                    display += f" args:{', '.join(arg_names)}"
            summary = info.get("summary")
            if summary:
                display += f" - {summary}"
            print(display)

        print()
    except Exception as e:
        print(f"Error: {e}\n")


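# Example output line from _show_cmdlet_list (illustrative; the cmdlet and its
# metadata here are hypothetical):
#  cmd:add-tag alias:tag args:store - Apply a tag to the selected result

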
def _show_cmdlet_help(cmd_name: str):
    """Display help for a cmdlet."""
    try:
        meta = _catalog_get_cmdlet_metadata(cmd_name)
        if meta:
            _print_metadata(cmd_name, meta)
            return
        print(f"Unknown command: {cmd_name}\n")
    except Exception as e:
        print(f"Error: {e}\n")


def _print_metadata(cmd_name: str, data):
    """Print cmdlet metadata in PowerShell-style format."""
    d = data.to_dict() if hasattr(data, "to_dict") else data
    if not isinstance(d, dict):
        print(f"Invalid metadata for {cmd_name}\n")
        return

    name = d.get('name', cmd_name)
    summary = d.get("summary", "")
    usage = d.get("usage", "")
    description = d.get("description", "")
    args = d.get("args", [])
    details = d.get("details", [])

    # NAME section
    print("\nNAME")
    print(f"    {name}")

    # SYNOPSIS section
    print("\nSYNOPSIS")
    if usage:
        # Format usage similar to PowerShell syntax
        print(f"    {usage}")
    else:
        print(f"    {name}")

    # DESCRIPTION section
    if summary or description:
        print("\nDESCRIPTION")
        if summary:
            print(f"    {summary}")
        if description:
            print(f"    {description}")

    # PARAMETERS section
    if args and isinstance(args, list):
        print("\nPARAMETERS")
        for arg in args:
            if isinstance(arg, dict):
                name_str = arg.get("name", "?")
                typ = arg.get("type", "string")
                required = arg.get("required", False)
                desc = arg.get("description", "")
            else:
                name_str = getattr(arg, "name", "?")
                typ = getattr(arg, "type", "string")
                required = getattr(arg, "required", False)
                desc = getattr(arg, "description", "")

            # Format: -Name <type> [required flag]
            req_marker = "[required]" if required else "[optional]"
            print(f"    -{name_str} <{typ}>")
            if desc:
                print(f"        {desc}")
            print(f"        {req_marker}")
            print()

    # REMARKS/DETAILS section
    if details:
        print("REMARKS")
        for detail in details:
            print(f"    {detail}")
        print()


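# Example rendering from _print_metadata (illustrative; "add-tag" and its
# parameters are hypothetical):
#
#   NAME
#       add-tag
#
#   SYNOPSIS
#       add-tag <tag> [-store <name>]
#
#   PARAMETERS
#       -tag <string>
#           Tag to apply to the selected item
#           [required]

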
# ============================================================================
# SELECTION UTILITIES - Consolidated from selection_syntax.py and select_utils.py
# ============================================================================


def _parse_selection_syntax(token: str) -> Optional[Set[int]]:
    """Parse @ selection syntax into a set of 1-based indices.

    Args:
        token: Token starting with @ (e.g., "@2", "@2-5", "@{1,3,5}", "@*", "@3,5,7", "@3-6,8")

    Returns:
        Set of 1-based indices (for concrete selections like @1, @2-5, @3,5,7)
        None for special cases: @* (all), @.. (restore previous), @,, (restore next)
        None for invalid format

    Special handling:
        - @* returns None and should be handled as "select all current items"
        - @.. returns None and is handled as "restore previous table" (backward navigation)
        - @,, returns None and is handled as "restore next table" (forward navigation)
        - Invalid selections like @-1 or @a return None and are treated as invalid args

    Examples:
        "@2" → {2}
        "@2-5" → {2, 3, 4, 5}
        "@{2,5,6}" → {2, 5, 6}
        "@2,5,6" → {2, 5, 6}
        "@2-5,8,10-12" → {2, 3, 4, 5, 8, 10, 11, 12}
        "@*" → None (caller checks token=="@*" to handle as "all")
        "@.." → None (backward navigation)
        "@,," → None (forward navigation)
    """
    if not token.startswith("@"):
        return None

    selector = token[1:].strip()

    # Special case: @.. means restore previous result table (handled separately)
    # Special case: @,, means restore next result table (handled separately)
    # Special case: @* means all items (should be converted to actual list by caller)
    if selector in ("..", ",,", ".", ",", "*"):
        return None

    indices = set()

    # Handle set notation: @{2,5,6,7} (convert to standard format)
    if selector.startswith("{") and selector.endswith("}"):
        selector = selector[1:-1]

    # Handle mixed comma and range notation: @2,5,7-9,10 or @2-5,8,10-12
    parts = selector.split(",")

    for part in parts:
        part = part.strip()
        if not part:
            continue

        try:
            if "-" in part:
                # Range notation: 2-5 or 7-9
                range_parts = part.split("-", 1)  # Split on first - only (in case of negative numbers)
                if len(range_parts) == 2:
                    start_str = range_parts[0].strip()
                    end_str = range_parts[1].strip()

                    # Make sure both are valid positive integers
                    if start_str and end_str:
                        start = int(start_str)
                        end = int(end_str)
                        if start > 0 and end > 0 and start <= end:
                            indices.update(range(start, end + 1))
                        else:
                            return None  # Invalid range
                    else:
                        return None
                else:
                    return None
            else:
                # Single number
                num = int(part)
                if num > 0:
                    indices.add(num)
                else:
                    return None
        except (ValueError, AttributeError):
            return None

    return indices if indices else None


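# Illustrative checks of the parser's contract:
#   _parse_selection_syntax("@2-4,7")  -> {2, 3, 4, 7}
#   _parse_selection_syntax("@{1,3}")  -> {1, 3}
#   _parse_selection_syntax("@0")      -> None  (indices are 1-based)
#   _parse_selection_syntax("@*")      -> None  (callers special-case the literal token)

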
def _filter_items_by_selection(items: List, selection: Optional[Set[int]]) -> List:
    """Filter items by 1-based selection indices.

    Args:
        items: List of items to filter
        selection: Set of 1-based indices, or None for all items

    Returns:
        Filtered list of items in original order

    Examples:
        _filter_items_by_selection([a, b, c, d], {2, 4}) → [b, d]
        _filter_items_by_selection([a, b, c, d], None) → [a, b, c, d]
    """
    if selection is None or len(selection) == 0:
        return items

    filtered = []
    for i, item in enumerate(items, start=1):
        if i in selection:
            filtered.append(item)

    return filtered


def _parse_line_selection(args: Sequence[str]) -> Set[int]:
    """Parse selection arguments to indices.

    Args:
        args: Line numbers and ranges (1-indexed)
            Examples: ["3"], ["1", "3", "5"], ["1-3"]

    Returns:
        Set of 0-indexed line numbers to select

    Raises:
        ValueError: If selection is invalid
    """
    selected_indices: Set[int] = set()

    for arg in args:
        arg = str(arg).strip()

        # Check if it's a range (e.g., "1-3")
        if '-' in arg and not arg.startswith('-'):
            try:
                parts = arg.split('-')
                if len(parts) == 2:
                    start = int(parts[0]) - 1  # Convert to 0-indexed
                    end = int(parts[1])  # 1-indexed inclusive end; exclusive bound for range() below
                    for i in range(start, end):
                        selected_indices.add(i)
                else:
                    raise ValueError(f"Invalid range format: {arg}")
            except ValueError as e:
                raise ValueError(f"Invalid range: {arg}") from e
        else:
            # Single line number (1-indexed)
            try:
                line_num = int(arg)
                idx = line_num - 1  # Convert to 0-indexed
                selected_indices.add(idx)
            except ValueError:
                raise ValueError(f"Invalid line number: {arg}")

    return selected_indices


def _validate_indices(selected_indices: Set[int], total_lines: int) -> List[str]:
    """Validate indices are within bounds.

    Args:
        selected_indices: Set of 0-indexed line numbers
        total_lines: Total number of available lines

    Returns:
        List of error messages (empty if all valid)
    """
    errors = []
    for idx in selected_indices:
        if idx < 0 or idx >= total_lines:
            errors.append(f"Line {idx + 1} out of range (1-{total_lines})")
    return errors


def _select_lines(lines: List[str], selected_indices: Set[int]) -> List[str]:
    """Select specific lines from input.

    Args:
        lines: List of input lines
        selected_indices: Set of 0-indexed line numbers to select

    Returns:
        List of selected lines in order
    """
    selected_indices_sorted = sorted(selected_indices)
    return [lines[idx] for idx in selected_indices_sorted]


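# Minimal sketch of how the line-selection helpers compose:
#   indices = _parse_line_selection(["1-2", "4"])       # -> {0, 1, 3}
#   errors = _validate_indices(indices, total_lines=3)  # -> ["Line 4 out of range (1-3)"]
#   chosen = _select_lines(["a", "b", "c"], {0, 2})     # -> ["a", "c"]

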
# Keep helper references so static analyzers treat them as used in this module.
_SELECTION_HELPERS = (
    _filter_items_by_selection,
    _parse_line_selection,
    _validate_indices,
    _select_lines,
)


def main():
    """Entry point for the CLI."""
    app = _create_cmdlet_cli()
    if app:
        app()
    else:
        print("Typer not available")


if __name__ == "__main__":
    main()