"""Medeia-Macina CLI.
This module intentionally uses a class-based architecture:
- no legacy procedural entrypoints
- no compatibility shims
- all REPL/pipeline/cmdlet execution state lives on objects
"""
from __future__ import annotations
import atexit
import io
import json
import re
import shlex
import sys
import threading
import time
import uuid
from copy import deepcopy
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, TextIO, cast
import typer
from prompt_toolkit import PromptSession
from prompt_toolkit.completion import Completer, Completion
from prompt_toolkit.document import Document
from prompt_toolkit.lexers import Lexer
from prompt_toolkit.styles import Style
from rich_display import stderr_console, stdout_console
def _install_rich_traceback(*, show_locals: bool = False) -> None:
"""Install Rich traceback handler as the default excepthook.
This keeps uncaught exceptions readable in the terminal.
"""
try:
from rich.traceback import install as rich_traceback_install
rich_traceback_install(show_locals=bool(show_locals))
except Exception:
# Fall back to the standard Python traceback if Rich isn't available.
return
# Default to Rich tracebacks for the whole process.
_install_rich_traceback(show_locals=False)
from SYS.background_notifier import ensure_background_notifier
from SYS.logger import debug, set_debug
from SYS.worker_manager import WorkerManager
from cmdlet_catalog import (
ensure_registry_loaded,
get_cmdlet_arg_choices,
get_cmdlet_arg_flags,
get_cmdlet_metadata,
import_cmd_module,
list_cmdlet_metadata,
list_cmdlet_names,
)
from config import get_local_storage_path, load_config
from result_table import ResultTable
class SelectionSyntax:
"""Parses @ selection syntax into 1-based indices."""
_RANGE_RE = re.compile(r"^[0-9\-]+$")
@staticmethod
def parse(token: str) -> Optional[Set[int]]:
"""Return 1-based indices or None when not a concrete selection.
Concrete selections:
- @2
- @2-5
- @{1,3,5}
- @2,5,7-9
Special (non-concrete) selectors return None:
- @* (select all)
- @.. (history prev)
- @,, (history next)
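A doctest-style sketch of the expected results:
>>> sorted(SelectionSyntax.parse("@2,5,7-9"))
[2, 5, 7, 8, 9]
>>> SelectionSyntax.parse("@{1,3,5}") == {1, 3, 5}
True
>>> SelectionSyntax.parse("@*") is None
True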
"""
if not token or not token.startswith("@"):
return None
selector = token[1:].strip()
if selector in (".", ",", "*"):
return None
if selector.startswith("{") and selector.endswith("}"):
selector = selector[1:-1].strip()
indices: Set[int] = set()
for part in selector.split(","):
part = part.strip()
if not part:
continue
if "-" in part:
pieces = part.split("-", 1)
if len(pieces) != 2:
return None
start_str = pieces[0].strip()
end_str = pieces[1].strip()
if not start_str or not end_str:
return None
try:
start = int(start_str)
end = int(end_str)
except ValueError:
return None
if start <= 0 or end <= 0 or start > end:
return None
indices.update(range(start, end + 1))
continue
try:
value = int(part)
except ValueError:
return None
if value <= 0:
return None
indices.add(value)
return indices if indices else None
class WorkerOutputMirror(io.TextIOBase):
"""Mirror stdout/stderr to worker manager while preserving console output."""
def __init__(self, original: TextIO, manager: WorkerManager, worker_id: str, channel: str):
self._original = original
self._manager = manager
self._worker_id = worker_id
self._channel = channel
self._pending: str = ""
def write(self, data: str) -> int: # type: ignore[override]
if not data:
return 0
self._original.write(data)
self._buffer_text(data)
return len(data)
def flush(self) -> None: # type: ignore[override]
self._original.flush()
self._flush_pending(force=True)
def isatty(self) -> bool: # pragma: no cover
return bool(getattr(self._original, "isatty", lambda: False)())
def _buffer_text(self, data: str) -> None:
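# Only complete lines (ending in \n or \r) are forwarded to the worker
# manager immediately; a trailing partial line is held in self._pending
# until more data arrives or flush() forces it out.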
combined = self._pending + data
lines = combined.splitlines(keepends=True)
if not lines:
self._pending = combined
return
if lines[-1].endswith(("\n", "\r")):
complete = lines
self._pending = ""
else:
complete = lines[:-1]
self._pending = lines[-1]
for chunk in complete:
self._emit(chunk)
def _flush_pending(self, *, force: bool = False) -> None:
if self._pending and force:
self._emit(self._pending)
self._pending = ""
def _emit(self, text: str) -> None:
if not text:
return
try:
self._manager.append_stdout(self._worker_id, text, channel=self._channel)
except Exception:
pass
@property
def encoding(self) -> str: # type: ignore[override]
return getattr(self._original, "encoding", "utf-8")
class WorkerStageSession:
"""Lifecycle helper for wrapping a CLI cmdlet execution in a worker record."""
def __init__(
self,
*,
manager: WorkerManager,
worker_id: str,
orig_stdout: TextIO,
orig_stderr: TextIO,
stdout_proxy: WorkerOutputMirror,
stderr_proxy: WorkerOutputMirror,
config: Optional[Dict[str, Any]],
logging_enabled: bool,
completion_label: str,
error_label: str,
) -> None:
self.manager = manager
self.worker_id = worker_id
self.orig_stdout = orig_stdout
self.orig_stderr = orig_stderr
self.stdout_proxy = stdout_proxy
self.stderr_proxy = stderr_proxy
self.config = config
self.logging_enabled = logging_enabled
self.closed = False
self._completion_label = completion_label
self._error_label = error_label
def close(self, *, status: str = "completed", error_msg: str = "") -> None:
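# Flush any buffered worker output, restore the original stdout/stderr,
# then finalize the worker record. Guarded by self.closed so repeated
# calls are harmless.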
if self.closed:
return
try:
self.stdout_proxy.flush()
self.stderr_proxy.flush()
except Exception:
pass
sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr
if self.logging_enabled:
try:
self.manager.disable_logging_for_worker(self.worker_id)
except Exception:
pass
try:
if status == "completed":
self.manager.log_step(self.worker_id, self._completion_label)
else:
self.manager.log_step(self.worker_id, f"{self._error_label}: {error_msg or status}")
except Exception:
pass
try:
self.manager.finish_worker(self.worker_id, result=status or "completed", error_msg=error_msg or "")
except Exception:
pass
if self.config and self.config.get("_current_worker_id") == self.worker_id:
self.config.pop("_current_worker_id", None)
self.closed = True
class WorkerManagerRegistry:
"""Process-wide WorkerManager cache keyed by library_root."""
_manager: Optional[WorkerManager] = None
_manager_root: Optional[Path] = None
_orphan_cleanup_done: bool = False
_registered: bool = False
@classmethod
def ensure(cls, config: Dict[str, Any]) -> Optional[WorkerManager]:
if not isinstance(config, dict):
return None
existing = config.get("_worker_manager")
if isinstance(existing, WorkerManager):
return existing
library_root = get_local_storage_path(config)
if not library_root:
return None
try:
resolved_root = Path(library_root).resolve()
except Exception:
resolved_root = Path(library_root)
try:
if cls._manager is None or cls._manager_root != resolved_root:
if cls._manager is not None:
try:
cls._manager.close()
except Exception:
pass
cls._manager = WorkerManager(resolved_root, auto_refresh_interval=0.5)
cls._manager_root = resolved_root
manager = cls._manager
config["_worker_manager"] = manager
if manager is not None and not cls._orphan_cleanup_done:
try:
manager.expire_running_workers(
older_than_seconds=120,
worker_id_prefix="cli_%",
reason="CLI session ended unexpectedly; marking worker as failed",
)
except Exception:
pass
else:
cls._orphan_cleanup_done = True
if not cls._registered:
atexit.register(cls.close)
cls._registered = True
return manager
except Exception as exc:
print(f"[worker] Could not initialize worker manager: {exc}", file=sys.stderr)
return None
@classmethod
def close(cls) -> None:
if cls._manager is None:
return
try:
cls._manager.close()
except Exception:
pass
cls._manager = None
cls._manager_root = None
cls._orphan_cleanup_done = False
class WorkerStages:
"""Factory methods for stage/pipeline worker sessions."""
@staticmethod
def _start_worker_session(
worker_manager: Optional[WorkerManager],
*,
worker_type: str,
title: str,
description: str,
pipe_text: str,
config: Optional[Dict[str, Any]],
completion_label: str,
error_label: str,
skip_logging_for: Optional[Set[str]] = None,
session_worker_ids: Optional[Set[str]] = None,
) -> Optional[WorkerStageSession]:
if worker_manager is None:
return None
if skip_logging_for and worker_type in skip_logging_for:
return None
safe_type = worker_type or "cmd"
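# Worker ids follow the pattern "cli_<type prefix, max 8 chars>_<6 hex chars>",
# e.g. something like "cli_pipeline_a1b2c3" for a pipeline run.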
worker_id = f"cli_{safe_type[:8]}_{uuid.uuid4().hex[:6]}"
try:
tracked = worker_manager.track_worker(
worker_id,
worker_type=worker_type,
title=title,
description=description or "(no args)",
pipe=pipe_text,
)
if not tracked:
return None
except Exception as exc:
print(f"[worker] Failed to track {worker_type}: {exc}", file=sys.stderr)
return None
if session_worker_ids is not None:
session_worker_ids.add(worker_id)
logging_enabled = False
try:
handler = worker_manager.enable_logging_for_worker(worker_id)
logging_enabled = handler is not None
except Exception:
logging_enabled = False
orig_stdout = sys.stdout
orig_stderr = sys.stderr
stdout_proxy = WorkerOutputMirror(orig_stdout, worker_manager, worker_id, "stdout")
stderr_proxy = WorkerOutputMirror(orig_stderr, worker_manager, worker_id, "stderr")
sys.stdout = stdout_proxy
sys.stderr = stderr_proxy
if isinstance(config, dict):
config["_current_worker_id"] = worker_id
try:
worker_manager.log_step(worker_id, f"Started {worker_type}")
except Exception:
pass
return WorkerStageSession(
manager=worker_manager,
worker_id=worker_id,
orig_stdout=orig_stdout,
orig_stderr=orig_stderr,
stdout_proxy=stdout_proxy,
stderr_proxy=stderr_proxy,
config=config,
logging_enabled=logging_enabled,
completion_label=completion_label,
error_label=error_label,
)
@classmethod
def begin_stage(
cls,
worker_manager: Optional[WorkerManager],
*,
cmd_name: str,
stage_tokens: Sequence[str],
config: Optional[Dict[str, Any]],
command_text: str,
) -> Optional[WorkerStageSession]:
description = " ".join(stage_tokens[1:]) if len(stage_tokens) > 1 else "(no args)"
session_worker_ids = None
if isinstance(config, dict):
session_worker_ids = config.get("_session_worker_ids")
return cls._start_worker_session(
worker_manager,
worker_type=cmd_name,
title=f"{cmd_name} stage",
description=description,
pipe_text=command_text,
config=config,
completion_label="Stage completed",
error_label="Stage error",
skip_logging_for={".worker", "worker", "workers"},
session_worker_ids=session_worker_ids,
)
@classmethod
def begin_pipeline(
cls,
worker_manager: Optional[WorkerManager],
*,
pipeline_text: str,
config: Optional[Dict[str, Any]],
) -> Optional[WorkerStageSession]:
session_worker_ids: Set[str] = set()
if isinstance(config, dict):
config["_session_worker_ids"] = session_worker_ids
return cls._start_worker_session(
worker_manager,
worker_type="pipeline",
title="Pipeline run",
description=pipeline_text,
pipe_text=pipeline_text,
config=config,
completion_label="Pipeline completed",
error_label="Pipeline error",
session_worker_ids=session_worker_ids,
)
class CmdletIntrospection:
@staticmethod
def cmdlet_names() -> List[str]:
try:
return list_cmdlet_names() or []
except Exception:
return []
@staticmethod
def cmdlet_args(cmd_name: str) -> List[str]:
try:
return get_cmdlet_arg_flags(cmd_name) or []
except Exception:
return []
@staticmethod
def store_choices(config: Dict[str, Any]) -> List[str]:
try:
from Store import Store
storage = Store(config=config, suppress_debug=True)
return list(storage.list_backends() or [])
except Exception:
return []
@classmethod
def arg_choices(cls, *, cmd_name: str, arg_name: str, config: Dict[str, Any]) -> List[str]:
try:
normalized_arg = (arg_name or "").lstrip("-").strip().lower()
if normalized_arg in ("storage", "store"):
backends = cls.store_choices(config)
if backends:
return backends
if normalized_arg == "provider":
canonical_cmd = (cmd_name or "").replace("_", "-").lower()
try:
from ProviderCore.registry import list_search_providers, list_file_providers
except Exception:
list_search_providers = None # type: ignore
list_file_providers = None # type: ignore
provider_choices: List[str] = []
if canonical_cmd in {"search-provider"} and list_search_providers is not None:
providers = list_search_providers(config) or {}
available = [name for name, is_ready in providers.items() if is_ready]
return sorted(available) if available else sorted(providers.keys())
if canonical_cmd in {"add-file"} and list_file_providers is not None:
providers = list_file_providers(config) or {}
available = [name for name, is_ready in providers.items() if is_ready]
return sorted(available) if available else sorted(providers.keys())
if list_search_providers is not None:
providers = list_search_providers(config) or {}
available = [name for name, is_ready in providers.items() if is_ready]
provider_choices = sorted(available) if available else sorted(providers.keys())
try:
from Provider.metadata_provider import list_metadata_providers
meta_providers = list_metadata_providers(config) or {}
meta_available = [n for n, ready in meta_providers.items() if ready]
meta_choices = sorted(meta_available) if meta_available else sorted(meta_providers.keys())
except Exception:
meta_choices = []
merged = sorted(set(provider_choices + meta_choices))
if merged:
return merged
if normalized_arg == "scrape":
try:
from Provider.metadata_provider import list_metadata_providers
meta_providers = list_metadata_providers(config) or {}
if meta_providers:
return sorted(meta_providers.keys())
except Exception:
pass
return get_cmdlet_arg_choices(cmd_name, arg_name) or []
except Exception:
return []
class CmdletCompleter(Completer):
"""Prompt-toolkit completer for the Medeia cmdlet REPL."""
def __init__(self, *, config_loader: "ConfigLoader") -> None:
self._config_loader = config_loader
self.cmdlet_names = CmdletIntrospection.cmdlet_names()
@staticmethod
def _used_arg_logicals(cmd_name: str, stage_tokens: List[str]) -> Set[str]:
"""Return logical argument names already used in this cmdlet stage.
Example: if the user has typed `download-media -url ...`, then `url`
is considered used and should not be suggested again (even as `--url`).
"""
arg_flags = CmdletIntrospection.cmdlet_args(cmd_name)
allowed = {a.lstrip("-").strip().lower() for a in arg_flags if a}
if not allowed:
return set()
used: Set[str] = set()
for tok in stage_tokens[1:]:
if not tok or not tok.startswith("-"):
continue
if tok in {"-", "--"}:
continue
# Handle common `-arg=value` form.
raw = tok.split("=", 1)[0]
logical = raw.lstrip("-").strip().lower()
if logical and logical in allowed:
used.add(logical)
return used
def get_completions(self, document: Document, complete_event): # type: ignore[override]
text = document.text_before_cursor
tokens = text.split()
ends_with_space = bool(text) and text[-1].isspace()
last_pipe = -1
for idx, tok in enumerate(tokens):
if tok == "|":
last_pipe = idx
stage_tokens = tokens[last_pipe + 1 :] if last_pipe >= 0 else tokens
if not stage_tokens:
for cmd in self.cmdlet_names:
yield Completion(cmd, start_position=0)
return
if len(stage_tokens) == 1:
current = stage_tokens[0].lower()
if ends_with_space:
cmd_name = current.replace("_", "-")
if cmd_name == "help":
for cmd in self.cmdlet_names:
yield Completion(cmd, start_position=0)
return
if cmd_name not in self.cmdlet_names:
return
arg_names = CmdletIntrospection.cmdlet_args(cmd_name)
logical_seen: Set[str] = set()
for arg in arg_names:
arg_low = arg.lower()
if arg_low.startswith("--"):
continue
logical = arg.lstrip("-").lower()
if logical in logical_seen:
continue
yield Completion(arg, start_position=0)
logical_seen.add(logical)
yield Completion("-help", start_position=0)
return
for cmd in self.cmdlet_names:
if cmd.startswith(current):
yield Completion(cmd, start_position=-len(current))
for keyword in ("help", "exit", "quit"):
if keyword.startswith(current):
yield Completion(keyword, start_position=-len(current))
return
cmd_name = stage_tokens[0].replace("_", "-").lower()
if ends_with_space:
current_token = ""
prev_token = stage_tokens[-1].lower()
else:
current_token = stage_tokens[-1].lower()
prev_token = stage_tokens[-2].lower() if len(stage_tokens) > 1 else ""
config = self._config_loader.load()
choices = CmdletIntrospection.arg_choices(cmd_name=cmd_name, arg_name=prev_token, config=config)
if choices:
for choice in choices:
if choice.lower().startswith(current_token):
yield Completion(choice, start_position=-len(current_token))
return
arg_names = CmdletIntrospection.cmdlet_args(cmd_name)
used_logicals = self._used_arg_logicals(cmd_name, stage_tokens)
logical_seen: Set[str] = set()
for arg in arg_names:
arg_low = arg.lower()
prefer_single_dash = current_token in {"", "-"}
if prefer_single_dash and arg_low.startswith("--"):
continue
logical = arg.lstrip("-").lower()
if logical in used_logicals:
continue
if prefer_single_dash and logical in logical_seen:
continue
if arg_low.startswith(current_token):
yield Completion(arg, start_position=-len(current_token))
if prefer_single_dash:
logical_seen.add(logical)
if cmd_name in self.cmdlet_names:
if current_token.startswith("--"):
if "--help".startswith(current_token):
yield Completion("--help", start_position=-len(current_token))
else:
if "-help".startswith(current_token):
yield Completion("-help", start_position=-len(current_token))
class MedeiaLexer(Lexer):
def lex_document(self, document: Document): # type: ignore[override]
def get_line(lineno: int):
line = document.lines[lineno]
tokens: List[tuple[str, str]] = []
pattern = re.compile(
r"""
(\s+) | # 1. Whitespace
(\|) | # 2. Pipe
("(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*') | # 3. Quoted string
([^\s\|]+) # 4. Word
""",
re.VERBOSE,
)
is_cmdlet = True
def _emit_keyed_value(word: str) -> bool:
"""Emit `key:` prefixes (comma-separated) as argument tokens.
Designed for values like:
clip:3m4s-3m14s,1h22m-1h33m,item:2-3
URLs (containing "://") and Windows drive paths (e.g. C:\\foo or D:/bar) are skipped so they are not mis-highlighted as keyed values.
Returns True if it handled the token.
"""
if not word or ":" not in word:
return False
# Avoid URLs and common scheme patterns.
if "://" in word:
return False
# Avoid Windows drive paths (e.g., C:\foo or D:/bar)
if re.match(r"^[A-Za-z]:[\\/]", word):
return False
key_prefix = re.compile(r"^([A-Za-z_][A-Za-z0-9_-]*:)(.*)$")
parts = word.split(",")
handled_any = False
for i, part in enumerate(parts):
if i > 0:
tokens.append(("class:value", ","))
if part == "":
continue
m = key_prefix.match(part)
if m:
tokens.append(("class:argument", m.group(1)))
if m.group(2):
tokens.append(("class:value", m.group(2)))
handled_any = True
else:
tokens.append(("class:value", part))
handled_any = True
return handled_any
for match in pattern.finditer(line):
ws, pipe, quote, word = match.groups()
if ws:
tokens.append(("", ws))
continue
if pipe:
tokens.append(("class:pipe", pipe))
is_cmdlet = True
continue
if quote:
# If the quoted token contains a keyed spec (clip:/item:/hash:),
# highlight the `key:` portion in argument-blue even inside quotes.
if len(quote) >= 2 and quote[0] == quote[-1] and quote[0] in ("\"", "'"):
q = quote[0]
inner = quote[1:-1]
start_index = len(tokens)
if _emit_keyed_value(inner):
# _emit_keyed_value already appended tokens for inner; insert opening quote
# before that chunk, then add the closing quote.
tokens.insert(start_index, ("class:string", q))
tokens.append(("class:string", q))
is_cmdlet = False
continue
tokens.append(("class:string", quote))
is_cmdlet = False
continue
if not word:
continue
if word.startswith("@"): # selection tokens
rest = word[1:]
if rest and re.fullmatch(r"[0-9\-\*,]+", rest):
tokens.append(("class:selection_at", "@"))
tokens.append(("class:selection_range", rest))
is_cmdlet = False
continue
if rest == "":
tokens.append(("class:selection_at", "@"))
is_cmdlet = False
continue
if is_cmdlet:
tokens.append(("class:cmdlet", word))
is_cmdlet = False
elif word.startswith("-"):
tokens.append(("class:argument", word))
else:
if not _emit_keyed_value(word):
tokens.append(("class:value", word))
return tokens
return get_line
class ConfigLoader:
def __init__(self, *, root: Path) -> None:
self._root = root
def load(self) -> Dict[str, Any]:
try:
return deepcopy(load_config(config_dir=self._root))
except Exception:
return {}
class CmdletHelp:
@staticmethod
def show_cmdlet_list() -> None:
try:
metadata = list_cmdlet_metadata() or {}
from rich.box import SIMPLE
from rich.panel import Panel
from rich.table import Table as RichTable
table = RichTable(show_header=True, header_style="bold", box=SIMPLE, expand=True)
table.add_column("Cmdlet", no_wrap=True)
table.add_column("Aliases")
table.add_column("Args")
table.add_column("Summary")
for cmd_name in sorted(metadata.keys()):
info = metadata[cmd_name]
aliases = info.get("aliases", [])
args = info.get("args", [])
summary = info.get("summary") or ""
alias_str = ", ".join([str(a) for a in (aliases or []) if str(a).strip()])
arg_names = [a.get("name") for a in (args or []) if isinstance(a, dict) and a.get("name")]
args_str = ", ".join([str(a) for a in arg_names if str(a).strip()])
table.add_row(str(cmd_name), alias_str, args_str, str(summary))
stdout_console().print(Panel(table, title="Cmdlets", expand=False))
except Exception as exc:
from rich.panel import Panel
from rich.text import Text
stderr_console().print(Panel(Text(f"Error: {exc}"), title="Error", expand=False))
@staticmethod
def show_cmdlet_help(cmd_name: str) -> None:
try:
meta = get_cmdlet_metadata(cmd_name)
if meta:
CmdletHelp._print_metadata(cmd_name, meta)
return
print(f"Unknown command: {cmd_name}\n")
except Exception as exc:
print(f"Error: {exc}\n")
@staticmethod
def _print_metadata(cmd_name: str, data: Any) -> None:
d = data.to_dict() if hasattr(data, "to_dict") else data
if not isinstance(d, dict):
from rich.panel import Panel
from rich.text import Text
stderr_console().print(Panel(Text(f"Invalid metadata for {cmd_name}"), title="Error", expand=False))
return
name = d.get("name", cmd_name)
summary = d.get("summary", "")
usage = d.get("usage", "")
description = d.get("description", "")
args = d.get("args", [])
details = d.get("details", [])
from rich.box import SIMPLE
from rich.console import Group
from rich.panel import Panel
from rich.table import Table as RichTable
from rich.text import Text
header = Text.assemble((str(name), "bold"))
synopsis = Text(str(usage or name))
stdout_console().print(Panel(Group(header, synopsis), title="Help", expand=False))
if summary or description:
desc_bits: List[Text] = []
if summary:
desc_bits.append(Text(str(summary)))
if description:
desc_bits.append(Text(str(description)))
stdout_console().print(Panel(Group(*desc_bits), title="Description", expand=False))
if args and isinstance(args, list):
param_table = RichTable(show_header=True, header_style="bold", box=SIMPLE, expand=True)
param_table.add_column("Arg", no_wrap=True)
param_table.add_column("Type", no_wrap=True)
param_table.add_column("Required", no_wrap=True)
param_table.add_column("Description")
for arg in args:
if isinstance(arg, dict):
name_str = arg.get("name", "?")
typ = arg.get("type", "string")
required = bool(arg.get("required", False))
desc = arg.get("description", "")
else:
name_str = getattr(arg, "name", "?")
typ = getattr(arg, "type", "string")
required = bool(getattr(arg, "required", False))
desc = getattr(arg, "description", "")
param_table.add_row(f"-{name_str}", str(typ), "yes" if required else "no", str(desc or ""))
stdout_console().print(Panel(param_table, title="Parameters", expand=False))
if details:
stdout_console().print(Panel(Group(*[Text(str(x)) for x in details]), title="Remarks", expand=False))
class CmdletExecutor:
def __init__(self, *, config_loader: ConfigLoader) -> None:
self._config_loader = config_loader
@staticmethod
def _get_table_title_for_command(
cmd_name: str,
emitted_items: Optional[List[Any]] = None,
cmd_args: Optional[List[str]] = None,
) -> str:
if cmd_name in ("search-provider", "search_provider") and cmd_args:
provider: str = ""
query: str = ""
tokens = [str(a) for a in (cmd_args or [])]
pos: List[str] = []
i = 0
while i < len(tokens):
low = tokens[i].lower()
if low in {"-provider", "--provider"} and i + 1 < len(tokens):
provider = str(tokens[i + 1]).strip()
i += 2
continue
if low in {"-query", "--query"} and i + 1 < len(tokens):
query = str(tokens[i + 1]).strip()
i += 2
continue
if low in {"-limit", "--limit"} and i + 1 < len(tokens):
i += 2
continue
if not str(tokens[i]).startswith("-"):
pos.append(str(tokens[i]))
i += 1
if not provider and pos:
provider = str(pos[0]).strip()
pos = pos[1:]
if not query and pos:
query = " ".join(pos).strip()
if provider and query:
provider_lower = provider.lower()
if provider_lower == "youtube":
provider_label = "Youtube"
elif provider_lower == "openlibrary":
provider_label = "OpenLibrary"
else:
provider_label = provider[:1].upper() + provider[1:]
return f"{provider_label}: {query}".strip().rstrip(":")
title_map = {
"search-file": "Results",
"search_file": "Results",
"download-data": "Downloads",
"download_data": "Downloads",
"get-tag": "Tags",
"get_tag": "Tags",
"get-file": "Results",
"get_file": "Results",
"add-tags": "Results",
"add_tags": "Results",
"delete-tag": "Results",
"delete_tag": "Results",
"add-url": "Results",
"add_url": "Results",
"get-url": "url",
"get_url": "url",
"delete-url": "Results",
"delete_url": "Results",
"get-note": "Notes",
"get_note": "Notes",
"add-note": "Results",
"add_note": "Results",
"delete-note": "Results",
"delete_note": "Results",
"get-relationship": "Relationships",
"get_relationship": "Relationships",
"add-relationship": "Results",
"add_relationship": "Results",
"add-file": "Results",
"add_file": "Results",
"delete-file": "Results",
"delete_file": "Results",
"get-metadata": None,
"get_metadata": None,
}
mapped = title_map.get(cmd_name, "Results")
if mapped is not None:
return mapped
if emitted_items:
first = emitted_items[0]
try:
if isinstance(first, dict) and first.get("title"):
return str(first.get("title"))
if hasattr(first, "title") and getattr(first, "title"):
return str(getattr(first, "title"))
except Exception:
pass
return "Results"
def execute(self, cmd_name: str, args: List[str]) -> None:
import pipeline as ctx
from cmdlet import REGISTRY
ensure_registry_loaded()
cmd_fn = REGISTRY.get(cmd_name)
if not cmd_fn:
# Lazy-import module and register its CMDLET.
try:
mod = import_cmd_module(cmd_name)
data = getattr(mod, "CMDLET", None) if mod else None
if data and hasattr(data, "exec") and callable(getattr(data, "exec")):
run_fn = getattr(data, "exec")
REGISTRY[cmd_name] = run_fn
cmd_fn = run_fn
except Exception:
cmd_fn = None
if not cmd_fn:
print(f"Unknown command: {cmd_name}\n")
return
config = self._config_loader.load()
filtered_args: List[str] = []
selected_indices: List[int] = []
select_all = False
value_flags: Set[str] = set()
try:
meta = get_cmdlet_metadata(cmd_name)
raw = meta.get("raw") if isinstance(meta, dict) else None
arg_specs = getattr(raw, "arg", None) if raw is not None else None
if isinstance(arg_specs, list):
for spec in arg_specs:
spec_type = str(getattr(spec, "type", "string") or "string").strip().lower()
if spec_type == "flag":
continue
spec_name = str(getattr(spec, "name", "") or "")
canonical = spec_name.lstrip("-").strip()
if not canonical:
continue
value_flags.add(f"-{canonical}".lower())
value_flags.add(f"--{canonical}".lower())
alias = str(getattr(spec, "alias", "") or "").strip()
if alias:
value_flags.add(f"-{alias}".lower())
except Exception:
value_flags = set()
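# Tokens beginning with "@" are interpreted as selections against the last
# result table unless they directly follow a flag that takes a value, or are
# quoted (e.g. @"...") in which case the quoted text is passed through as a
# plain argument.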
for i, arg in enumerate(args):
if isinstance(arg, str) and arg.startswith("@"): # selection candidate
prev = str(args[i - 1]).lower() if i > 0 else ""
if prev in value_flags:
filtered_args.append(arg)
continue
if len(arg) >= 2 and arg[1] in {'"', "'"}:
filtered_args.append(arg[1:].strip("\"'"))
continue
if arg.strip() == "@*":
select_all = True
continue
selection = SelectionSyntax.parse(arg)
if selection is not None:
zero_based = sorted(idx - 1 for idx in selection)
for idx in zero_based:
if idx not in selected_indices:
selected_indices.append(idx)
continue
filtered_args.append(arg)
continue
filtered_args.append(str(arg))
piped_items = ctx.get_last_result_items()
result: Any = None
if piped_items:
if select_all:
result = piped_items
elif selected_indices:
result = [piped_items[idx] for idx in selected_indices if 0 <= idx < len(piped_items)]
else:
result = piped_items
worker_manager = WorkerManagerRegistry.ensure(config)
stage_session = WorkerStages.begin_stage(
worker_manager,
cmd_name=cmd_name,
stage_tokens=[cmd_name, *filtered_args],
config=config,
command_text=" ".join([cmd_name, *filtered_args]).strip() or cmd_name,
)
stage_worker_id = stage_session.worker_id if stage_session else None
pipeline_ctx = ctx.PipelineStageContext(stage_index=0, total_stages=1, worker_id=stage_worker_id)
ctx.set_stage_context(pipeline_ctx)
stage_status = "completed"
stage_error = ""
ctx.set_last_selection(selected_indices)
try:
try:
if hasattr(ctx, "set_current_cmdlet_name"):
ctx.set_current_cmdlet_name(cmd_name)
except Exception:
pass
try:
if hasattr(ctx, "set_current_stage_text"):
raw_stage = ""
try:
raw_stage = ctx.get_current_command_text("") if hasattr(ctx, "get_current_command_text") else ""
except Exception:
raw_stage = ""
if raw_stage:
ctx.set_current_stage_text(raw_stage)
else:
ctx.set_current_stage_text(" ".join([cmd_name, *filtered_args]).strip() or cmd_name)
except Exception:
pass
ret_code = cmd_fn(result, filtered_args, config)
if getattr(pipeline_ctx, "emits", None):
emits = list(pipeline_ctx.emits)
# Detect format-selection emits and skip printing (user selects with @N).
is_format_selection = False
if emits:
first_emit = emits[0]
if isinstance(first_emit, dict) and "format_id" in first_emit:
is_format_selection = True
if is_format_selection:
ctx.set_last_result_items_only(emits)
else:
table_title = self._get_table_title_for_command(cmd_name, emits, filtered_args)
selectable_commands = {
"search-file",
"download-data",
"download-media",
"search_file",
"download_data",
"download_media",
".config",
".worker",
}
display_only_commands = {
"get-url",
"get_url",
"get-note",
"get_note",
"get-relationship",
"get_relationship",
"get-file",
"get_file",
}
self_managing_commands = {
"get-tag",
"get_tag",
"tags",
"search-file",
"search_file",
"search-provider",
"search_provider",
"search-store",
"search_store",
}
if cmd_name in self_managing_commands:
table = ctx.get_last_result_table()
if table is None:
table = ResultTable(table_title)
for emitted in emits:
table.add_result(emitted)
else:
table = ResultTable(table_title)
for emitted in emits:
table.add_result(emitted)
if cmd_name in selectable_commands:
table.set_source_command(cmd_name, filtered_args)
ctx.set_last_result_table(table, emits)
ctx.set_current_stage_table(None)
elif cmd_name in display_only_commands:
ctx.set_last_result_items_only(emits)
else:
ctx.set_last_result_items_only(emits)
stdout_console().print()
stdout_console().print(table)
if isinstance(ret_code, int) and ret_code != 0:
stage_status = "failed"
stage_error = f"exit code {ret_code}"
print(f"[exit code: {ret_code}]\n")
except Exception as exc:
stage_status = "failed"
stage_error = f"{type(exc).__name__}: {exc}"
print(f"[error] {type(exc).__name__}: {exc}\n")
finally:
try:
if hasattr(ctx, "clear_current_cmdlet_name"):
ctx.clear_current_cmdlet_name()
except Exception:
pass
try:
if hasattr(ctx, "clear_current_stage_text"):
ctx.clear_current_stage_text()
except Exception:
pass
ctx.clear_last_selection()
if stage_session:
stage_session.close(status=stage_status, error_msg=stage_error)
class PipelineExecutor:
def __init__(self, *, config_loader: ConfigLoader) -> None:
self._config_loader = config_loader
self._toolbar_output: Optional[Callable[[str], None]] = None
def set_toolbar_output(self, output: Optional[Callable[[str], None]]) -> None:
self._toolbar_output = output
@staticmethod
def _split_stages(tokens: Sequence[str]) -> List[List[str]]:
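# Split a flat token list on "|" into per-stage token lists, e.g.
# ["a", "b", "|", "c"] -> [["a", "b"], ["c"]].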
stages: List[List[str]] = []
current: List[str] = []
for token in tokens:
if token == "|":
if current:
stages.append(current)
current = []
else:
current.append(token)
if current:
stages.append(current)
return stages
def execute_tokens(self, tokens: List[str]) -> None:
from cmdlet import REGISTRY
import pipeline as ctx
try:
stages = self._split_stages(tokens)
if not stages:
print("Invalid pipeline syntax\n")
return
pending_tail = ctx.get_pending_pipeline_tail() if hasattr(ctx, "get_pending_pipeline_tail") else []
pending_source = ctx.get_pending_pipeline_source() if hasattr(ctx, "get_pending_pipeline_source") else None
if hasattr(ctx, "get_current_stage_table") and not ctx.get_current_stage_table():
display_table = ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
if display_table:
ctx.set_current_stage_table(display_table)
else:
last_table = ctx.get_last_result_table() if hasattr(ctx, "get_last_result_table") else None
if last_table:
ctx.set_current_stage_table(last_table)
current_source = (
ctx.get_current_stage_table_source_command() if hasattr(ctx, "get_current_stage_table_source_command") else None
)
effective_source = current_source or (
ctx.get_last_result_table_source_command() if hasattr(ctx, "get_last_result_table_source_command") else None
)
selection_only = len(stages) == 1 and stages[0] and stages[0][0].startswith("@")
if pending_tail and selection_only:
if (pending_source is None) or (effective_source and pending_source == effective_source):
stages.extend(pending_tail)
if hasattr(ctx, "clear_pending_pipeline_tail"):
ctx.clear_pending_pipeline_tail()
elif hasattr(ctx, "clear_pending_pipeline_tail"):
ctx.clear_pending_pipeline_tail()
config = self._config_loader.load()
if isinstance(config, dict):
config["_quiet_background_output"] = True
def _resolve_items_for_selection(table_obj, items_list):
return items_list if items_list else []
def _maybe_run_class_selector(selected_items: list, *, stage_is_last: bool) -> bool:
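# On the final stage, give provider- or store-specific "selector" hooks a
# chance to consume the selected rows themselves (returning True); when no
# hook handles them, the caller falls back to normal piping.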
if not stage_is_last:
return False
candidates: list[str] = []
seen: set[str] = set()
def _add(value) -> None:
try:
text = str(value or "").strip().lower()
except Exception:
return
if not text or text in seen:
return
seen.add(text)
candidates.append(text)
try:
current_table = ctx.get_current_stage_table() or ctx.get_last_result_table()
_add(current_table.table if current_table and hasattr(current_table, "table") else None)
except Exception:
pass
for item in selected_items or []:
if isinstance(item, dict):
_add(item.get("provider"))
_add(item.get("store"))
_add(item.get("table"))
else:
_add(getattr(item, "provider", None))
_add(getattr(item, "store", None))
_add(getattr(item, "table", None))
try:
from ProviderCore.registry import get_provider
except Exception:
get_provider = None # type: ignore
if get_provider is not None:
for key in candidates:
try:
provider = get_provider(key, config)
except Exception:
continue
selector = getattr(provider, "selector", None)
if selector is None:
continue
try:
handled = bool(selector(selected_items, ctx=ctx, stage_is_last=True))
except Exception as exc:
print(f"{key} selector failed: {exc}\n")
return True
if handled:
return True
store_keys: list[str] = []
for item in selected_items or []:
if isinstance(item, dict):
v = item.get("store")
else:
v = getattr(item, "store", None)
name = str(v or "").strip()
if name:
store_keys.append(name)
if store_keys:
try:
from Store.registry import Store as StoreRegistry
store_registry = StoreRegistry(config, suppress_debug=True)
_backend_names = list(store_registry.list_backends() or [])
_backend_by_lower = {str(n).lower(): str(n) for n in _backend_names if str(n).strip()}
for name in store_keys:
resolved_name = name
if not store_registry.is_available(resolved_name):
resolved_name = _backend_by_lower.get(str(name).lower(), name)
if not store_registry.is_available(resolved_name):
continue
backend = store_registry[resolved_name]
selector = getattr(backend, "selector", None)
if selector is None:
continue
handled = bool(selector(selected_items, ctx=ctx, stage_is_last=True))
if handled:
return True
except Exception:
pass
return False
first_stage_tokens = stages[0] if stages else []
first_stage_selection_indices: List[int] = []
first_stage_had_extra_args = False
first_stage_select_all = False
if first_stage_tokens:
new_first_stage: List[str] = []
for token in first_stage_tokens:
if token.startswith("@"): # selection
selection = SelectionSyntax.parse(token)
if selection is not None:
first_stage_selection_indices = sorted([i - 1 for i in selection])
continue
if token == "@*":
first_stage_select_all = True
continue
new_first_stage.append(token)
if new_first_stage:
stages[0] = new_first_stage
if first_stage_selection_indices or first_stage_select_all:
first_stage_had_extra_args = True
elif first_stage_selection_indices or first_stage_select_all:
stages.pop(0)
if first_stage_select_all:
last_items = ctx.get_last_result_items()
if last_items:
first_stage_selection_indices = list(range(len(last_items)))
piped_result: Any = None
worker_manager = WorkerManagerRegistry.ensure(config)
pipeline_text = " | ".join(" ".join(stage) for stage in stages)
pipeline_session = WorkerStages.begin_pipeline(worker_manager, pipeline_text=pipeline_text, config=config)
raw_stage_texts: List[str] = []
try:
if hasattr(ctx, "get_current_command_stages"):
raw_stage_texts = ctx.get_current_command_stages() or []
except Exception:
raw_stage_texts = []
if pipeline_session and worker_manager and isinstance(config, dict):
session_worker_ids = config.get("_session_worker_ids")
if session_worker_ids:
try:
output_fn = self._toolbar_output
quiet_mode = bool(config.get("_quiet_background_output"))
terminal_only = quiet_mode and not output_fn
kwargs: Dict[str, Any] = {
"session_worker_ids": session_worker_ids,
"only_terminal_updates": terminal_only,
"overlay_mode": bool(output_fn),
}
if output_fn:
kwargs["output"] = output_fn
ensure_background_notifier(worker_manager, **kwargs)
except Exception:
pass
pipeline_status = "completed"
pipeline_error = ""
try:
if first_stage_selection_indices:
if not ctx.get_current_stage_table_source_command():
display_table = ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
table_for_stage = display_table or ctx.get_last_result_table()
if table_for_stage:
ctx.set_current_stage_table(table_for_stage)
source_cmd = ctx.get_current_stage_table_source_command()
source_args_raw = ctx.get_current_stage_table_source_args()
if isinstance(source_args_raw, str):
source_args: List[str] = [source_args_raw]
elif isinstance(source_args_raw, list):
source_args = [str(x) for x in source_args_raw if x is not None]
else:
source_args = []
current_table = ctx.get_current_stage_table()
table_type = current_table.table if current_table and hasattr(current_table, "table") else None
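# @N expansion: if the source table recorded per-row re-run arguments, the
# selection is turned into a fresh first stage that re-invokes the source
# command with those arguments instead of piping the raw row objects.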
command_expanded = False
if table_type in {"youtube", "soulseek"}:
command_expanded = False
elif source_cmd == "search-file" and source_args and "youtube" in source_args:
command_expanded = False
else:
selected_row_args: List[str] = []
skip_pipe_expansion = source_cmd == ".pipe" and len(stages) > 0
if source_cmd and not skip_pipe_expansion:
for idx in first_stage_selection_indices:
row_args = ctx.get_current_stage_table_row_selection_args(idx)
if row_args:
selected_row_args.extend(row_args)
break
if selected_row_args:
if isinstance(source_cmd, list):
cmd_list: List[str] = [str(x) for x in source_cmd if x is not None]
elif isinstance(source_cmd, str):
cmd_list = [source_cmd]
else:
cmd_list = []
expanded_stage: List[str] = cmd_list + source_args + selected_row_args
if first_stage_had_extra_args and stages:
expanded_stage += stages[0]
stages[0] = expanded_stage
else:
stages.insert(0, expanded_stage)
if pipeline_session and worker_manager:
try:
worker_manager.log_step(
pipeline_session.worker_id,
f"@N expansion: {source_cmd} + {' '.join(str(x) for x in selected_row_args)}",
)
except Exception:
pass
first_stage_selection_indices = []
command_expanded = True
if not command_expanded and first_stage_selection_indices:
last_piped_items = ctx.get_last_result_items()
stage_table = ctx.get_current_stage_table()
if not stage_table and hasattr(ctx, "get_display_table"):
stage_table = ctx.get_display_table()
if not stage_table:
stage_table = ctx.get_last_result_table()
resolved_items = _resolve_items_for_selection(stage_table, last_piped_items)
if last_piped_items:
filtered = [
resolved_items[i]
for i in first_stage_selection_indices
if 0 <= i < len(resolved_items)
]
if not filtered:
print("No items matched selection in pipeline\n")
return
if _maybe_run_class_selector(filtered, stage_is_last=(not stages)):
return
from cmdlet._shared import coerce_to_pipe_object
filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
piped_result = filtered_pipe_objs if len(filtered_pipe_objs) > 1 else filtered_pipe_objs[0]
if pipeline_session and worker_manager:
try:
selection_parts = [f"@{i+1}" for i in first_stage_selection_indices]
worker_manager.log_step(
pipeline_session.worker_id,
f"Applied @N selection {' | '.join(selection_parts)}",
)
except Exception:
pass
# Auto-insert downloader stages for provider tables.
current_table = ctx.get_current_stage_table() or ctx.get_last_result_table()
table_type = current_table.table if current_table and hasattr(current_table, "table") else None
if not stages:
if table_type == "youtube":
print("Auto-running YouTube selection via download-media")
stages.append(["download-media"])
elif table_type == "bandcamp":
print("Auto-running Bandcamp selection via download-media")
stages.append(["download-media"])
elif table_type in {"soulseek", "openlibrary", "libgen"}:
print("Auto-piping selection to download-file")
stages.append(["download-file"])
else:
first_cmd = stages[0][0] if stages and stages[0] else None
if table_type == "soulseek" and first_cmd not in (
"download-file",
"download-media",
"download_media",
".pipe",
):
print("Auto-inserting download-file after Soulseek selection")
stages.insert(0, ["download-file"])
if table_type == "youtube" and first_cmd not in (
"download-media",
"download_media",
"download-file",
".pipe",
):
print("Auto-inserting download-media after YouTube selection")
stages.insert(0, ["download-media"])
if table_type == "bandcamp" and first_cmd not in (
"download-media",
"download_media",
"download-file",
".pipe",
):
print("Auto-inserting download-media after Bandcamp selection")
stages.insert(0, ["download-media"])
if table_type == "libgen" and first_cmd not in (
"download-file",
"download-media",
"download_media",
".pipe",
):
print("Auto-inserting download-file after Libgen selection")
stages.insert(0, ["download-file"])
else:
print("No previous results to select from\n")
return
for stage_index, stage_tokens in enumerate(stages):
if not stage_tokens:
continue
cmd_name = stage_tokens[0].replace("_", "-").lower()
stage_args = stage_tokens[1:]
if cmd_name == "@":
subject = ctx.get_last_result_subject()
if subject is None:
print("No current result context available for '@'\n")
pipeline_status = "failed"
pipeline_error = "No result subject for @"
return
piped_result = subject
try:
subject_items = subject if isinstance(subject, list) else [subject]
ctx.set_last_items(subject_items)
except Exception:
pass
if pipeline_session and worker_manager:
try:
worker_manager.log_step(pipeline_session.worker_id, "@ used current table subject")
except Exception:
pass
continue
if cmd_name.startswith("@"): # selection stage
selection = SelectionSyntax.parse(cmd_name)
is_select_all = cmd_name == "@*"
if selection is None and not is_select_all:
print(f"Invalid selection: {cmd_name}\n")
pipeline_status = "failed"
pipeline_error = f"Invalid selection {cmd_name}"
return
selected_indices = []
if is_select_all:
last_items = ctx.get_last_result_items() or []
selected_indices = list(range(len(last_items)))
else:
selected_indices = sorted([i - 1 for i in selection]) # type: ignore[arg-type]
stage_table = ctx.get_current_stage_table()
if not stage_table and hasattr(ctx, "get_display_table"):
stage_table = ctx.get_display_table()
if not stage_table:
stage_table = ctx.get_last_result_table()
items_list = ctx.get_last_result_items() or []
resolved_items = _resolve_items_for_selection(stage_table, items_list)
filtered = [resolved_items[i] for i in selected_indices if 0 <= i < len(resolved_items)]
if not filtered:
print("No items matched selection\n")
pipeline_status = "failed"
pipeline_error = "Empty selection"
return
if _maybe_run_class_selector(filtered, stage_is_last=(stage_index + 1 >= len(stages))):
return
# Special case: selecting multiple tags from get-tag and piping into delete-tag
# should batch into a single operation (one backend call).
next_cmd = None
try:
if stage_index + 1 < len(stages) and stages[stage_index + 1]:
next_cmd = str(stages[stage_index + 1][0]).replace("_", "-").lower()
except Exception:
next_cmd = None
def _is_tag_row(obj: Any) -> bool:
try:
if hasattr(obj, "__class__") and obj.__class__.__name__ == "TagItem" and hasattr(obj, "tag_name"):
return True
except Exception:
pass
try:
if isinstance(obj, dict) and obj.get("tag_name"):
return True
except Exception:
pass
return False
if next_cmd in {"delete-tag", "delete_tag"} and len(filtered) > 1 and all(_is_tag_row(x) for x in filtered):
from cmdlet._shared import get_field
tags: List[str] = []
first_hash = None
first_store = None
first_path = None
for item in filtered:
tag_name = get_field(item, "tag_name")
if tag_name:
tags.append(str(tag_name))
if first_hash is None:
first_hash = get_field(item, "hash")
if first_store is None:
first_store = get_field(item, "store")
if first_path is None:
first_path = get_field(item, "path") or get_field(item, "target")
if tags:
grouped = {
"table": "tag.selection",
"media_kind": "tag",
"hash": first_hash,
"store": first_store,
"path": first_path,
"tag": tags,
}
piped_result = grouped
continue
from cmdlet._shared import coerce_to_pipe_object
filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
piped_result = filtered_pipe_objs if len(filtered_pipe_objs) > 1 else filtered_pipe_objs[0]
current_table = ctx.get_current_stage_table() or ctx.get_last_result_table()
table_type = current_table.table if current_table and hasattr(current_table, "table") else None
if table_type == "youtube" and stage_index + 1 >= len(stages):
print("Auto-running YouTube selection via download-media")
stages.append(["download-media", *stage_args])
continue
ensure_registry_loaded()
cmd_fn = REGISTRY.get(cmd_name)
if not cmd_fn:
print(f"Unknown command: {cmd_name}\n")
pipeline_status = "failed"
pipeline_error = f"Unknown command: {cmd_name}"
return
stage_session = WorkerStages.begin_stage(
worker_manager,
cmd_name=cmd_name,
stage_tokens=stage_tokens,
config=config,
command_text=" ".join(stage_tokens).strip(),
)
stage_worker_id = stage_session.worker_id if stage_session else None
pipeline_ctx = ctx.PipelineStageContext(
stage_index=stage_index,
total_stages=len(stages),
worker_id=stage_worker_id,
)
ctx.set_stage_context(pipeline_ctx)
stage_status = "completed"
stage_error = ""
stage_label = f"stage {stage_index + 1}/{len(stages)} ({cmd_name})"
try:
# Avoid leaking interactive selection tables across stages.
# (Selection/expansion happens before this loop, so clearing here is safe.)
try:
if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(None)
except Exception:
pass
try:
if hasattr(ctx, "set_current_cmdlet_name"):
ctx.set_current_cmdlet_name(cmd_name)
except Exception:
pass
try:
if hasattr(ctx, "set_current_stage_text"):
stage_text = ""
if raw_stage_texts and stage_index < len(raw_stage_texts):
candidate = str(raw_stage_texts[stage_index] or "").strip()
if candidate:
try:
cand_tokens = shlex.split(candidate)
except Exception:
cand_tokens = candidate.split()
if cand_tokens:
first = str(cand_tokens[0]).replace("_", "-").lower()
if first == cmd_name:
stage_text = candidate
if not stage_text:
stage_text = " ".join(stage_tokens).strip()
ctx.set_current_stage_text(stage_text)
except Exception:
pass
ret_code = cmd_fn(piped_result, list(stage_args), config)
stage_is_last = stage_index + 1 >= len(stages)
emits: List[Any] = []
if getattr(pipeline_ctx, "emits", None) is not None:
emits = list(pipeline_ctx.emits or [])
if emits:
# If the cmdlet already installed an overlay table (e.g. get-tag),
# don't overwrite it: set_last_result_items_only() would clear the
# overlay table/subject and break '@' subject piping.
try:
has_overlay = bool(ctx.get_display_table()) if hasattr(ctx, "get_display_table") else False
except Exception:
has_overlay = False
if not has_overlay:
ctx.set_last_result_items_only(emits)
piped_result = emits
else:
piped_result = None
# Some cmdlets (notably download-media format selection) populate a selectable
# current-stage table without emitting pipeline items. In these cases, render
# the table and pause the pipeline so the user can pick @N.
stage_table = ctx.get_current_stage_table() if hasattr(ctx, "get_current_stage_table") else None
stage_table_type = str(getattr(stage_table, "table", "") or "").strip().lower() if stage_table else ""
if (
(not stage_is_last)
and (not emits)
and cmd_name in {"download-media", "download_media"}
and stage_table is not None
and stage_table_type in {"ytdlp.formatlist", "download-media", "download_media"}
):
try:
is_selectable = not bool(getattr(stage_table, "no_choice", False))
except Exception:
is_selectable = True
if is_selectable:
try:
already_rendered = bool(getattr(stage_table, "_rendered_by_cmdlet", False))
except Exception:
already_rendered = False
if not already_rendered:
stdout_console().print()
stdout_console().print(stage_table)
try:
remaining = stages[stage_index + 1 :]
source_cmd = (
ctx.get_current_stage_table_source_command()
if hasattr(ctx, "get_current_stage_table_source_command")
else None
)
if remaining and hasattr(ctx, "set_pending_pipeline_tail"):
ctx.set_pending_pipeline_tail(remaining, source_command=source_cmd or cmd_name)
except Exception:
pass
return
# For the final stage, many cmdlets rely on the runner to render the
# table they placed into pipeline context (e.g. get-tag). Prefer a
# display table if one exists, otherwise the current-stage table.
if stage_is_last:
final_table = None
try:
final_table = ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
except Exception:
final_table = None
if final_table is None:
final_table = stage_table
if final_table is not None:
try:
already_rendered = bool(getattr(final_table, "_rendered_by_cmdlet", False))
except Exception:
already_rendered = False
if not already_rendered:
stdout_console().print()
stdout_console().print(final_table)
# Fallback: if a cmdlet emitted results but did not provide a table,
# render a standard ResultTable so last-stage pipelines still show output.
if final_table is None and emits:
try:
table_title = CmdletExecutor._get_table_title_for_command(cmd_name, emits, list(stage_args))
except Exception:
table_title = "Results"
table = ResultTable(table_title)
for item in emits:
table.add_result(item)
stdout_console().print()
stdout_console().print(table)
if isinstance(ret_code, int) and ret_code != 0:
stage_status = "failed"
stage_error = f"exit code {ret_code}"
print(f"[{stage_label} exit code: {ret_code}]\n")
pipeline_status = "failed"
pipeline_error = f"{stage_label} failed ({stage_error})"
return
except Exception as exc:
stage_status = "failed"
stage_error = f"{type(exc).__name__}: {exc}"
print(f"[error in {stage_label}]: {stage_error}\n")
pipeline_status = "failed"
pipeline_error = f"{stage_label} error: {exc}"
return
finally:
try:
if hasattr(ctx, "clear_current_cmdlet_name"):
ctx.clear_current_cmdlet_name()
except Exception:
pass
try:
if hasattr(ctx, "clear_current_stage_text"):
ctx.clear_current_stage_text()
except Exception:
pass
if stage_session:
stage_session.close(status=stage_status, error_msg=stage_error)
elif pipeline_session and worker_manager:
try:
worker_manager.log_step(
pipeline_session.worker_id,
f"{stage_label} {'completed' if stage_status == 'completed' else 'failed'}",
)
except Exception:
pass
if not stages and piped_result is not None:
table = ResultTable("Selection Result")
items = piped_result if isinstance(piped_result, list) else [piped_result]
for item in items:
table.add_result(item)
ctx.set_last_result_items_only(items)
stdout_console().print()
stdout_console().print(table)
except Exception as exc:
pipeline_status = "failed"
pipeline_error = str(exc)
print(f"[error] Failed to execute pipeline: {exc}\n")
finally:
if pipeline_session:
pipeline_session.close(status=pipeline_status, error_msg=pipeline_error)
except Exception as exc:
print(f"[error] Failed to execute pipeline: {exc}\n")
Welcome = """
# MEDIOS-MACINA
Rich can do a pretty *decent* job of rendering markdown.
1. This is a list item
2. This is another list item
"""
from rich.markdown import Markdown
from rich.console import Console
console = Console()
md = Markdown(Welcome)
console.print(md)
class MedeiaCLI:
"""Main CLI application object."""
ROOT = Path(__file__).resolve().parent
def __init__(self) -> None:
self._config_loader = ConfigLoader(root=self.ROOT)
self._cmdlet_executor = CmdletExecutor(config_loader=self._config_loader)
self._pipeline_executor = PipelineExecutor(config_loader=self._config_loader)
@staticmethod
def parse_selection_syntax(token: str) -> Optional[Set[int]]:
return SelectionSyntax.parse(token)
@classmethod
def get_store_choices(cls) -> List[str]:
loader = ConfigLoader(root=cls.ROOT)
return CmdletIntrospection.store_choices(loader.load())
def build_app(self) -> typer.Typer:
app = typer.Typer(help="Medeia-Macina CLI")
def _complete_search_provider(ctx, param, incomplete: str): # pragma: no cover
try:
from click.shell_completion import CompletionItem
except Exception:
return []
try:
from ProviderCore.registry import list_search_providers
providers = list_search_providers(self._config_loader.load()) or {}
available = [n for n, ok in providers.items() if ok]
choices = sorted(available) if available else sorted(providers.keys())
except Exception:
choices = []
inc = (incomplete or "").lower()
return [CompletionItem(name) for name in choices if name and name.lower().startswith(inc)]
@app.command("search-provider")
def search_provider(
provider: str = typer.Option(
..., "--provider", "-p", help="Provider name (bandcamp, libgen, soulseek, youtube)", shell_complete=_complete_search_provider
),
query: str = typer.Argument(..., help="Search query (quote for spaces)"),
limit: int = typer.Option(36, "--limit", "-l", help="Maximum results to return"),
) -> None:
self._cmdlet_executor.execute("search-provider", ["-provider", provider, query, "-limit", str(limit)])
@app.command("pipeline")
def pipeline(
command: str = typer.Option(..., "--pipeline", "-p", help="Pipeline command string to execute"),
seeds_json: Optional[str] = typer.Option(None, "--seeds-json", "-s", help="JSON string of seed items"),
) -> None:
import pipeline as ctx
config = self._config_loader.load()
debug_enabled = bool(config.get("debug", False))
set_debug(debug_enabled)
if seeds_json:
try:
seeds = json.loads(seeds_json)
if not isinstance(seeds, list):
seeds = [seeds]
ctx.set_last_result_items_only(seeds)
except Exception as exc:
print(f"Error parsing seeds JSON: {exc}")
return
try:
from cli_syntax import validate_pipeline_text
syntax_error = validate_pipeline_text(command)
if syntax_error:
print(syntax_error.message, file=sys.stderr)
return
except Exception:
pass
try:
tokens = shlex.split(command)
except ValueError as exc:
print(f"Syntax error: {exc}", file=sys.stderr)
return
if not tokens:
return
self._pipeline_executor.execute_tokens(tokens)
@app.command("repl")
def repl() -> None:
self.run_repl()
@app.callback(invoke_without_command=True)
def main_callback(ctx: typer.Context) -> None:
if ctx.invoked_subcommand is None:
self.run_repl()
_ = (search_provider, pipeline, repl, main_callback)
return app
def run(self) -> None:
# Ensure Rich tracebacks are active even when invoking subcommands.
try:
config = self._config_loader.load()
debug_enabled = bool(config.get("debug", False)) if isinstance(config, dict) else False
except Exception:
debug_enabled = False
set_debug(debug_enabled)
_install_rich_traceback(show_locals=debug_enabled)
self.build_app()()
def run_repl(self) -> None:
# (Startup banner is optional; keep the REPL quiet by default.)
prompt_text = "🜂🜄🜁🜃|"
startup_table = ResultTable(
"*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"
)
startup_table.set_no_choice(True).set_preserve_order(True)
startup_table.set_value_case("upper")
def _upper(value: Any) -> str:
text = "" if value is None else str(value)
return text.upper()
def _add_startup_check(
status: str,
name: str,
*,
provider: str = "",
store: str = "",
files: int | str | None = None,
detail: str = "",
) -> None:
row = startup_table.add_row()
row.add_column("STATUS", _upper(status))
row.add_column("NAME", _upper(name))
row.add_column("PROVIDER", _upper(provider or ""))
row.add_column("STORE", _upper(store or ""))
row.add_column("FILES", "" if files is None else str(files))
row.add_column("DETAIL", _upper(detail or ""))
def _has_store_subtype(cfg: dict, subtype: str) -> bool:
store_cfg = cfg.get("store")
if not isinstance(store_cfg, dict):
return False
bucket = store_cfg.get(subtype)
if not isinstance(bucket, dict):
return False
return any(isinstance(v, dict) and bool(v) for v in bucket.values())
def _has_provider(cfg: dict, name: str) -> bool:
provider_cfg = cfg.get("provider")
if not isinstance(provider_cfg, dict):
return False
block = provider_cfg.get(str(name).strip().lower())
return isinstance(block, dict) and bool(block)
def _ping_url(url: str, timeout: float = 3.0) -> tuple[bool, str]:
try:
from API.HTTP import HTTPClient
with HTTPClient(timeout=timeout, retries=1) as client:
resp = client.get(url, allow_redirects=True)
code = int(getattr(resp, "status_code", 0) or 0)
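# Any response below 500 counts as reachable; a 4xx still proves the host answered.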
ok = 200 <= code < 500
return ok, f"{url} (HTTP {code})"
except Exception as exc:
return False, f"{url} ({type(exc).__name__})"
config = self._config_loader.load()
debug_enabled = bool(config.get("debug", False))
set_debug(debug_enabled)
_install_rich_traceback(show_locals=debug_enabled)
_add_startup_check("ENABLED" if debug_enabled else "DISABLED", "DEBUGGING")
try:
try:
from MPV.mpv_ipc import MPV
import shutil
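# Instantiating MPV is only a probe; failures are reported as DISABLED below.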
MPV()
mpv_path = shutil.which("mpv")
_add_startup_check("ENABLED", "MPV", detail=mpv_path or "Available")
except Exception as exc:
_add_startup_check("DISABLED", "MPV", detail=str(exc))
store_registry = None
if config:
try:
from Store import Store as StoreRegistry
store_registry = StoreRegistry(config=config, suppress_debug=True)
except Exception:
store_registry = None
if _has_store_subtype(config, "hydrusnetwork"):
store_cfg = config.get("store")
hydrus_cfg = store_cfg.get("hydrusnetwork", {}) if isinstance(store_cfg, dict) else {}
if isinstance(hydrus_cfg, dict):
for instance_name, instance_cfg in hydrus_cfg.items():
if not isinstance(instance_cfg, dict):
continue
name_key = str(instance_cfg.get("NAME") or instance_name)
url_val = str(instance_cfg.get("URL") or "").strip()
ok = bool(store_registry and store_registry.is_available(name_key))
status = "ENABLED" if ok else "DISABLED"
if ok:
total = None
try:
if store_registry:
backend = store_registry[name_key]
total = getattr(backend, "total_count", None)
if total is None:
getter = getattr(backend, "get_total_count", None)
if callable(getter):
total = getter()
except Exception:
total = None
detail = url_val
files = total if isinstance(total, int) and total >= 0 else None
else:
err = None
if store_registry:
err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
detail = (url_val + (" - " if url_val else "")) + (err or "Unavailable")
files = None
_add_startup_check(status, name_key, store="hydrusnetwork", files=files, detail=detail)
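# Provider checks: compare configured providers against the registry availability maps, then ping known endpoints.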
provider_cfg = config.get("provider") if isinstance(config, dict) else None
if isinstance(provider_cfg, dict) and provider_cfg:
from Provider.metadata_provider import list_metadata_providers
from ProviderCore.registry import list_file_providers, list_providers, list_search_providers
provider_availability = list_providers(config) or {}
search_availability = list_search_providers(config) or {}
file_availability = list_file_providers(config) or {}
meta_availability = list_metadata_providers(config) or {}
def _provider_display_name(key: str) -> str:
k = (key or "").strip()
low = k.lower()
if low == "openlibrary":
return "OpenLibrary"
if low == "alldebrid":
return "AllDebrid"
if low == "youtube":
return "YouTube"
return k[:1].upper() + k[1:] if k else "Provider"
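# Matrix is reported separately below, so pre-mark it as already handled.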
already_checked = {"matrix"}
def _default_provider_ping_targets(provider_key: str) -> list[str]:
prov = (provider_key or "").strip().lower()
if prov == "openlibrary":
return ["https://openlibrary.org"]
if prov == "youtube":
return ["https://www.youtube.com"]
if prov == "bandcamp":
return ["https://bandcamp.com"]
if prov == "libgen":
from Provider.libgen import MIRRORS
mirrors = [str(x).rstrip("/") for x in (MIRRORS or []) if str(x).strip()]
return [m + "/json.php" for m in mirrors]
return []
def _ping_first(urls: list[str]) -> tuple[bool, str]:
for u in urls:
ok, detail = _ping_url(u)
if ok:
return True, detail
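# No target succeeded: re-ping the first one so its error detail can be reported.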
if urls:
ok, detail = _ping_url(urls[0])
return ok, detail
return False, "No ping target"
for provider_name in provider_cfg.keys():
prov = str(provider_name or "").strip().lower()
if not prov or prov in already_checked:
continue
display = _provider_display_name(prov)
if prov == "alldebrid":
try:
from Provider.alldebrid import _get_debrid_api_key
from API.alldebrid import AllDebridClient
api_key = _get_debrid_api_key(config)
if not api_key:
_add_startup_check("DISABLED", display, provider=prov, detail="Not configured")
else:
client = AllDebridClient(api_key)
base_url = str(getattr(client, "base_url", "") or "").strip()
_add_startup_check("ENABLED", display, provider=prov, detail=base_url or "Connected")
except Exception as exc:
_add_startup_check("DISABLED", display, provider=prov, detail=str(exc))
continue
is_known = False
ok_val: Optional[bool] = None
if prov in provider_availability:
is_known = True
ok_val = bool(provider_availability.get(prov))
elif prov in search_availability:
is_known = True
ok_val = bool(search_availability.get(prov))
elif prov in file_availability:
is_known = True
ok_val = bool(file_availability.get(prov))
elif prov in meta_availability:
is_known = True
ok_val = bool(meta_availability.get(prov))
if not is_known:
_add_startup_check("UNKNOWN", display, provider=prov, detail="Not registered")
else:
detail = "Configured" if ok_val else "Not configured"
ping_targets = _default_provider_ping_targets(prov)
if ping_targets:
ping_ok, ping_detail = _ping_first(ping_targets)
if ok_val:
detail = ping_detail
else:
detail = (detail + " | " + ping_detail) if ping_detail else detail
_add_startup_check("ENABLED" if ok_val else "DISABLED", display, provider=prov, detail=detail)
already_checked.add(prov)
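# Also report the built-in search providers even when they are absent from the config.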
default_search_providers = ["openlibrary", "libgen", "youtube", "bandcamp"]
for prov in default_search_providers:
if prov in already_checked:
continue
display = _provider_display_name(prov)
ok_val = bool(search_availability.get(prov)) if prov in search_availability else False
ping_targets = _default_provider_ping_targets(prov)
ping_ok, ping_detail = _ping_first(ping_targets) if ping_targets else (False, "No ping target")
detail = ping_detail or ("Available" if ok_val else "Unavailable")
if not ok_val:
detail = ("Unavailable" + (f" | {ping_detail}" if ping_detail else ""))
_add_startup_check("ENABLED" if (ok_val and ping_ok) else "DISABLED", display, provider=prov, detail=detail)
already_checked.add(prov)
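# The 0x0.st file host gets a direct reachability ping regardless of configuration.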
if "0x0" not in already_checked:
ok_val = bool(file_availability.get("0x0")) if "0x0" in file_availability else False
ping_ok, ping_detail = _ping_url("https://0x0.st")
detail = ping_detail
if not ok_val:
detail = ("Unavailable" + (f" | {ping_detail}" if ping_detail else ""))
_add_startup_check("ENABLED" if (ok_val and ping_ok) else "DISABLED", "0x0", provider="0x0", detail=detail)
if _has_provider(config, "matrix"):
try:
from Provider.matrix import Matrix
provider = Matrix(config)
matrix_conf = config.get("provider", {}).get("matrix", {}) if isinstance(config, dict) else {}
homeserver = str(matrix_conf.get("homeserver") or "").strip()
room_id = str(matrix_conf.get("room_id") or "").strip()
if homeserver and not homeserver.startswith("http"):
homeserver = f"https://{homeserver}"
target = homeserver.rstrip("/")
if room_id:
target = (target + (" " if target else "")) + f"room:{room_id}"
_add_startup_check(
"ENABLED" if provider.validate() else "DISABLED",
"Matrix",
provider="matrix",
detail=target or ("Connected" if provider.validate() else "Not configured"),
)
except Exception as exc:
_add_startup_check("DISABLED", "Matrix", provider="matrix", detail=str(exc))
if _has_store_subtype(config, "folder"):
store_cfg = config.get("store")
folder_cfg = store_cfg.get("folder", {}) if isinstance(store_cfg, dict) else {}
if isinstance(folder_cfg, dict) and folder_cfg:
for instance_name, instance_cfg in folder_cfg.items():
if not isinstance(instance_cfg, dict):
continue
name_key = str(instance_cfg.get("NAME") or instance_name)
path_val = str(instance_cfg.get("PATH") or instance_cfg.get("path") or "").strip()
ok = bool(store_registry and store_registry.is_available(name_key))
if ok and store_registry:
backend = store_registry[name_key]
scan_ok = bool(getattr(backend, "scan_ok", True))
scan_detail = str(getattr(backend, "scan_detail", "") or "")
stats = getattr(backend, "scan_stats", None)
files = None
if isinstance(stats, dict):
total_db = stats.get("files_total_db")
if isinstance(total_db, (int, float)):
files = int(total_db)
status = "SCANNED" if scan_ok else "ERROR"
detail = (path_val + (" - " if path_val else "")) + (scan_detail or "Up to date")
_add_startup_check(status, name_key, store="folder", files=files, detail=detail)
else:
err = None
if store_registry:
err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
detail = (path_val + (" - " if path_val else "")) + (err or "Unavailable")
_add_startup_check("ERROR", name_key, store="folder", detail=detail)
if _has_store_subtype(config, "debrid"):
try:
from config import get_debrid_api_key
from API.alldebrid import AllDebridClient
api_key = get_debrid_api_key(config)
if not api_key:
_add_startup_check("DISABLED", "Debrid", store="debrid", detail="Not configured")
else:
client = AllDebridClient(api_key)
base_url = str(getattr(client, "base_url", "") or "").strip()
_add_startup_check("ENABLED", "Debrid", store="debrid", detail=base_url or "Connected")
except Exception as exc:
_add_startup_check("DISABLED", "Debrid", store="debrid", detail=str(exc))
try:
from tool.ytdlp import YtDlpTool
cookiefile = YtDlpTool(config).resolve_cookiefile()
if cookiefile is not None:
_add_startup_check("FOUND", "Cookies", detail=str(cookiefile))
else:
_add_startup_check("MISSING", "Cookies", detail="Not found")
except Exception as exc:
_add_startup_check("ERROR", "Cookies", detail=str(exc))
if startup_table.rows:
stdout_console().print()
stdout_console().print(startup_table)
except Exception as exc:
if debug_enabled:
debug(f"⚠ Could not check service availability: {exc}")
style = Style.from_dict(
{
"cmdlet": "#ffffff",
"argument": "#3b8eea",
"value": "#9a3209",
"string": "#6d0d93",
"pipe": "#4caf50",
"selection_at": "#f1c40f",
"selection_range": "#4caf50",
"bottom-toolbar": "noreverse",
}
)
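# Transient bottom toolbar: messages auto-clear roughly three seconds after the last update.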
class ToolbarState:
text: str = ""
last_update_time: float = 0.0
clear_timer: Optional[threading.Timer] = None
toolbar_state = ToolbarState()
session: Optional[PromptSession] = None
def get_toolbar() -> Optional[str]:
if not toolbar_state.text or not toolbar_state.text.strip():
return None
if time.time() - toolbar_state.last_update_time > 3:
toolbar_state.text = ""
return None
return toolbar_state.text
def update_toolbar(text: str) -> None:
nonlocal session
text = text.strip()
toolbar_state.text = text
toolbar_state.last_update_time = time.time()
if toolbar_state.clear_timer:
toolbar_state.clear_timer.cancel()
toolbar_state.clear_timer = None
if text:
def clear_toolbar() -> None:
toolbar_state.text = ""
toolbar_state.clear_timer = None
if session is not None and hasattr(session, "app") and session.app.is_running:
session.app.invalidate()
toolbar_state.clear_timer = threading.Timer(3.0, clear_toolbar)
toolbar_state.clear_timer.daemon = True
toolbar_state.clear_timer.start()
if session is not None and hasattr(session, "app") and session.app.is_running:
session.app.invalidate()
self._pipeline_executor.set_toolbar_output(update_toolbar)
completer = CmdletCompleter(config_loader=self._config_loader)
session = PromptSession(
completer=cast(Any, completer),
lexer=MedeiaLexer(),
style=style,
bottom_toolbar=get_toolbar,
refresh_interval=0.5,
)
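# Main prompt loop: handle exit/help, result-table history selectors (@.. / @,,), then dispatch input.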
while True:
try:
user_input = session.prompt(prompt_text).strip()
except (EOFError, KeyboardInterrupt):
print("He who is victorious through deceit is defeated by the truth.")
break
if not user_input:
continue
low = user_input.lower()
if low in {"exit", "quit", "q"}:
print("He who is victorious through deceit is defeated by the truth.")
break
if low in {"help", "?"}:
CmdletHelp.show_cmdlet_list()
continue
pipeline_ctx_ref = None
try:
import pipeline as ctx
ctx.set_current_command_text(user_input)
pipeline_ctx_ref = ctx
except Exception:
pipeline_ctx_ref = None
try:
from cli_syntax import validate_pipeline_text
syntax_error = validate_pipeline_text(user_input)
if syntax_error:
print(syntax_error.message, file=sys.stderr)
continue
except Exception:
pass
try:
tokens = shlex.split(user_input)
except ValueError as exc:
print(f"Syntax error: {exc}", file=sys.stderr)
continue
if not tokens:
continue
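# "@,," steps forward through the result-table history.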
if len(tokens) == 1 and tokens[0] == "@,,":
try:
import pipeline as ctx
if ctx.restore_next_result_table():
last_table = ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
if last_table is None:
last_table = ctx.get_last_result_table()
if last_table:
stdout_console().print()
ctx.set_current_stage_table(last_table)
stdout_console().print(last_table)
else:
items = ctx.get_last_result_items()
if items:
ctx.set_current_stage_table(None)
print(f"Restored {len(items)} items (no table format available)")
else:
print("No forward history available", file=sys.stderr)
except Exception as exc:
print(f"Error restoring next table: {exc}", file=sys.stderr)
continue
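# "@.." steps back to the previous result table.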
if len(tokens) == 1 and tokens[0] == "@..":
try:
import pipeline as ctx
if ctx.restore_previous_result_table():
last_table = ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
if last_table is None:
last_table = ctx.get_last_result_table()
# Auto-refresh search-store tables when navigating back,
# so row payloads (titles/tags) reflect latest store state.
try:
src_cmd = getattr(last_table, "source_command", None) if last_table else None
if isinstance(src_cmd, str) and src_cmd.lower().replace("_", "-") == "search-store":
src_args = getattr(last_table, "source_args", None) if last_table else None
base_args = list(src_args) if isinstance(src_args, list) else []
cleaned_args = [
str(a)
for a in base_args
if str(a).strip().lower() not in {"--refresh", "-refresh"}
]
if hasattr(ctx, "set_current_command_text"):
try:
title_text = getattr(last_table, "title", None) if last_table else None
if isinstance(title_text, str) and title_text.strip():
ctx.set_current_command_text(title_text.strip())
else:
ctx.set_current_command_text(" ".join(["search-store", *cleaned_args]).strip())
except Exception:
pass
try:
self._cmdlet_executor.execute("search-store", cleaned_args + ["--refresh"])
finally:
if hasattr(ctx, "clear_current_command_text"):
try:
ctx.clear_current_command_text()
except Exception:
pass
continue
except Exception as exc:
print(f"Error refreshing search-store table: {exc}", file=sys.stderr)
if last_table:
stdout_console().print()
ctx.set_current_stage_table(last_table)
stdout_console().print(last_table)
else:
items = ctx.get_last_result_items()
if items:
ctx.set_current_stage_table(None)
print(f"Restored {len(items)} items (no table format available)")
else:
print("No previous result table in history")
else:
print("Result table history is empty")
except Exception as exc:
print(f"Error restoring previous result table: {exc}")
continue
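# Anything containing a pipe or starting with an @ selection goes through the pipeline executor; bare cmdlets run directly (with -help handled locally).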
try:
if "|" in tokens or (tokens and tokens[0].startswith("@")):
self._pipeline_executor.execute_tokens(tokens)
else:
cmd_name = tokens[0].replace("_", "-").lower()
is_help = any(arg in {"-help", "--help", "-h"} for arg in tokens[1:])
if is_help:
CmdletHelp.show_cmdlet_help(cmd_name)
else:
self._cmdlet_executor.execute(cmd_name, tokens[1:])
finally:
if pipeline_ctx_ref:
pipeline_ctx_ref.clear_current_command_text()
if __name__ == "__main__":
MedeiaCLI().run()