Files
Medios-Macina/CLI.py

5321 lines
221 KiB
Python
Raw Normal View History

2025-11-25 20:09:33 -08:00
from __future__ import annotations
2025-12-20 02:12:45 -08:00
"""Medeia-Macina CLI.
2025-11-25 20:09:33 -08:00
2025-12-20 02:12:45 -08:00
This module intentionally uses a class-based architecture:
- no legacy procedural entrypoints
- no compatibility shims
- all REPL/pipeline/cmdlet execution state lives on objects
"""
import atexit
import io
2025-11-25 20:09:33 -08:00
import json
import re
2025-12-20 02:12:45 -08:00
import shlex
import sys
import threading
import time
2025-11-25 20:09:33 -08:00
import uuid
from copy import deepcopy
2026-01-01 20:37:27 -08:00
from datetime import datetime
2025-11-25 20:09:33 -08:00
from pathlib import Path
2025-12-30 04:47:13 -08:00
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, TextIO, Tuple, cast
2025-12-11 12:47:30 -08:00
2025-12-20 02:12:45 -08:00
import typer
from prompt_toolkit import PromptSession
from prompt_toolkit.completion import Completer, Completion
from prompt_toolkit.document import Document
from prompt_toolkit.lexers import Lexer
from prompt_toolkit.styles import Style
2025-12-24 17:58:57 -08:00
from rich.console import Console
from rich.layout import Layout
from rich.panel import Panel
from rich.markdown import Markdown
from rich.bar import Bar
from rich.table import Table
2026-01-11 02:26:39 -08:00
from SYS.rich_display import (
IMAGE_EXTENSIONS,
render_image_to_console,
render_item_details_panel,
stderr_console,
stdout_console,
)
2025-12-20 23:57:44 -08:00
def _install_rich_traceback(*, show_locals: bool = False) -> None:
"""Install Rich traceback handler as the default excepthook.
This keeps uncaught exceptions readable in the terminal.
"""
try:
from rich.traceback import install as rich_traceback_install
rich_traceback_install(show_locals=bool(show_locals))
except Exception:
# Fall back to the standard Python traceback if Rich isn't available.
return
# Default to Rich tracebacks for the whole process.
_install_rich_traceback(show_locals=False)
2025-12-20 02:12:45 -08:00
from SYS.background_notifier import ensure_background_notifier
from SYS.logger import debug, set_debug
from SYS.worker_manager import WorkerManager
2025-12-11 12:47:30 -08:00
from SYS.cmdlet_catalog import (
2025-12-20 02:12:45 -08:00
get_cmdlet_arg_choices,
get_cmdlet_arg_flags,
get_cmdlet_metadata,
import_cmd_module,
list_cmdlet_metadata,
list_cmdlet_names,
2025-12-11 12:47:30 -08:00
)
from SYS.config import get_local_storage_path, load_config
from SYS.result_table import ResultTable
from ProviderCore.registry import provider_inline_query_choices
2025-12-20 02:12:45 -08:00
2026-01-03 03:37:48 -08:00
# NOTE(review): presumably the command spellings whose pipeline stages are
# sourced from built-in help examples (leading-dot alias included) — the
# consumers live elsewhere in this module; confirm usage before renaming.
HELP_EXAMPLE_SOURCE_COMMANDS = {
    ".help-example",
    "help-example",
}
def _split_pipeline_tokens(tokens: Sequence[str]) -> List[List[str]]:
"""Split example tokens into per-stage command sequences using pipe separators."""
stages: List[List[str]] = []
current: List[str] = []
for token in tokens:
if token == "|":
if current:
stages.append(current)
current = []
continue
current.append(str(token))
if current:
stages.append(current)
return [stage for stage in stages if stage]
2025-12-20 02:12:45 -08:00
class SelectionSyntax:
    """Parses @ selection syntax into 1-based indices."""

    # NOTE(review): not used within this class; kept because other parts of
    # this large module may reference it.
    _RANGE_RE = re.compile(r"^[0-9\-]+$")

    @staticmethod
    def parse(token: str) -> Optional[Set[int]]:
        """Return 1-based indices or None when not a concrete selection.

        Concrete selections:
          - @2
          - @2-5
          - @{1,3,5}
          - @2,5,7-9

        Special (non-concrete) selectors return None:
          - @* (select all)
          - @.. (history prev)
          - @,, (history next)
        """
        if not token or not token.startswith("@"):
            return None
        body = token[1:].strip()
        if body in (".", ",", "*"):
            return None
        if body.startswith("{") and body.endswith("}"):
            body = body[1:-1].strip()

        picked: Set[int] = set()
        for piece in body.split(","):
            piece = piece.strip()
            if not piece:
                continue
            if "-" in piece:
                bounds = piece.split("-", 1)
                if len(bounds) != 2:
                    return None
                lo_text = bounds[0].strip()
                hi_text = bounds[1].strip()
                if not lo_text or not hi_text:
                    return None
                try:
                    lo = int(lo_text)
                    hi = int(hi_text)
                except ValueError:
                    return None
                # Only positive, properly ordered ranges are accepted.
                if lo <= 0 or hi <= 0 or lo > hi:
                    return None
                picked.update(range(lo, hi + 1))
            else:
                try:
                    idx = int(piece)
                except ValueError:
                    return None
                if idx <= 0:
                    return None
                picked.add(idx)
        return picked or None
2025-11-25 20:09:33 -08:00
2025-12-20 02:12:45 -08:00
2025-12-30 04:47:13 -08:00
class SelectionFilterSyntax:
    """Parses and applies @"COL:filter" selection filters.

    Notes:
    - CLI tokenization (shlex) strips quotes, so a user input of `@"TITLE:foo"`
      arrives as `@TITLE:foo`. We support both forms.
    - Filters apply to the *current selectable table items* (in-memory), not to
      provider searches.
    """

    _OP_RE = re.compile(r"^(>=|<=|!=|==|>|<|=)\s*(.+)$")
    _DUR_TOKEN_RE = re.compile(r"(?i)(\d+)\s*([hms])")

    @staticmethod
    def parse(token: str) -> Optional[List[Tuple[str, str]]]:
        """Return list of (column, raw_expression) or None when not a filter token."""
        text = str(token) if token else ""
        if not text or not text.startswith("@"):
            return None
        if text.strip() == "@*":
            return None
        # Concrete numeric selections (@2, @1-3, @{1,3}) are never filters.
        try:
            if SelectionSyntax.parse(text) is not None:
                return None
        except Exception:
            pass
        raw = text[1:].strip()
        if not raw:
            return None
        # If quotes survived tokenization, strip one symmetric wrapper.
        if len(raw) >= 2 and raw[0] in ('"', "'") and raw[-1] == raw[0]:
            raw = raw[1:-1].strip()
        if ":" not in raw:
            # Shorthand: @"foo" means Title contains "foo".
            return [("Title", raw)] if raw else None
        conditions: List[Tuple[str, str]] = []
        for segment in (s.strip() for s in raw.split(",")):
            if not segment:
                continue
            if ":" not in segment:
                return None
            col, _, expr = segment.partition(":")
            col = str(col or "").strip()
            if not col:
                return None
            conditions.append((col, str(expr or "").strip()))
        return conditions or None

    @staticmethod
    def _norm_key(text: str) -> str:
        """Lowercase and collapse internal whitespace for column matching."""
        return re.sub(r"\s+", " ", str(text or "").strip().lower())

    @staticmethod
    def _item_column_map(item: Any) -> Dict[str, str]:
        """Flatten an item's display columns plus direct fields into a
        normalized-key -> text mapping (direct keys override columns)."""
        mapping: Dict[str, str] = {}

        def _store(key: Any, value: Any) -> None:
            norm = SelectionFilterSyntax._norm_key(str(key or ""))
            if not norm or value is None:
                return
            try:
                if isinstance(value, (list, tuple, set)):
                    rendered = ", ".join(str(v) for v in value if v is not None)
                else:
                    rendered = str(value)
            except Exception:
                return
            mapping[norm] = rendered

        def _store_pairs(pairs: Any) -> None:
            if not isinstance(pairs, list):
                return
            for pair in pairs:
                try:
                    if isinstance(pair, (list, tuple)) and len(pair) == 2:
                        _store(pair[0], pair[1])
                except Exception:
                    continue

        if isinstance(item, dict):
            # Display columns (primary UX surface) first, direct keys second.
            _store_pairs(item.get("columns"))
            for key, value in item.items():
                if key != "columns":
                    _store(key, value)
        else:
            _store_pairs(getattr(item, "columns", None))
            for attr in ("title", "path", "detail", "provider", "store", "table"):
                try:
                    _store(attr, getattr(item, attr, None))
                except Exception:
                    pass
        return mapping

    @staticmethod
    def _parse_duration_seconds(text: str) -> Optional[int]:
        """Parse '90', 'M:SS', 'H:MM:SS', or token form '1h2m3s' into seconds."""
        value = str(text or "").strip()
        if not value:
            return None
        if value.isdigit():
            try:
                return max(0, int(value))
            except Exception:
                return None
        # Clock format: M:SS or H:MM:SS.
        if ":" in value:
            fields = [f.strip() for f in value.split(":")]
            if all(f.isdigit() for f in fields):
                if len(fields) == 2:
                    return max(0, int(fields[0]) * 60 + int(fields[1]))
                if len(fields) == 3:
                    return max(
                        0,
                        int(fields[0]) * 3600 + int(fields[1]) * 60 + int(fields[2]),
                    )
        # Token format: 1h2m3s (tokens can appear in any combination).
        scale = {"h": 3600, "m": 60, "s": 1}
        total_seconds = 0
        matched = False
        for m in SelectionFilterSyntax._DUR_TOKEN_RE.finditer(value):
            matched = True
            total_seconds += int(m.group(1)) * scale[m.group(2).lower()]
        if matched:
            return max(0, int(total_seconds))
        return None

    @staticmethod
    def _parse_float(text: str) -> Optional[float]:
        """Float parse with thousands-separator commas removed; None on failure."""
        value = str(text or "").strip()
        if not value:
            return None
        try:
            return float(value.replace(",", ""))
        except Exception:
            return None

    @staticmethod
    def _parse_op(expr: str) -> tuple[Optional[str], str]:
        """Split an expression into (operator, operand).

        Operator is None for plain substring filters.
        """
        text = str(expr or "").strip()
        if not text:
            return None, ""
        m = SelectionFilterSyntax._OP_RE.match(text)
        if m is None:
            return None, text
        return m.group(1), str(m.group(2) or "").strip()

    @staticmethod
    def _comparable_numbers(
        key: str, left_text: str, right_text: str
    ) -> Tuple[Optional[float], Optional[float]]:
        """Resolve both sides to floats for comparison.

        Duration parsing is tried first when either side looks time-like
        (contains ':', 'h', 'm' or 's'); otherwise plain float parsing.
        """
        time_like = (
            key == "duration"
            or any(ch in right_text for ch in (":", "h", "m", "s"))
            or any(ch in left_text for ch in (":", "h", "m", "s"))
        )
        left_num: Optional[float] = None
        right_num: Optional[float] = None
        if time_like:
            ldur = SelectionFilterSyntax._parse_duration_seconds(left_text)
            rdur = SelectionFilterSyntax._parse_duration_seconds(right_text)
            if ldur is not None and rdur is not None:
                left_num = float(ldur)
                right_num = float(rdur)
        if left_num is None or right_num is None:
            left_num = SelectionFilterSyntax._parse_float(left_text)
            right_num = SelectionFilterSyntax._parse_float(right_text)
        return left_num, right_num

    @staticmethod
    def _compare(left: float, right: float, op: str) -> bool:
        """Apply a comparison operator numerically; unknown ops are False."""
        if op in ("=", "=="):
            return left == right
        if op == "!=":
            return left != right
        if op == ">":
            return left > right
        if op == ">=":
            return left >= right
        if op == "<":
            return left < right
        if op == "<=":
            return left <= right
        return False

    @staticmethod
    def matches(item: Any, conditions: List[Tuple[str, str]]) -> bool:
        """True when the item satisfies every (column, expression) condition."""
        colmap = SelectionFilterSyntax._item_column_map(item)
        for col, expr in conditions:
            key = SelectionFilterSyntax._norm_key(col)
            actual = colmap.get(key)
            if actual is None:
                return False
            op, rhs = SelectionFilterSyntax._parse_op(expr)
            left_text = str(actual or "").strip()
            right_text = str(rhs or "").strip()

            if op is None:
                # Plain filter: case-insensitive substring containment.
                if not right_text or right_text.lower() not in left_text.lower():
                    return False
                continue

            left_num, right_num = SelectionFilterSyntax._comparable_numbers(
                key, left_text, right_text
            )
            if left_num is not None and right_num is not None:
                if not SelectionFilterSyntax._compare(left_num, right_num, op):
                    return False
                continue

            # Fallback to string equality for =/!= when numeric parsing fails;
            # ordering operators cannot apply to non-numeric text.
            if op in ("=", "=="):
                if left_text.lower() != right_text.lower():
                    return False
            elif op == "!=":
                if left_text.lower() == right_text.lower():
                    return False
            else:
                return False
        return True
2025-12-20 02:12:45 -08:00
class WorkerOutputMirror(io.TextIOBase):
    """Mirror stdout/stderr to worker manager while preserving console output."""

    def __init__(
        self,
        original: TextIO,
        manager: WorkerManager,
        worker_id: str,
        channel: str,
    ):
        self._original = original
        self._manager = manager
        self._worker_id = worker_id
        self._channel = channel
        # Partial line carried between write() calls until terminated.
        self._pending: str = ""

    def write(self, data: str) -> int:  # type: ignore[override]
        """Write through to the wrapped stream, then queue complete lines."""
        if not data:
            return 0
        self._original.write(data)
        self._buffer_text(data)
        return len(data)

    def flush(self) -> None:  # type: ignore[override]
        self._original.flush()
        self._flush_pending(force=True)

    def isatty(self) -> bool:  # pragma: no cover
        probe = getattr(self._original, "isatty", lambda: False)
        return bool(probe())

    def _buffer_text(self, data: str) -> None:
        """Accumulate text and emit every fully terminated line."""
        combined = self._pending + data
        pieces = combined.splitlines(keepends=True)
        if not pieces:
            self._pending = combined
            return
        if pieces[-1].endswith(("\n", "\r")):
            ready, self._pending = pieces, ""
        else:
            # Hold the unterminated tail for the next write.
            ready, self._pending = pieces[:-1], pieces[-1]
        for line in ready:
            self._emit(line)

    def _flush_pending(self, *, force: bool = False) -> None:
        if force and self._pending:
            self._emit(self._pending)
            self._pending = ""

    def _emit(self, text: str) -> None:
        if not text:
            return
        try:
            self._manager.append_stdout(self._worker_id, text, channel=self._channel)
        except Exception:
            # Best-effort mirroring: never break console output on manager errors.
            pass

    @property
    def encoding(self) -> str:  # type: ignore[override]
        return getattr(self._original, "encoding", "utf-8")
2025-12-20 02:12:45 -08:00
class WorkerStageSession:
    """Lifecycle helper for wrapping a CLI cmdlet execution in a worker record."""

    def __init__(
        self,
        *,
        manager: WorkerManager,
        worker_id: str,
        orig_stdout: TextIO,
        orig_stderr: TextIO,
        stdout_proxy: WorkerOutputMirror,
        stderr_proxy: WorkerOutputMirror,
        config: Optional[Dict[str, Any]],
        logging_enabled: bool,
        completion_label: str,
        error_label: str,
    ) -> None:
        self.manager = manager
        self.worker_id = worker_id
        self.orig_stdout = orig_stdout
        self.orig_stderr = orig_stderr
        self.stdout_proxy = stdout_proxy
        self.stderr_proxy = stderr_proxy
        self.config = config
        self.logging_enabled = logging_enabled
        self.closed = False
        self._completion_label = completion_label
        self._error_label = error_label

    def close(self, *, status: str = "completed", error_msg: str = "") -> None:
        """Finalize the worker record and restore process stdio.

        Idempotent: only the first call has effect. Every manager interaction
        is best-effort (exceptions swallowed) so shutdown never raises.
        """
        if self.closed:
            return

        try:
            self.stdout_proxy.flush()
            self.stderr_proxy.flush()
        except Exception:
            pass

        # Restore the real streams before talking to the manager so any
        # failures below surface on the console, not the mirror.
        sys.stdout = self.orig_stdout
        sys.stderr = self.orig_stderr

        if self.logging_enabled:
            try:
                self.manager.disable_logging_for_worker(self.worker_id)
            except Exception:
                pass

        try:
            if status == "completed":
                step_text = self._completion_label
            else:
                step_text = f"{self._error_label}: {error_msg or status}"
            self.manager.log_step(self.worker_id, step_text)
        except Exception:
            pass

        try:
            self.manager.finish_worker(
                self.worker_id,
                result=status or "completed",
                error_msg=error_msg or "",
            )
        except Exception:
            pass

        if self.config and self.config.get("_current_worker_id") == self.worker_id:
            self.config.pop("_current_worker_id", None)
        self.closed = True
2025-12-20 02:12:45 -08:00
class WorkerManagerRegistry:
    """Process-wide WorkerManager cache keyed by library_root."""

    _manager: Optional[WorkerManager] = None
    _manager_root: Optional[Path] = None
    _orphan_cleanup_done: bool = False
    _registered: bool = False

    @classmethod
    def ensure(cls, config: Dict[str, Any]) -> Optional[WorkerManager]:
        """Return a cached WorkerManager for the configured library root.

        Reuses a manager already stashed in ``config``; otherwise creates
        (or re-creates when the root changed) the process-wide instance,
        sweeps orphaned CLI workers once, and registers atexit cleanup.
        Returns None when no root is configured or setup fails.
        """
        if not isinstance(config, dict):
            return None

        cached = config.get("_worker_manager")
        if isinstance(cached, WorkerManager):
            return cached

        library_root = get_local_storage_path(config)
        if not library_root:
            return None

        try:
            root_path = Path(library_root).resolve()
        except Exception:
            root_path = Path(library_root)

        try:
            if cls._manager is None or cls._manager_root != root_path:
                # Root changed (or first use): retire any previous manager.
                if cls._manager is not None:
                    try:
                        cls._manager.close()
                    except Exception:
                        pass
                cls._manager = WorkerManager(root_path, auto_refresh_interval=0.5)
                cls._manager_root = root_path

            active = cls._manager
            config["_worker_manager"] = active

            # One-time sweep of CLI workers left "running" by a crashed session.
            if active is not None and not cls._orphan_cleanup_done:
                try:
                    active.expire_running_workers(
                        older_than_seconds=120,
                        worker_id_prefix="cli_%",
                        reason=
                        "CLI session ended unexpectedly; marking worker as failed",
                    )
                except Exception:
                    pass
                else:
                    cls._orphan_cleanup_done = True

            if not cls._registered:
                atexit.register(cls.close)
                cls._registered = True

            return active
        except Exception as exc:
            print(
                f"[worker] Could not initialize worker manager: {exc}",
                file=sys.stderr
            )
            return None

    @classmethod
    def close(cls) -> None:
        """Close and forget the cached manager (used as the atexit hook)."""
        if cls._manager is None:
            return
        try:
            cls._manager.close()
        except Exception:
            pass
        cls._manager = None
        cls._manager_root = None
        cls._orphan_cleanup_done = False
class WorkerStages:
    """Factory methods for stage/pipeline worker sessions."""

    @staticmethod
    def _start_worker_session(
        worker_manager: Optional[WorkerManager],
        *,
        worker_type: str,
        title: str,
        description: str,
        pipe_text: str,
        config: Optional[Dict[str, Any]],
        completion_label: str,
        error_label: str,
        skip_logging_for: Optional[Set[str]] = None,
        session_worker_ids: Optional[Set[str]] = None,
    ) -> Optional[WorkerStageSession]:
        """Track a new worker, mirror stdio into it, and hand back a session.

        Returns None when no manager is available, the worker type is
        excluded from logging, or tracking fails.
        """
        if worker_manager is None:
            return None
        if skip_logging_for and worker_type in skip_logging_for:
            return None

        kind = worker_type or "cmd"
        worker_id = f"cli_{kind[:8]}_{uuid.uuid4().hex[:6]}"

        try:
            tracked = worker_manager.track_worker(
                worker_id,
                worker_type=worker_type,
                title=title,
                description=description or "(no args)",
                pipe=pipe_text,
            )
        except Exception as exc:
            print(f"[worker] Failed to track {worker_type}: {exc}", file=sys.stderr)
            return None
        if not tracked:
            return None

        if session_worker_ids is not None:
            session_worker_ids.add(worker_id)

        try:
            handler = worker_manager.enable_logging_for_worker(worker_id)
            logging_enabled = handler is not None
        except Exception:
            logging_enabled = False

        # Swap the process streams for mirrors; the session restores them.
        keep_stdout = sys.stdout
        keep_stderr = sys.stderr
        mirror_out = WorkerOutputMirror(keep_stdout, worker_manager, worker_id, "stdout")
        mirror_err = WorkerOutputMirror(keep_stderr, worker_manager, worker_id, "stderr")
        sys.stdout = mirror_out
        sys.stderr = mirror_err
        if isinstance(config, dict):
            config["_current_worker_id"] = worker_id

        try:
            worker_manager.log_step(worker_id, f"Started {worker_type}")
        except Exception:
            pass

        return WorkerStageSession(
            manager=worker_manager,
            worker_id=worker_id,
            orig_stdout=keep_stdout,
            orig_stderr=keep_stderr,
            stdout_proxy=mirror_out,
            stderr_proxy=mirror_err,
            config=config,
            logging_enabled=logging_enabled,
            completion_label=completion_label,
            error_label=error_label,
        )

    @classmethod
    def begin_stage(
        cls,
        worker_manager: Optional[WorkerManager],
        *,
        cmd_name: str,
        stage_tokens: Sequence[str],
        config: Optional[Dict[str, Any]],
        command_text: str,
    ) -> Optional[WorkerStageSession]:
        """Open a worker session for a single cmdlet stage."""
        arg_text = " ".join(stage_tokens[1:]) if len(stage_tokens) > 1 else "(no args)"
        ids = config.get("_session_worker_ids") if isinstance(config, dict) else None
        return cls._start_worker_session(
            worker_manager,
            worker_type=cmd_name,
            title=f"{cmd_name} stage",
            description=arg_text,
            pipe_text=command_text,
            config=config,
            completion_label="Stage completed",
            error_label="Stage error",
            skip_logging_for={".worker", "worker", "workers"},
            session_worker_ids=ids,
        )

    @classmethod
    def begin_pipeline(
        cls,
        worker_manager: Optional[WorkerManager],
        *,
        pipeline_text: str,
        config: Optional[Dict[str, Any]],
    ) -> Optional[WorkerStageSession]:
        """Open the umbrella worker session for a full pipeline run."""
        tracked_ids: Set[str] = set()
        if isinstance(config, dict):
            config["_session_worker_ids"] = tracked_ids
        return cls._start_worker_session(
            worker_manager,
            worker_type="pipeline",
            title="Pipeline run",
            description=pipeline_text,
            pipe_text=pipeline_text,
            config=config,
            completion_label="Pipeline completed",
            error_label="Pipeline error",
            session_worker_ids=tracked_ids,
        )
class CmdletIntrospection:
    """Read-only helpers that query the cmdlet catalog for the REPL."""

    @staticmethod
    def cmdlet_names(force: bool = False) -> List[str]:
        """All known cmdlet names; empty list when the catalog is unavailable."""
        try:
            return list_cmdlet_names(force=force) or []
        except Exception:
            return []

    @staticmethod
    def cmdlet_args(cmd_name: str, config: Optional[Dict[str, Any]] = None) -> List[str]:
        """Flag spellings accepted by a cmdlet; empty list on failure."""
        try:
            return get_cmdlet_arg_flags(cmd_name, config=config) or []
        except Exception:
            return []

    @staticmethod
    def store_choices(config: Dict[str, Any], force: bool = False) -> List[str]:
        """Configured storage-backend names; empty list on failure."""
        try:
            # Use the cached startup check from SharedArgs
            from cmdlet._shared import SharedArgs
            return SharedArgs.get_store_choices(config, force=force)
        except Exception:
            return []

    @classmethod
    def arg_choices(
        cls,
        *,
        cmd_name: str,
        arg_name: str,
        config: Dict[str, Any],
        force: bool = False,
    ) -> List[str]:
        """Value completions for one argument of one cmdlet.

        Special-cases -store/-storage, -provider and -scrape; everything
        else defers to the static cmdlet catalog. Always returns a list.
        """
        try:
            logical = (arg_name or "").lstrip("-").strip().lower()

            if logical in ("storage", "store"):
                backends = cls.store_choices(config, force=force)
                if backends:
                    return backends

            if logical == "provider":
                canonical_cmd = (cmd_name or "").replace("_", "-").lower()
                try:
                    from ProviderCore.registry import list_search_providers, list_file_providers
                except Exception:
                    list_search_providers = None  # type: ignore
                    list_file_providers = None  # type: ignore

                # add-file completes against file providers only.
                if canonical_cmd in {"add-file"} and list_file_providers is not None:
                    providers = list_file_providers(config) or {}
                    ready = [name for name, ok in providers.items() if ok]
                    return sorted(ready) if ready else sorted(providers.keys())

                if list_search_providers is not None:
                    providers = list_search_providers(config) or {}
                    ready = [name for name, ok in providers.items() if ok]
                    named = sorted(ready) if ready else sorted(providers.keys())
                    if named:
                        return named

            if logical == "scrape":
                try:
                    from Provider.metadata_provider import list_metadata_providers

                    meta_providers = list_metadata_providers(config) or {}
                    if meta_providers:
                        return sorted(meta_providers.keys())
                except Exception:
                    pass

            # Static per-cmdlet choices from the catalog.
            return get_cmdlet_arg_choices(cmd_name, arg_name) or []
        except Exception:
            return []
2025-12-11 12:47:30 -08:00
2025-12-13 12:09:50 -08:00
2025-12-20 02:12:45 -08:00
class CmdletCompleter(Completer):
    """Prompt-toolkit completer for the Medeia cmdlet REPL."""

    def __init__(self, *, config_loader: "ConfigLoader") -> None:
        self._config_loader = config_loader
        # Seeded once here; refreshed on every completion pass in
        # get_completions() to pick up dynamically registered cmdlets.
        self.cmdlet_names = CmdletIntrospection.cmdlet_names()

    @staticmethod
    def _used_arg_logicals(
        cmd_name: str,
        stage_tokens: List[str],
        config: Dict[str, Any]
    ) -> Set[str]:
        """Return logical argument names already used in this cmdlet stage.

        Example: if the user has typed `download-file -url ...`, then `url`
        is considered used and should not be suggested again (even as `--url`).
        """
        arg_flags = CmdletIntrospection.cmdlet_args(cmd_name, config)
        allowed = {a.lstrip("-").strip().lower()
                   for a in arg_flags if a}
        if not allowed:
            return set()
        used: Set[str] = set()
        for tok in stage_tokens[1:]:
            if not tok or not tok.startswith("-"):
                continue
            if tok in {"-",
                       "--"}:
                continue
            # Handle common `-arg=value` form.
            raw = tok.split("=", 1)[0]
            logical = raw.lstrip("-").strip().lower()
            if logical and logical in allowed:
                used.add(logical)
        return used

    @staticmethod
    def _flag_value(tokens: Sequence[str], *flags: str) -> Optional[str]:
        """Return the value given for any of *flags* in *tokens*, or None.

        Supports both `-flag value` and `-flag=value` forms; flag matching is
        case-insensitive, but the returned value keeps its original casing.
        """
        want = {str(f).strip().lower() for f in flags if str(f).strip()}
        if not want:
            return None
        for idx, tok in enumerate(tokens):
            low = str(tok or "").strip().lower()
            if "=" in low:
                head, val = low.split("=", 1)
                if head in want:
                    # Re-split the original token so the value's case survives.
                    return tok.split("=", 1)[1]
            if low in want and idx + 1 < len(tokens):
                return tokens[idx + 1]
        return None

    def get_completions(
        self,
        document: Document,
        complete_event
    ):  # type: ignore[override]
        """Yield completions for the text before the cursor.

        Handles, in order: empty stage (all cmdlet names), a single token
        (cmdlet-name prefix or first-argument suggestions), search-file
        inline `field:value` queries, argument *value* choices, and finally
        remaining argument *flags* plus -help/--help.
        """
        # Refresh cmdlet names from introspection to pick up dynamic updates
        self.cmdlet_names = CmdletIntrospection.cmdlet_names(force=True)
        text = document.text_before_cursor
        tokens = text.split()
        ends_with_space = bool(text) and text[-1].isspace()

        # Only the stage after the last pipe is relevant for completion.
        last_pipe = -1
        for idx, tok in enumerate(tokens):
            if tok == "|":
                last_pipe = idx
        stage_tokens = tokens[last_pipe + 1:] if last_pipe >= 0 else tokens

        if not stage_tokens:
            for cmd in self.cmdlet_names:
                yield Completion(cmd, start_position=0)
            return

        if len(stage_tokens) == 1:
            current = stage_tokens[0].lower()

            if ends_with_space:
                # Command fully typed; suggest its arguments next.
                cmd_name = current.replace("_", "-")

                config = self._config_loader.load()
                if cmd_name == "help":
                    for cmd in self.cmdlet_names:
                        yield Completion(cmd, start_position=0)
                    return

                if cmd_name not in self.cmdlet_names:
                    return

                arg_names = CmdletIntrospection.cmdlet_args(cmd_name, config)
                # Offer one spelling per logical arg (single-dash preferred).
                logical_seen: Set[str] = set()
                for arg in arg_names:
                    arg_low = arg.lower()
                    if arg_low.startswith("--"):
                        continue
                    logical = arg.lstrip("-").lower()
                    if logical in logical_seen:
                        continue
                    yield Completion(arg, start_position=0)
                    logical_seen.add(logical)

                yield Completion("-help", start_position=0)
                return

            # Partial first token: match cmdlet names and REPL keywords.
            for cmd in self.cmdlet_names:
                if cmd.startswith(current):
                    yield Completion(cmd, start_position=-len(current))
            for keyword in ("help", "exit", "quit"):
                if keyword.startswith(current):
                    yield Completion(keyword, start_position=-len(current))
            return

        cmd_name = stage_tokens[0].replace("_", "-").lower()
        if ends_with_space:
            current_token = ""
            prev_token = stage_tokens[-1].lower()
        else:
            current_token = stage_tokens[-1].lower()
            prev_token = stage_tokens[-2].lower() if len(stage_tokens) > 1 else ""
        config = self._config_loader.load()
        provider_name = None
        if cmd_name == "search-file":
            provider_name = self._flag_value(stage_tokens, "-provider", "--provider")
        if (
            cmd_name == "search-file"
            and provider_name
            and not ends_with_space
            and ":" in current_token
            and not current_token.startswith("-")
        ):
            # Allow quoted tokens like "system:g
            quote_prefix = current_token[0] if current_token[:1] in {"'", '"'} else ""
            inline_token = current_token[1:] if quote_prefix else current_token
            if inline_token.endswith(quote_prefix) and len(inline_token) > 1:
                inline_token = inline_token[:-1]
            # Allow comma-separated inline specs; operate on the last segment only.
            if "," in inline_token:
                inline_token = inline_token.split(",")[-1].lstrip()
            if ":" not in inline_token:
                return
            field, partial = inline_token.split(":", 1)
            field = field.strip().lower()
            partial_lower = partial.strip().lower()
            inline_choices = provider_inline_query_choices(provider_name, field, config)
            if inline_choices:
                filtered = (
                    [c for c in inline_choices if partial_lower in str(c).lower()]
                    if partial_lower
                    else list(inline_choices)
                )
                for choice in (filtered or inline_choices):
                    # Replace only the partial after the colon; keep the field prefix and quotes as typed.
                    start_pos = -len(partial)
                    suggestion = str(choice)
                    yield Completion(suggestion, start_position=start_pos)
            return

        # Value completion: the previous token may be a flag with known choices.
        choices = CmdletIntrospection.arg_choices(
            cmd_name=cmd_name,
            arg_name=prev_token,
            config=config,
            force=True
        )
        if choices:
            choice_list = choices
            normalized_prev = prev_token.lstrip("-").strip().lower()
            if normalized_prev == "provider" and current_token:
                # Narrow provider choices by the typed substring when possible.
                current_lower = current_token.lower()
                filtered = [c for c in choices if current_lower in c.lower()]
                if filtered:
                    choice_list = filtered
            for choice in choice_list:
                yield Completion(choice, start_position=-len(current_token))
            return

        # Flag completion: suggest unused argument flags for this cmdlet.
        arg_names = CmdletIntrospection.cmdlet_args(cmd_name, config)
        used_logicals = self._used_arg_logicals(cmd_name, stage_tokens, config)
        logical_seen: Set[str] = set()
        for arg in arg_names:
            arg_low = arg.lower()
            prefer_single_dash = current_token in {"",
                                                   "-"}
            if prefer_single_dash and arg_low.startswith("--"):
                continue
            logical = arg.lstrip("-").lower()
            if logical in used_logicals:
                continue
            if prefer_single_dash and logical in logical_seen:
                continue
            if arg_low.startswith(current_token):
                yield Completion(arg, start_position=-len(current_token))
            if prefer_single_dash:
                logical_seen.add(logical)

        if cmd_name in self.cmdlet_names:
            if current_token.startswith("--"):
                if "--help".startswith(current_token):
                    yield Completion("--help", start_position=-len(current_token))
            else:
                if "-help".startswith(current_token):
                    yield Completion("-help", start_position=-len(current_token))
class MedeiaLexer(Lexer):
    """prompt_toolkit lexer that colourises the Medeia REPL input line.

    Each line is split into whitespace / pipe / quoted-string / word tokens
    with a single regex pass, and every fragment is tagged with a style class
    (``class:cmdlet``, ``class:pipe``, ``class:string``, ``class:argument``,
    ``class:value``, ``class:selection_at``, ``class:selection_range``) that
    the prompt style maps to colours.
    """

    def lex_document(self, document: Document): # type: ignore[override]

        def get_line(lineno: int):
            # Returns the (style, text) fragments for one visible line.
            line = document.lines[lineno]
            tokens: List[tuple[str, str]] = []
            # Exactly one of the four groups matches per hit; together they
            # cover every character of the line, so fragments concatenate
            # back to the original text (required by prompt_toolkit).
            pattern = re.compile(
                r"""
                (\s+) | # 1. Whitespace
                (\|) | # 2. Pipe
                ("(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*') | # 3. Quoted string
                ([^\s\|]+) # 4. Word
                """,
                re.VERBOSE,
            )

            # The first word of the line — and the first word after every
            # pipe — is highlighted as a cmdlet name.
            is_cmdlet = True
            def _emit_keyed_value(word: str) -> bool:
                """Emit `key:` prefixes (comma-separated) as argument tokens.
                Designed for values like:
                    clip:3m4s-3m14s,1h22m-1h33m,item:2-3
                Avoids special-casing URLs (://) and Windows drive paths (C:\\...).
                Returns True if it handled the token.
                """
                if not word or ":" not in word:
                    return False
                # Avoid URLs and common scheme patterns.
                if "://" in word:
                    return False
                # Avoid Windows drive paths (e.g., C:\foo or D:/bar)
                if re.match(r"^[A-Za-z]:[\\/]", word):
                    return False
                key_prefix = re.compile(r"^([A-Za-z_][A-Za-z0-9_-]*:)(.*)$")
                parts = word.split(",")
                handled_any = False
                for i, part in enumerate(parts):
                    if i > 0:
                        # Re-emit the comma the split consumed.
                        tokens.append(("class:value", ","))
                    if part == "":
                        continue
                    m = key_prefix.match(part)
                    if m:
                        # `key:` prefix styled as an argument, remainder as a value.
                        tokens.append(("class:argument", m.group(1)))
                        if m.group(2):
                            tokens.append(("class:value", m.group(2)))
                        handled_any = True
                    else:
                        tokens.append(("class:value", part))
                        handled_any = True

                return handled_any

            for match in pattern.finditer(line):
                ws, pipe, quote, word = match.groups()
                if ws:
                    tokens.append(("", ws))
                    continue
                if pipe:
                    tokens.append(("class:pipe", pipe))
                    is_cmdlet = True
                    continue
                if quote:
                    # If the quoted token contains a keyed spec (clip:/item:/hash:),
                    # highlight the `key:` portion in argument-blue even inside quotes.
                    if len(quote) >= 2 and quote[0] == quote[-1] and quote[0] in ('"', "'"):
                        q = quote[0]
                        inner = quote[1:-1]
                        start_index = len(tokens)
                        if _emit_keyed_value(inner):
                            # _emit_keyed_value already appended tokens for inner; insert opening quote
                            # before that chunk, then add the closing quote.
                            tokens.insert(start_index, ("class:string", q))
                            tokens.append(("class:string", q))
                            is_cmdlet = False
                            continue

                    tokens.append(("class:string", quote))
                    is_cmdlet = False
                    continue
                if not word:
                    continue

                if word.startswith("@"): # selection tokens
                    rest = word[1:]
                    if rest and re.fullmatch(r"[0-9\-\*,]+", rest):
                        # e.g. @1, @2-4, @1,3 — split into marker + range.
                        tokens.append(("class:selection_at", "@"))
                        tokens.append(("class:selection_range", rest))
                        is_cmdlet = False
                        continue
                    if rest == "":
                        tokens.append(("class:selection_at", "@"))
                        is_cmdlet = False
                        continue

                if is_cmdlet:
                    tokens.append(("class:cmdlet", word))
                    is_cmdlet = False
                elif word.startswith("-"):
                    tokens.append(("class:argument", word))
                else:
                    if not _emit_keyed_value(word):
                        tokens.append(("class:value", word))

            return tokens

        return get_line
2025-12-12 21:55:38 -08:00
2025-12-11 12:47:30 -08:00
2025-12-20 02:12:45 -08:00
class ConfigLoader:
    """Loads a fresh, isolated snapshot of the application config.

    Every :meth:`load` call re-reads the config rooted at *root* and hands
    back a deep copy, so callers may mutate the result freely without
    affecting any shared state. Any failure yields an empty dict.
    """

    def __init__(self, *, root: Path) -> None:
        self._root = root

    def load(self) -> Dict[str, Any]:
        """Return a deep-copied config dict, or ``{}`` on any error."""
        try:
            snapshot = load_config(config_dir=self._root)
            return deepcopy(snapshot)
        except Exception:
            return {}
2025-11-25 22:34:41 -08:00
2025-11-27 10:59:01 -08:00
2025-12-20 02:12:45 -08:00
class CmdletHelp:
    """Static helpers that render cmdlet documentation to the console."""

    @staticmethod
    def show_cmdlet_list() -> None:
        """Print a table of every registered cmdlet (aliases, args, summary)."""
        try:
            catalog = list_cmdlet_metadata() or {}

            from rich.box import SIMPLE
            from rich.panel import Panel
            from rich.table import Table as RichTable

            listing = RichTable(
                show_header=True,
                header_style="bold",
                box=SIMPLE,
                expand=True,
            )
            listing.add_column("Cmdlet", no_wrap=True)
            listing.add_column("Aliases")
            listing.add_column("Args")
            listing.add_column("Summary")

            for command in sorted(catalog):
                entry = catalog[command]
                alias_text = ", ".join(
                    str(a) for a in (entry.get("aliases", []) or []) if str(a).strip()
                )
                arg_names = (
                    spec.get("name")
                    for spec in (entry.get("args", []) or [])
                    if isinstance(spec, dict) and spec.get("name")
                )
                arg_text = ", ".join(str(n) for n in arg_names if str(n).strip())
                listing.add_row(
                    str(command),
                    alias_text,
                    arg_text,
                    str(entry.get("summary") or ""),
                )

            stdout_console().print(Panel(listing, title="Cmdlets", expand=False))
        except Exception as exc:
            from rich.panel import Panel
            from rich.text import Text

            stderr_console().print(
                Panel(Text(f"Error: {exc}"), title="Error", expand=False)
            )

    @staticmethod
    def show_cmdlet_help(cmd_name: str) -> None:
        """Print detailed help for one cmdlet, or a not-found message."""
        try:
            meta = get_cmdlet_metadata(cmd_name)
            if not meta:
                print(f"Unknown command: {cmd_name}\n")
                return
            CmdletHelp._print_metadata(cmd_name, meta)
        except Exception as exc:
            print(f"Error: {exc}\n")

    @staticmethod
    def _print_metadata(cmd_name: str, data: Any) -> None:
        """Render one cmdlet's metadata (a dict or to_dict-able object) as panels."""
        payload = data.to_dict() if hasattr(data, "to_dict") else data
        if not isinstance(payload, dict):
            from rich.panel import Panel
            from rich.text import Text

            stderr_console().print(
                Panel(
                    Text(f"Invalid metadata for {cmd_name}"),
                    title="Error",
                    expand=False,
                )
            )
            return

        name = payload.get("name", cmd_name)
        summary = payload.get("summary", "")
        usage = payload.get("usage", "")
        description = payload.get("description", "")
        arg_specs = payload.get("args", [])
        details = payload.get("details", [])

        from rich.box import SIMPLE
        from rich.console import Group
        from rich.panel import Panel
        from rich.table import Table as RichTable
        from rich.text import Text

        # Header panel: bold cmdlet name plus a one-line synopsis.
        stdout_console().print(
            Panel(
                Group(
                    Text.assemble((str(name), "bold")),
                    Text(str(usage or name)),
                ),
                title="Help",
                expand=False,
            )
        )

        if summary or description:
            blurbs = [Text(str(part)) for part in (summary, description) if part]
            stdout_console().print(
                Panel(Group(*blurbs), title="Description", expand=False)
            )

        if arg_specs and isinstance(arg_specs, list):
            params = RichTable(
                show_header=True,
                header_style="bold",
                box=SIMPLE,
                expand=True,
            )
            params.add_column("Arg", no_wrap=True)
            params.add_column("Type", no_wrap=True)
            params.add_column("Required", no_wrap=True)
            params.add_column("Description")
            for spec in arg_specs:
                # Specs may be plain dicts or attribute-bearing objects.
                if isinstance(spec, dict):
                    arg_name = spec.get("name", "?")
                    arg_type = spec.get("type", "string")
                    is_required = bool(spec.get("required", False))
                    arg_desc = spec.get("description", "")
                else:
                    arg_name = getattr(spec, "name", "?")
                    arg_type = getattr(spec, "type", "string")
                    is_required = bool(getattr(spec, "required", False))
                    arg_desc = getattr(spec, "description", "")
                params.add_row(
                    f"-{arg_name}",
                    str(arg_type),
                    "yes" if is_required else "no",
                    str(arg_desc or ""),
                )
            stdout_console().print(Panel(params, title="Parameters", expand=False))

        if details:
            stdout_console().print(
                Panel(
                    Group(*(Text(str(item)) for item in details)),
                    title="Remarks",
                    expand=False,
                )
            )
2025-12-19 02:29:42 -08:00
2025-12-20 02:12:45 -08:00
class CmdletExecutor:
    """Runs a single cmdlet invocation (no `|` pipeline stages) from the REPL.

    Responsibilities visible in this class:
      - resolve the cmdlet callable (registry lookup with lazy import),
      - strip `@` selection tokens out of the argument list and turn them
        into a piped-input list taken from the last result table,
      - drive the single-stage Live progress UI,
      - publish emitted items back into the pipeline context as the next
        "last result" table, and
      - record the stage in the worker-manager bookkeeping.
    """

    def __init__(self, *, config_loader: ConfigLoader) -> None:
        # Loader is consulted on every execute() so each run sees fresh config.
        self._config_loader = config_loader

    @staticmethod
    def _get_table_title_for_command(
        cmd_name: str,
        emitted_items: Optional[List[Any]] = None,
        cmd_args: Optional[List[str]] = None,
    ) -> str:
        """Return the result-table title for *cmd_name*.

        A ``None`` entry in the map (get-metadata) means "derive the title
        from the first emitted item's ``title``"; unknown commands default to
        "Results". ``cmd_args`` is accepted for signature compatibility but is
        not consulted here.
        """
        title_map = {
            "search-file": "Results",
            "search_file": "Results",
            "download-data": "Downloads",
            "download_data": "Downloads",
            "download-file": "Downloads",
            "download_file": "Downloads",
            "get-tag": "Tags",
            "get_tag": "Tags",
            "get-file": "Results",
            "get_file": "Results",
            "add-tags": "Results",
            "add_tags": "Results",
            "delete-tag": "Results",
            "delete_tag": "Results",
            "add-url": "Results",
            "add_url": "Results",
            "get-url": "url",
            "get_url": "url",
            "delete-url": "Results",
            "delete_url": "Results",
            "get-note": "Notes",
            "get_note": "Notes",
            "add-note": "Results",
            "add_note": "Results",
            "delete-note": "Results",
            "delete_note": "Results",
            "get-relationship": "Relationships",
            "get_relationship": "Relationships",
            "add-relationship": "Results",
            "add_relationship": "Results",
            "add-file": "Results",
            "add_file": "Results",
            "delete-file": "Results",
            "delete_file": "Results",
            "get-metadata": None,
            "get_metadata": None,
        }
        mapped = title_map.get(cmd_name, "Results")
        if mapped is not None:
            return mapped
        if emitted_items:
            first = emitted_items[0]
            try:
                if isinstance(first, dict) and first.get("title"):
                    return str(first.get("title"))
                if hasattr(first, "title") and getattr(first, "title"):
                    return str(getattr(first, "title"))
            except Exception:
                pass
        return "Results"

    def execute(self, cmd_name: str, args: List[str]) -> None:
        """Resolve and run *cmd_name* with *args* as a standalone command.

        `@N`/`@*`/filter tokens in *args* select rows from the last result
        table and become the cmdlet's piped input; all other tokens are passed
        through as the cmdlet's argument list. Errors are reported to stdout
        rather than raised; the worker stage session is always closed.
        """
        from SYS import pipeline as ctx
        from cmdlet import REGISTRY

        # REPL guard: stage-local selection tables should not leak across independent
        # commands. @ selection can always re-seed from the last result table.
        try:
            if hasattr(ctx, "set_current_stage_table"):
                ctx.set_current_stage_table(None)
        except Exception:
            pass

        cmd_fn = REGISTRY.get(cmd_name)
        if not cmd_fn:
            # Lazy-import module and register its CMDLET.
            try:
                mod = import_cmd_module(cmd_name)
                data = getattr(mod, "CMDLET", None) if mod else None
                if data and hasattr(data, "exec") and callable(getattr(data, "exec")):
                    run_fn = getattr(data, "exec")
                    REGISTRY[cmd_name] = run_fn
                    cmd_fn = run_fn
            except Exception:
                cmd_fn = None

        if not cmd_fn:
            print(f"Unknown command: {cmd_name}\n")
            return

        config = self._config_loader.load()

        # ------------------------------------------------------------------
        # Single-command Live pipeline progress (match REPL behavior)
        # ------------------------------------------------------------------
        progress_ui = None
        pipe_idx: Optional[int] = None
        def _maybe_start_single_live_progress(
            *,
            cmd_name_norm: str,
            filtered_args: List[str],
            piped_input: Any,
            config: Any,
        ) -> None:
            # Starts the Live progress UI (assigning the enclosing
            # progress_ui/pipe_idx) unless the command, config, or terminal
            # rules it out. Best-effort: failures leave both unset.
            nonlocal progress_ui, pipe_idx
            # Keep behavior consistent with pipeline runner exclusions.
            # Some commands render their own Rich UI (tables/panels) and don't
            # play nicely with Live cursor control.
            if cmd_name_norm in {
                "get-relationship",
                "get-rel",
                ".pipe",
                ".mpv",
                ".matrix",
                ".telegram",
                "telegram",
                "delete-file",
                "del-file",
            }:
                return
            # add-file directory selector mode: show only the selection table, no Live progress.
            if cmd_name_norm in {"add-file", "add_file"}:
                try:
                    from pathlib import Path as _Path
                    toks = list(filtered_args or [])
                    i = 0
                    while i < len(toks):
                        t = str(toks[i])
                        low = t.lower().strip()
                        if low in {"-path", "--path", "-p"} and i + 1 < len(toks):
                            nxt = str(toks[i + 1])
                            if nxt and ("," not in nxt):
                                p = _Path(nxt)
                                if p.exists() and p.is_dir():
                                    # Existing single directory => selector mode.
                                    return
                            i += 2
                            continue
                        i += 1
                except Exception:
                    pass
            try:
                quiet_mode = (
                    bool(config.get("_quiet_background_output"))
                    if isinstance(config, dict) else False
                )
            except Exception:
                quiet_mode = False
            if quiet_mode:
                return
            try:
                import sys as _sys
                # Live progress renders on stderr; skip when not a TTY.
                if not bool(getattr(_sys.stderr, "isatty", lambda: False)()):
                    return
            except Exception:
                return
            try:
                from SYS.models import PipelineLiveProgress
                progress_ui = PipelineLiveProgress([cmd_name_norm], enabled=True)
                progress_ui.start()
                try:
                    if hasattr(ctx, "set_live_progress"):
                        ctx.set_live_progress(progress_ui)
                except Exception:
                    pass
                pipe_idx = 0
                # Estimate per-item task count for the single pipe.
                total_items = 1
                preview_items: Optional[List[Any]] = None
                try:
                    if isinstance(piped_input, list):
                        total_items = max(1, int(len(piped_input)))
                        preview_items = list(piped_input)
                    elif piped_input is not None:
                        total_items = 1
                        preview_items = [piped_input]
                    else:
                        # No piped input: scan the args for path/url-like
                        # values to build an item preview.
                        preview: List[Any] = []
                        toks = list(filtered_args or [])
                        i = 0
                        while i < len(toks):
                            t = str(toks[i])
                            low = t.lower().strip()
                            if (cmd_name_norm in {"add-file", "add_file"}
                                    and low in {"-path", "--path", "-p"}
                                    and i + 1 < len(toks)):
                                nxt = str(toks[i + 1])
                                if nxt:
                                    if "," in nxt:
                                        # Comma-separated path list.
                                        parts = [
                                            p.strip().strip("\"'")
                                            for p in nxt.split(",")
                                        ]
                                        parts = [p for p in parts if p]
                                        if parts:
                                            preview.extend(parts)
                                        i += 2
                                        continue
                                    else:
                                        preview.append(nxt)
                                        i += 2
                                        continue
                            if low in {"-url", "--url"} and i + 1 < len(toks):
                                nxt = str(toks[i + 1])
                                if nxt and not nxt.startswith("-"):
                                    preview.append(nxt)
                                    i += 2
                                    continue
                            if (not t.startswith("-")) and ("://" in low
                                    or low.startswith(("magnet:", "torrent:"))):
                                preview.append(t)
                            i += 1
                        preview_items = preview if preview else None
                        total_items = max(1, int(len(preview)) if preview else 1)
                except Exception:
                    total_items = 1
                    preview_items = None
                try:
                    progress_ui.begin_pipe(
                        0,
                        total_items=int(total_items),
                        items_preview=preview_items
                    )
                except Exception:
                    pass
            except Exception:
                progress_ui = None
                pipe_idx = None

        filtered_args: List[str] = []
        selected_indices: List[int] = []
        select_all = False
        selection_filters: List[List[Tuple[str, str]]] = []

        # Flags that take a value: an `@...` token right after one of these is
        # a literal argument value, not a selection.
        value_flags: Set[str] = set()
        try:
            meta = get_cmdlet_metadata(cmd_name)
            raw = meta.get("raw") if isinstance(meta, dict) else None
            arg_specs = getattr(raw, "arg", None) if raw is not None else None
            if isinstance(arg_specs, list):
                for spec in arg_specs:
                    spec_type = str(getattr(spec, "type", "string") or "string").strip().lower()
                    if spec_type == "flag":
                        continue
                    spec_name = str(getattr(spec, "name", "") or "")
                    canonical = spec_name.lstrip("-").strip()
                    if not canonical:
                        continue
                    value_flags.add(f"-{canonical}".lower())
                    value_flags.add(f"--{canonical}".lower())
                    alias = str(getattr(spec, "alias", "") or "").strip()
                    if alias:
                        value_flags.add(f"-{alias}".lower())
        except Exception:
            value_flags = set()

        # Partition args into: selection filters, explicit index selections,
        # select-all, and the remaining cmdlet arguments.
        for i, arg in enumerate(args):
            if isinstance(arg, str) and arg.startswith("@"): # selection candidate
                prev = str(args[i - 1]).lower() if i > 0 else ""
                if prev in value_flags:
                    filtered_args.append(arg)
                    continue

                # Universal selection filter: @"COL:expr" (quotes may be stripped by tokenization)
                filter_spec = SelectionFilterSyntax.parse(arg)
                if filter_spec is not None:
                    selection_filters.append(filter_spec)
                    continue
                if arg.strip() == "@*":
                    select_all = True
                    continue
                selection = SelectionSyntax.parse(arg)
                if selection is not None:
                    # Selections are 1-based for users; store 0-based, deduped.
                    zero_based = sorted(idx - 1 for idx in selection)
                    for idx in zero_based:
                        if idx not in selected_indices:
                            selected_indices.append(idx)
                    continue
                filtered_args.append(arg)
                continue
            filtered_args.append(str(arg))

        # IMPORTANT: Do not implicitly feed the previous command's results into
        # a new command unless the user explicitly selected items via @ syntax.
        # Piping should require `|` (or an explicit @ selection).
        piped_items = ctx.get_last_result_items()
        result: Any = None
        effective_selected_indices: List[int] = []
        if piped_items and (select_all or selected_indices or selection_filters):
            # Filters narrow the candidate rows first; indices then select
            # positions *within the filtered set*.
            candidate_idxs = list(range(len(piped_items)))
            for spec in selection_filters:
                candidate_idxs = [
                    i for i in candidate_idxs
                    if SelectionFilterSyntax.matches(piped_items[i], spec)
                ]
            if select_all:
                effective_selected_indices = list(candidate_idxs)
            elif selected_indices:
                effective_selected_indices = [
                    candidate_idxs[i] for i in selected_indices
                    if 0 <= i < len(candidate_idxs)
                ]
            else:
                # Filters alone select everything that matched.
                effective_selected_indices = list(candidate_idxs)
            result = [piped_items[i] for i in effective_selected_indices]

        worker_manager = WorkerManagerRegistry.ensure(config)
        stage_session = WorkerStages.begin_stage(
            worker_manager,
            cmd_name=cmd_name,
            stage_tokens=[cmd_name, *filtered_args],
            config=config,
            command_text=" ".join([cmd_name, *filtered_args]).strip() or cmd_name,
        )

        stage_worker_id = stage_session.worker_id if stage_session else None
        # Start live progress after we know the effective cmd + args + piped input.
        cmd_norm = str(cmd_name or "").replace("_", "-").strip().lower()
        _maybe_start_single_live_progress(
            cmd_name_norm=cmd_norm or str(cmd_name or "").strip().lower(),
            filtered_args=filtered_args,
            piped_input=result,
            config=config,
        )
        on_emit = None
        if progress_ui is not None and pipe_idx is not None:
            _ui = progress_ui
            # Default arg binds the progress UI at definition time.
            def _on_emit(obj: Any, _progress=_ui) -> None:
                try:
                    _progress.on_emit(0, obj)
                except Exception:
                    pass
            on_emit = _on_emit
        pipeline_ctx = ctx.PipelineStageContext(
            stage_index=0,
            total_stages=1,
            pipe_index=pipe_idx if pipe_idx is not None else 0,
            worker_id=stage_worker_id,
            on_emit=on_emit,
        )
        ctx.set_stage_context(pipeline_ctx)
        stage_status = "completed"
        stage_error = ""
        ctx.set_last_selection(effective_selected_indices)
        try:
            try:
                if hasattr(ctx, "set_current_cmdlet_name"):
                    ctx.set_current_cmdlet_name(cmd_name)
            except Exception:
                pass
            try:
                if hasattr(ctx, "set_current_stage_text"):
                    # Prefer the raw user-entered command text when available.
                    raw_stage = ""
                    try:
                        raw_stage = (
                            ctx.get_current_command_text("")
                            if hasattr(ctx, "get_current_command_text") else ""
                        )
                    except Exception:
                        raw_stage = ""
                    if raw_stage:
                        ctx.set_current_stage_text(raw_stage)
                    else:
                        ctx.set_current_stage_text(
                            " ".join([cmd_name, *filtered_args]).strip() or cmd_name
                        )
            except Exception:
                pass
            ret_code = cmd_fn(result, filtered_args, config)
            if getattr(pipeline_ctx, "emits", None):
                emits = list(pipeline_ctx.emits)
                # Shared `-path` behavior: if the cmdlet emitted temp/PATH file artifacts,
                # move them to the user-specified destination and update emitted paths.
                try:
                    from cmdlet import _shared as sh
                    emits = sh.apply_output_path_from_pipeobjects(
                        cmd_name=cmd_name,
                        args=filtered_args,
                        emits=emits
                    )
                    try:
                        pipeline_ctx.emits = list(emits)
                    except Exception:
                        pass
                except Exception:
                    pass
                # Detect format-selection emits and skip printing (user selects with @N).
                is_format_selection = False
                if emits:
                    first_emit = emits[0]
                    if isinstance(first_emit, dict) and "format_id" in first_emit:
                        is_format_selection = True
                if is_format_selection:
                    ctx.set_last_result_items_only(emits)
                else:
                    table_title = self._get_table_title_for_command(
                        cmd_name,
                        emits,
                        filtered_args
                    )
                    # Commands whose output table supports @N re-selection.
                    selectable_commands = {
                        "search-file",
                        "download-data",
                        "download-file",
                        "search_file",
                        "download_data",
                        "download_file",
                        ".config",
                        ".worker",
                    }
                    # Commands whose output is informational only.
                    display_only_commands = {
                        "get-url",
                        "get_url",
                        "get-note",
                        "get_note",
                        "get-relationship",
                        "get_relationship",
                        "get-file",
                        "get_file",
                    }
                    # Commands that build/maintain their own result table.
                    self_managing_commands = {
                        "get-tag",
                        "get_tag",
                        "tags",
                        "search-file",
                        "search_file",
                    }
                    if cmd_name in self_managing_commands:
                        table = ctx.get_last_result_table()
                        if table is None:
                            table = ResultTable(table_title)
                            for emitted in emits:
                                table.add_result(emitted)
                    else:
                        table = ResultTable(table_title)
                        for emitted in emits:
                            table.add_result(emitted)
                    if cmd_name in selectable_commands:
                        table.set_source_command(cmd_name, filtered_args)
                        ctx.set_last_result_table(table, emits)
                        ctx.set_current_stage_table(None)
                    elif cmd_name in display_only_commands:
                        ctx.set_last_result_items_only(emits)
                    else:
                        ctx.set_last_result_items_only(emits)
                    # Stop Live progress before printing tables.
                    if progress_ui is not None:
                        try:
                            if pipe_idx is not None:
                                progress_ui.finish_pipe(
                                    int(pipe_idx),
                                    force_complete=(stage_status == "completed")
                                )
                        except Exception:
                            pass
                        try:
                            progress_ui.stop()
                        except Exception:
                            pass
                        try:
                            if hasattr(ctx, "set_live_progress"):
                                ctx.set_live_progress(None)
                        except Exception:
                            pass
                        progress_ui = None
                        pipe_idx = None
                    stdout_console().print()
                    stdout_console().print(table)

            # If the cmdlet produced a current-stage table without emits (e.g. format selection),
            # render it here for parity with REPL pipeline runner.
            if (not getattr(pipeline_ctx, "emits", None)) and hasattr(ctx, "get_current_stage_table"):
                try:
                    stage_table = ctx.get_current_stage_table()
                except Exception:
                    stage_table = None
                if stage_table is not None:
                    try:
                        already_rendered = bool(
                            getattr(stage_table, "_rendered_by_cmdlet", False)
                        )
                    except Exception:
                        already_rendered = False
                    if already_rendered:
                        # Cmdlet printed it already; finally-block handles cleanup.
                        return
                    if progress_ui is not None:
                        try:
                            if pipe_idx is not None:
                                progress_ui.finish_pipe(
                                    int(pipe_idx),
                                    force_complete=(stage_status == "completed")
                                )
                        except Exception:
                            pass
                        try:
                            progress_ui.stop()
                        except Exception:
                            pass
                        try:
                            if hasattr(ctx, "set_live_progress"):
                                ctx.set_live_progress(None)
                        except Exception:
                            pass
                        progress_ui = None
                        pipe_idx = None
                    stdout_console().print()
                    stdout_console().print(stage_table)
            if ret_code != 0:
                stage_status = "failed"
                stage_error = f"exit code {ret_code}"
                print(f"[exit code: {ret_code}]\n")
        except Exception as exc:
            stage_status = "failed"
            stage_error = f"{type(exc).__name__}: {exc}"
            print(f"[error] {type(exc).__name__}: {exc}\n")
        finally:
            # Tear down Live progress if it is still running (error paths).
            if progress_ui is not None:
                try:
                    if pipe_idx is not None:
                        progress_ui.finish_pipe(
                            int(pipe_idx),
                            force_complete=(stage_status == "completed")
                        )
                except Exception:
                    pass
                try:
                    progress_ui.stop()
                except Exception:
                    pass
                try:
                    if hasattr(ctx, "set_live_progress"):
                        ctx.set_live_progress(None)
                except Exception:
                    pass
            # Do not keep stage tables around after a single command; it can cause
            # later @ selections to bind to stale tables (e.g. old add-file scans).
            try:
                if hasattr(ctx, "set_current_stage_table"):
                    ctx.set_current_stage_table(None)
            except Exception:
                pass
            try:
                if hasattr(ctx, "clear_current_cmdlet_name"):
                    ctx.clear_current_cmdlet_name()
            except Exception:
                pass
            try:
                if hasattr(ctx, "clear_current_stage_text"):
                    ctx.clear_current_stage_text()
            except Exception:
                pass
            ctx.clear_last_selection()
            if stage_session:
                stage_session.close(status=stage_status, error_msg=stage_error)
2025-12-11 12:47:30 -08:00
2025-11-25 20:09:33 -08:00
2025-12-20 02:12:45 -08:00
class PipelineExecutor:
2025-12-20 02:12:45 -08:00
    def __init__(self, *, config_loader: ConfigLoader) -> None:
        """Store the config loader; the toolbar output sink starts unset."""
        self._config_loader = config_loader
        # Optional sink for toolbar text; None means non-interactive use.
        self._toolbar_output: Optional[Callable[[str], None]] = None
2025-11-25 20:09:33 -08:00
2025-12-20 02:12:45 -08:00
    def set_toolbar_output(self, output: Optional[Callable[[str], None]]) -> None:
        """Set (or clear, with ``None``) the callable that receives toolbar text."""
        self._toolbar_output = output
@staticmethod
def _split_stages(tokens: Sequence[str]) -> List[List[str]]:
stages: List[List[str]] = []
current: List[str] = []
for token in tokens:
if token == "|":
if current:
stages.append(current)
current = []
2025-11-25 20:09:33 -08:00
else:
2025-12-20 02:12:45 -08:00
current.append(token)
if current:
stages.append(current)
return stages
2025-12-23 16:36:39 -08:00
@staticmethod
2026-01-01 20:37:27 -08:00
def _validate_download_file_relationship_order(stages: List[List[str]]) -> bool:
"""Guard against running add-relationship on unstored download-file results.
2025-12-23 16:36:39 -08:00
Intended UX:
2026-01-01 20:37:27 -08:00
download-file ... | add-file -store <store> | add-relationship
2025-12-23 16:36:39 -08:00
Rationale:
2026-01-01 20:37:27 -08:00
download-file outputs items that may not yet have a stable store+hash.
2025-12-23 16:36:39 -08:00
add-relationship is designed to operate in store/hash mode.
"""
def _norm(name: str) -> str:
return str(name or "").replace("_", "-").strip().lower()
names: List[str] = []
for stage in stages or []:
if not stage:
continue
names.append(_norm(stage[0]))
2026-01-01 20:37:27 -08:00
dl_idxs = [i for i, n in enumerate(names) if n == "download-file"]
2025-12-23 16:36:39 -08:00
rel_idxs = [i for i, n in enumerate(names) if n == "add-relationship"]
add_file_idxs = [i for i, n in enumerate(names) if n == "add-file"]
if not dl_idxs or not rel_idxs:
return True
2026-01-01 20:37:27 -08:00
# If download-file is upstream of add-relationship, require an add-file in between.
2025-12-23 16:36:39 -08:00
for rel_i in rel_idxs:
dl_before = [d for d in dl_idxs if d < rel_i]
if not dl_before:
continue
dl_i = max(dl_before)
if not any(dl_i < a < rel_i for a in add_file_idxs):
print(
2026-01-01 20:37:27 -08:00
"Pipeline order error: when using download-file with add-relationship, "
2025-12-23 16:36:39 -08:00
"add-relationship must come after add-file (so items are stored and have store+hash).\n"
2026-01-01 20:37:27 -08:00
"Example: download-file <...> | add-file -store <store> | add-relationship\n"
2025-12-23 16:36:39 -08:00
)
return False
return True
2025-12-22 02:11:53 -08:00
@staticmethod
def _try_clear_pipeline_stop(ctx: Any) -> None:
2025-11-25 20:09:33 -08:00
try:
2025-12-22 02:11:53 -08:00
if hasattr(ctx, "clear_pipeline_stop"):
ctx.clear_pipeline_stop()
except Exception:
pass
2025-12-20 02:12:45 -08:00
2025-12-22 02:11:53 -08:00
@staticmethod
def _maybe_seed_current_stage_table(ctx: Any) -> None:
try:
if hasattr(ctx,
"get_current_stage_table") and not ctx.get_current_stage_table():
2025-12-29 17:05:03 -08:00
display_table = (
ctx.get_display_table() if hasattr(ctx,
"get_display_table") else None
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
if display_table:
ctx.set_current_stage_table(display_table)
2025-11-25 20:09:33 -08:00
else:
2025-12-29 17:05:03 -08:00
last_table = (
ctx.get_last_result_table()
if hasattr(ctx,
"get_last_result_table") else None
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
if last_table:
ctx.set_current_stage_table(last_table)
2025-12-22 02:11:53 -08:00
except Exception:
pass
2025-12-20 02:12:45 -08:00
2025-12-22 02:11:53 -08:00
@staticmethod
def _maybe_apply_pending_pipeline_tail(ctx: Any,
stages: List[List[str]]) -> List[List[str]]:
2025-12-22 02:11:53 -08:00
try:
2025-12-29 17:05:03 -08:00
pending_tail = (
ctx.get_pending_pipeline_tail()
if hasattr(ctx,
"get_pending_pipeline_tail") else []
2025-12-29 17:05:03 -08:00
)
pending_source = (
ctx.get_pending_pipeline_source()
if hasattr(ctx,
"get_pending_pipeline_source") else None
2025-12-29 17:05:03 -08:00
)
2025-12-22 02:11:53 -08:00
except Exception:
pending_tail = []
pending_source = None
try:
2025-12-20 02:12:45 -08:00
current_source = (
2025-12-22 02:11:53 -08:00
ctx.get_current_stage_table_source_command()
if hasattr(ctx,
"get_current_stage_table_source_command") else None
2025-12-20 02:12:45 -08:00
)
2025-12-22 02:11:53 -08:00
except Exception:
current_source = None
try:
2025-12-20 02:12:45 -08:00
effective_source = current_source or (
2025-12-22 02:11:53 -08:00
ctx.get_last_result_table_source_command()
if hasattr(ctx,
"get_last_result_table_source_command") else None
2025-12-20 02:12:45 -08:00
)
2025-12-22 02:11:53 -08:00
except Exception:
effective_source = current_source
selection_only = bool(
len(stages) == 1 and stages[0] and stages[0][0].startswith("@")
)
2025-12-22 02:11:53 -08:00
if pending_tail and selection_only:
if (pending_source is None) or (effective_source
and pending_source == effective_source):
2025-12-22 02:11:53 -08:00
stages = list(stages) + list(pending_tail)
try:
2025-12-20 02:12:45 -08:00
if hasattr(ctx, "clear_pending_pipeline_tail"):
ctx.clear_pending_pipeline_tail()
2025-12-22 02:11:53 -08:00
except Exception:
pass
else:
try:
if hasattr(ctx, "clear_pending_pipeline_tail"):
ctx.clear_pending_pipeline_tail()
except Exception:
pass
return stages
2025-12-20 02:12:45 -08:00
2025-12-22 02:11:53 -08:00
def _apply_quiet_background_flag(self, config: Any) -> Any:
if isinstance(config, dict):
# This executor is used by both the REPL and the `pipeline` subcommand.
# Quiet/background mode is helpful for detached/background runners, but
# it suppresses interactive UX (like the pipeline Live progress UI).
config["_quiet_background_output"] = bool(self._toolbar_output is None)
return config
2025-12-20 02:12:45 -08:00
2025-12-22 02:11:53 -08:00
@staticmethod
2025-12-29 17:05:03 -08:00
def _extract_first_stage_selection_tokens(
stages: List[List[str]],
) -> tuple[List[List[str]],
List[int],
bool,
bool]:
2025-12-22 02:11:53 -08:00
first_stage_tokens = stages[0] if stages else []
first_stage_selection_indices: List[int] = []
first_stage_had_extra_args = False
first_stage_select_all = False
if first_stage_tokens:
new_first_stage: List[str] = []
for token in first_stage_tokens:
if token.startswith("@"): # selection
selection = SelectionSyntax.parse(token)
if selection is not None:
first_stage_selection_indices = sorted(
[i - 1 for i in selection]
)
2025-12-22 02:11:53 -08:00
continue
if token == "@*":
first_stage_select_all = True
continue
new_first_stage.append(token)
2025-12-20 02:12:45 -08:00
2025-12-22 02:11:53 -08:00
if new_first_stage:
stages = list(stages)
stages[0] = new_first_stage
if first_stage_selection_indices or first_stage_select_all:
first_stage_had_extra_args = True
elif first_stage_selection_indices or first_stage_select_all:
stages = list(stages)
stages.pop(0)
2025-12-20 02:12:45 -08:00
2025-12-29 17:05:03 -08:00
return (
stages,
first_stage_selection_indices,
first_stage_had_extra_args,
first_stage_select_all,
)
2025-12-20 02:12:45 -08:00
2025-12-22 02:11:53 -08:00
@staticmethod
def _apply_select_all_if_requested(ctx: Any,
indices: List[int],
select_all: bool) -> List[int]:
2025-12-22 02:11:53 -08:00
if not select_all:
return indices
try:
last_items = ctx.get_last_result_items()
except Exception:
last_items = None
if last_items:
return list(range(len(last_items)))
return indices
    @staticmethod
    def _maybe_run_class_selector(
        ctx: Any,
        config: Any,
        selected_items: list,
        *,
        stage_is_last: bool
    ) -> bool:
        """Dispatch a terminal ``@N`` selection to a provider/store selector.

        Collects candidate backend names from the current/last result table
        and from each selected item's ``provider``/``store``/``table`` fields,
        then tries provider selectors first and store-backend selectors
        second. Returns True when a selector handled (or fatally failed) the
        selection, False when no selector claimed it.

        Only runs for the last pipeline stage; mid-pipeline selections are
        left to the normal piping machinery.
        """
        if not stage_is_last:
            return False
        # Candidate backend keys, lower-cased and de-duplicated in insertion
        # order (order matters: table-derived hints come before item fields).
        candidates: list[str] = []
        seen: set[str] = set()

        def _add(value: Any) -> None:
            # Normalize and record a candidate key; ignore blanks/duplicates.
            try:
                text = str(value or "").strip().lower()
            except Exception:
                return
            if not text or text in seen:
                return
            seen.add(text)
            candidates.append(text)
        try:
            current_table = ctx.get_current_stage_table() or ctx.get_last_result_table()
            _add(
                current_table.table
                if current_table and hasattr(current_table, "table") else None
            )
            # Prefer an explicit provider hint from table metadata when available.
            # This keeps @N selectors working even when row payloads don't carry a
            # provider key (or when they carry a table-type like hifi.album).
            try:
                meta = (
                    current_table.get_table_metadata()
                    if current_table is not None and hasattr(current_table, "get_table_metadata")
                    else getattr(current_table, "table_metadata", None)
                )
            except Exception:
                meta = None
            if isinstance(meta, dict):
                _add(meta.get("provider"))
        except Exception:
            pass
        # Pull hints off each selected item (dicts and objects alike).
        for item in selected_items or []:
            if isinstance(item, dict):
                _add(item.get("provider"))
                _add(item.get("store"))
                _add(item.get("table"))
            else:
                _add(getattr(item, "provider", None))
                _add(getattr(item, "store", None))
                _add(getattr(item, "table", None))
        try:
            from ProviderCore.registry import get_provider, is_known_provider_name
        except Exception:
            get_provider = None  # type: ignore
            is_known_provider_name = None  # type: ignore
        # If we have a table-type like "hifi.album", also try its provider prefix ("hifi")
        # when that prefix is a registered provider name.
        if is_known_provider_name is not None:
            try:
                for key in list(candidates):
                    if not isinstance(key, str):
                        continue
                    if "." not in key:
                        continue
                    if is_known_provider_name(key):
                        continue
                    prefix = str(key).split(".", 1)[0].strip().lower()
                    if prefix and is_known_provider_name(prefix):
                        _add(prefix)
            except Exception:
                pass
        if get_provider is not None:
            for key in candidates:
                try:
                    if is_known_provider_name is not None and (
                            not is_known_provider_name(key)):
                        continue
                except Exception:
                    # If the predicate fails for any reason, fall back to legacy behavior.
                    pass
                try:
                    provider = get_provider(key, config)
                except Exception:
                    continue
                selector = getattr(provider, "selector", None)
                if selector is None:
                    continue
                try:
                    handled = bool(
                        selector(selected_items,
                                 ctx=ctx,
                                 stage_is_last=True)
                    )
                except Exception as exc:
                    # A raising selector still "handles" the selection: report
                    # the failure and stop further dispatch.
                    print(f"{key} selector failed: {exc}\n")
                    return True
                if handled:
                    return True
        # Fall back to store-backend selectors keyed by each item's store name.
        store_keys: list[str] = []
        for item in selected_items or []:
            if isinstance(item, dict):
                v = item.get("store")
            else:
                v = getattr(item, "store", None)
            name = str(v or "").strip()
            if name:
                store_keys.append(name)

        if store_keys:
            try:
                from Store.registry import Store as StoreRegistry
                store_registry = StoreRegistry(config, suppress_debug=True)
                _backend_names = list(store_registry.list_backends() or [])
                # Case-insensitive backend-name resolution map.
                _backend_by_lower = {
                    str(n).lower(): str(n)
                    for n in _backend_names if str(n).strip()
                }
                for name in store_keys:
                    resolved_name = name
                    if not store_registry.is_available(resolved_name):
                        resolved_name = _backend_by_lower.get(str(name).lower(), name)
                    if not store_registry.is_available(resolved_name):
                        continue
                    backend = store_registry[resolved_name]
                    selector = getattr(backend, "selector", None)
                    if selector is None:
                        continue
                    handled = bool(
                        selector(selected_items,
                                 ctx=ctx,
                                 stage_is_last=True)
                    )
                    if handled:
                        return True
            except Exception:
                pass
        return False
@staticmethod
def _summarize_stage_text(stage_tokens: Sequence[str], limit: int = 140) -> str:
combined = " ".join(str(tok) for tok in stage_tokens if tok is not None).strip()
if not combined:
return ""
normalized = re.sub(r"\s+", " ", combined)
if len(normalized) <= limit:
return normalized
return normalized[:limit - 3].rstrip() + "..."
@staticmethod
def _log_pipeline_event(
worker_manager: Any,
worker_id: Optional[str],
message: str,
) -> None:
if not worker_manager or not worker_id or not message:
return
try:
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
except Exception:
timestamp = ""
if timestamp:
text = f"{timestamp} - PIPELINE - {message}"
else:
text = f"PIPELINE - {message}"
try:
worker_manager.append_stdout(worker_id, text + "\n", channel="log")
except Exception:
pass
2025-12-25 04:49:22 -08:00
@staticmethod
2025-12-29 17:05:03 -08:00
def _maybe_open_url_selection(
current_table: Any,
selected_items: list,
*,
stage_is_last: bool
2025-12-29 17:05:03 -08:00
) -> bool:
2025-12-25 04:49:22 -08:00
if not stage_is_last:
return False
if not selected_items or len(selected_items) != 1:
return False
table_type = ""
source_cmd = ""
try:
table_type = str(getattr(current_table, "table", "") or "").strip().lower()
except Exception:
table_type = ""
try:
2025-12-29 17:05:03 -08:00
source_cmd = (
str(getattr(current_table,
"source_command",
"") or "").strip().replace("_",
"-").lower()
2025-12-29 17:05:03 -08:00
)
2025-12-25 04:49:22 -08:00
except Exception:
source_cmd = ""
if table_type != "url" and source_cmd != "get-url":
return False
item = selected_items[0]
url = None
try:
from cmdlet._shared import get_field
url = get_field(item, "url")
except Exception:
try:
url = item.get("url") if isinstance(item,
dict
) else getattr(item,
"url",
None)
2025-12-25 04:49:22 -08:00
except Exception:
url = None
url_text = str(url or "").strip()
if not url_text:
return False
try:
import webbrowser
webbrowser.open(url_text, new=2)
return True
except Exception:
return False
2025-12-29 17:05:03 -08:00
def _maybe_enable_background_notifier(
self,
worker_manager: Any,
config: Any,
pipeline_session: Any
2025-12-29 17:05:03 -08:00
) -> None:
2025-12-22 02:11:53 -08:00
if not (pipeline_session and worker_manager and isinstance(config, dict)):
return
session_worker_ids = config.get("_session_worker_ids")
if not session_worker_ids:
return
try:
output_fn = self._toolbar_output
quiet_mode = bool(config.get("_quiet_background_output"))
terminal_only = quiet_mode and not output_fn
kwargs: Dict[str,
Any] = {
"session_worker_ids": session_worker_ids,
"only_terminal_updates": terminal_only,
"overlay_mode": bool(output_fn),
}
2025-12-22 02:11:53 -08:00
if output_fn:
kwargs["output"] = output_fn
ensure_background_notifier(worker_manager, **kwargs)
except Exception:
pass
@staticmethod
def _get_raw_stage_texts(ctx: Any) -> List[str]:
raw_stage_texts: List[str] = []
try:
if hasattr(ctx, "get_current_command_stages"):
raw_stage_texts = ctx.get_current_command_stages() or []
except Exception:
raw_stage_texts = []
return raw_stage_texts
    def _maybe_apply_initial_selection(
        self,
        ctx: Any,
        config: Any,
        stages: List[List[str]],
        *,
        selection_indices: List[int],
        first_stage_had_extra_args: bool,
        worker_manager: Any,
        pipeline_session: Any,
    ) -> tuple[bool, Any]:
        """Resolve a leading ``@N`` selection against the current table.

        Mutates ``stages`` in place (@N command expansion, auto-stage
        insertion) and returns ``(proceed, piped_result)``:

        - ``(True, None)``  -- no selection to apply (or expansion-only).
        - ``(True, items)`` -- items were selected; ``items`` is piped into
          the first remaining stage.
        - ``(False, None)`` -- selection failed, or was fully handled by a
          provider/store selector, so the pipeline run should stop.
        """
        if not selection_indices:
            return True, None
        # Selection should operate on the *currently displayed* selectable table.
        # Some navigation flows (e.g. @.. back) can show a display table without
        # updating current_stage_table. Provider selectors rely on current_stage_table
        # to detect table type (e.g. hifi.album -> tracks), so sync it here.
        display_table = None
        try:
            display_table = (
                ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
            )
        except Exception:
            display_table = None
        current_stage_table = None
        try:
            current_stage_table = (
                ctx.get_current_stage_table()
                if hasattr(ctx, "get_current_stage_table") else None
            )
        except Exception:
            current_stage_table = None
        try:
            if display_table is not None and hasattr(ctx, "set_current_stage_table"):
                ctx.set_current_stage_table(display_table)
            elif current_stage_table is None and hasattr(ctx, "set_current_stage_table"):
                last_table = (
                    ctx.get_last_result_table()
                    if hasattr(ctx, "get_last_result_table") else None
                )
                if last_table is not None:
                    ctx.set_current_stage_table(last_table)
        except Exception:
            pass

        # Command + args that originally produced the table (used for @N
        # command expansion below).
        source_cmd = None
        source_args_raw = None
        try:
            source_cmd = ctx.get_current_stage_table_source_command()
            source_args_raw = ctx.get_current_stage_table_source_args()
        except Exception:
            source_cmd = None
            source_args_raw = None
        if isinstance(source_args_raw, str):
            source_args: List[str] = [source_args_raw]
        elif isinstance(source_args_raw, list):
            source_args = [str(x) for x in source_args_raw if x is not None]
        else:
            source_args = []
        current_table = None
        try:
            current_table = ctx.get_current_stage_table()
        except Exception:
            current_table = None
        table_type = (
            current_table.table if current_table and hasattr(current_table,
                                                             "table") else None
        )
        command_expanded = False
        example_selector_triggered = False
        normalized_source_cmd = str(source_cmd or "").replace("_", "-").strip().lower()

        # Help/example tables: a selected row carries a full command line that
        # is split into pipeline stages and prepended verbatim.
        if normalized_source_cmd in HELP_EXAMPLE_SOURCE_COMMANDS and selection_indices:
            try:
                idx = selection_indices[0]
                row_args = ctx.get_current_stage_table_row_selection_args(idx)
            except Exception:
                row_args = None
            tokens: List[str] = []
            if isinstance(row_args, list) and row_args:
                tokens = [str(x) for x in row_args if x is not None]
            if tokens:
                stage_groups = _split_pipeline_tokens(tokens)
                if stage_groups:
                    for stage in reversed(stage_groups):
                        stages.insert(0, stage)
                    selection_indices = []
                    command_expanded = True
                    example_selector_triggered = True
        if not example_selector_triggered:
            if table_type in {"youtube",
                              "soulseek"}:
                command_expanded = False
            elif source_cmd == "search-file" and source_args and "youtube" in source_args:
                command_expanded = False
            else:
                selected_row_args: List[str] = []
                skip_pipe_expansion = source_cmd in {".pipe", ".mpv"} and len(stages) > 0
                # Command expansion via @N:
                # - Default behavior: expand ONLY for single-row selections.
                # - Special case: allow multi-row expansion for add-file directory tables by
                #   combining selected rows into a single `-path file1,file2,...` argument.
                if source_cmd and not skip_pipe_expansion:
                    src = str(source_cmd).replace("_", "-").strip().lower()
                    if src == "add-file" and selection_indices:
                        row_args_list: List[List[str]] = []
                        for idx in selection_indices:
                            try:
                                row_args = ctx.get_current_stage_table_row_selection_args(
                                    idx
                                )
                            except Exception:
                                row_args = None
                            if isinstance(row_args, list) and row_args:
                                row_args_list.append(
                                    [str(x) for x in row_args if x is not None]
                                )

                        # Combine `['-path', <file>]` from each row into one `-path` arg.
                        paths: List[str] = []
                        can_merge = bool(row_args_list) and (
                            len(row_args_list) == len(selection_indices)
                        )
                        if can_merge:
                            for ra in row_args_list:
                                if len(ra) == 2 and str(ra[0]).strip().lower() in {
                                    "-path",
                                    "--path",
                                    "-p",
                                }:
                                    p = str(ra[1]).strip()
                                    if p:
                                        paths.append(p)
                                else:
                                    can_merge = False
                                    break

                        if can_merge and paths:
                            selected_row_args.extend(["-path", ",".join(paths)])
                        elif len(selection_indices) == 1 and row_args_list:
                            selected_row_args.extend(row_args_list[0])
                    else:
                        # Only perform @N command expansion for *single-item* selections.
                        # For multi-item selections (e.g. @*, @1-5), expanding to one row
                        # would silently drop items. In those cases we pipe items downstream.
                        if len(selection_indices) == 1:
                            idx = selection_indices[0]
                            row_args = ctx.get_current_stage_table_row_selection_args(idx)
                            if row_args:
                                selected_row_args.extend(row_args)
                if selected_row_args:
                    if isinstance(source_cmd, list):
                        cmd_list: List[str] = [str(x) for x in source_cmd if x is not None]
                    elif isinstance(source_cmd, str):
                        cmd_list = [source_cmd]
                    else:
                        cmd_list = []
                    # IMPORTANT: Put selected row args *before* source_args.
                    # Rationale: The cmdlet argument parser treats the *first* unknown
                    # token as a positional value (e.g., URL). If `source_args`
                    # contain unknown flags (like -provider which download-file does
                    # not declare), they could be misinterpreted as the positional
                    # URL argument and cause attempts to download strings like
                    # "-provider" (which is invalid). By placing selection args
                    # first we ensure the intended URL/selection token is parsed
                    # as the positional URL and avoid this class of parsing errors.
                    expanded_stage: List[str] = cmd_list + selected_row_args + source_args
                    if first_stage_had_extra_args and stages:
                        expanded_stage += stages[0]
                        stages[0] = expanded_stage
                    else:
                        stages.insert(0, expanded_stage)
                    if pipeline_session and worker_manager:
                        try:
                            worker_manager.log_step(
                                pipeline_session.worker_id,
                                f"@N expansion: {source_cmd} + selected_args={selected_row_args} + source_args={source_args}",
                            )
                        except Exception:
                            pass

        # Resolve the table the selection indices refer to, preferring the
        # stage table, then the display table, then the last result table.
        stage_table = None
        try:
            stage_table = ctx.get_current_stage_table()
        except Exception:
            stage_table = None
        display_table = None
        try:
            display_table = (
                ctx.get_display_table() if hasattr(ctx,
                                                   "get_display_table") else None
            )
        except Exception:
            display_table = None
        if not stage_table and display_table is not None:
            stage_table = display_table
        if not stage_table:
            try:
                stage_table = ctx.get_last_result_table()
            except Exception:
                stage_table = None

        # Prefer selecting from the last selectable *table* (search/playlist)
        # rather than from display-only emitted items, unless we're explicitly
        # selecting from an overlay table.
        try:
            if display_table is not None and stage_table is display_table:
                items_list = ctx.get_last_result_items() or []
            else:
                if hasattr(ctx, "get_last_selectable_result_items"):
                    items_list = ctx.get_last_selectable_result_items() or []
                else:
                    items_list = ctx.get_last_result_items() or []
        except Exception:
            items_list = []
        resolved_items = items_list if items_list else []
        if items_list:
            # Out-of-range indices are silently dropped here; a fully empty
            # match is reported below.
            filtered = [
                resolved_items[i] for i in selection_indices
                if 0 <= i < len(resolved_items)
            ]
            if not filtered:
                print("No items matched selection in pipeline\n")
                return False, None

            # Provider selection expansion (non-terminal): allow certain provider tables
            # (e.g. hifi.album) to expand to multiple downstream items when the user
            # pipes into another stage (e.g. @N | .mpv or @N | add-file).
            table_type_hint = None
            try:
                table_type_hint = (
                    stage_table.table
                    if stage_table is not None and hasattr(stage_table, "table")
                    else None
                )
            except Exception:
                table_type_hint = None
            if stages and isinstance(table_type_hint, str) and table_type_hint.strip().lower() == "hifi.album":
                try:
                    from ProviderCore.registry import get_provider
                    prov = get_provider("hifi", config)
                except Exception:
                    prov = None
                if prov is not None and hasattr(prov, "_extract_album_selection_context") and hasattr(prov, "_tracks_for_album"):
                    try:
                        album_contexts = prov._extract_album_selection_context(filtered)  # type: ignore[attr-defined]
                    except Exception:
                        album_contexts = []
                    track_items: List[Any] = []
                    # De-duplicate tracks by trackId across all selected albums.
                    seen_track_ids: set[int] = set()
                    for album_id, album_title, artist_name in album_contexts or []:
                        try:
                            track_results = prov._tracks_for_album(  # type: ignore[attr-defined]
                                album_id=album_id,
                                album_title=album_title,
                                artist_name=artist_name,
                                limit=500,
                            )
                        except Exception:
                            track_results = []
                        for tr in track_results or []:
                            try:
                                md = getattr(tr, "full_metadata", None)
                                tid = None
                                if isinstance(md, dict):
                                    raw_id = md.get("trackId") or md.get("id")
                                    try:
                                        tid = int(raw_id) if raw_id is not None else None
                                    except Exception:
                                        tid = None
                                if tid is not None:
                                    if tid in seen_track_ids:
                                        continue
                                    seen_track_ids.add(tid)
                            except Exception:
                                pass
                            track_items.append(tr)
                    if track_items:
                        filtered = track_items
                        table_type_hint = "hifi.track"

            # Terminal-stage selections may be fully handled by a
            # provider/store selector (e.g. opening a detail view).
            if PipelineExecutor._maybe_run_class_selector(
                    ctx,
                    config,
                    filtered,
                    stage_is_last=(not stages)):
                return False, None

            from cmdlet._shared import coerce_to_pipe_object

            filtered_pipe_objs = [coerce_to_pipe_object(item) for item in filtered]
            # Single selections pipe a bare object; multi-selections pipe a list.
            piped_result = (
                filtered_pipe_objs
                if len(filtered_pipe_objs) > 1 else filtered_pipe_objs[0]
            )

            if pipeline_session and worker_manager:
                try:
                    selection_parts = [f"@{i+1}" for i in selection_indices]
                    worker_manager.log_step(
                        pipeline_session.worker_id,
                        f"Applied @N selection {' | '.join(selection_parts)}",
                    )
                except Exception:
                    pass
            # Auto-insert downloader stages for provider tables.
            try:
                current_table = ctx.get_current_stage_table()
                if current_table is None and hasattr(ctx, "get_display_table"):
                    current_table = ctx.get_display_table()
                if current_table is None:
                    current_table = ctx.get_last_result_table()
            except Exception:
                current_table = None
            table_type = None
            try:
                if isinstance(table_type_hint, str) and table_type_hint.strip():
                    table_type = table_type_hint
                else:
                    table_type = (
                        current_table.table
                        if current_table and hasattr(current_table, "table") else None
                    )
            except Exception:
                table_type = (
                    current_table.table
                    if current_table and hasattr(current_table, "table") else None
                )

            def _norm_cmd(name: Any) -> str:
                # Normalize a command name for comparison (dashes, lowercase).
                return str(name or "").replace("_", "-").strip().lower()
            auto_stage = None
            if isinstance(table_type, str) and table_type:
                try:
                    from ProviderCore.registry import selection_auto_stage_for_table
                    auto_stage = selection_auto_stage_for_table(table_type)
                except Exception:
                    auto_stage = None
            def _apply_row_action_to_stage(stage_idx: int) -> bool:
                # Replace stages[stage_idx] with the selected row's explicit
                # selection-action tokens; single-row selections only.
                if not selection_indices or len(selection_indices) != 1:
                    return False
                try:
                    row_action = ctx.get_current_stage_table_row_selection_action(
                        selection_indices[0]
                    )
                except Exception:
                    row_action = None
                if not row_action:
                    # Fallback to serialized payload when the table row is unavailable
                    try:
                        items = ctx.get_last_result_items() or []
                        if 0 <= selection_indices[0] < len(items):
                            maybe = items[selection_indices[0]]
                            if isinstance(maybe, dict):
                                candidate = maybe.get("_selection_action")
                                if isinstance(candidate, (list, tuple)):
                                    row_action = [str(x) for x in candidate if x is not None]
                                    debug(f"@N row {selection_indices[0]} restored action from payload: {row_action}")
                    except Exception:
                        row_action = row_action or None
                if not row_action:
                    debug(f"@N row {selection_indices[0]} has no selection_action")
                    return False
                normalized = [str(x) for x in row_action if x is not None]
                if not normalized:
                    return False
                debug(f"Applying row action for row {selection_indices[0]} -> {normalized}")
                if 0 <= stage_idx < len(stages):
                    debug(f"Replacing stage {stage_idx} {stages[stage_idx]} with row action {normalized}")
                    stages[stage_idx] = normalized
                    return True
                return False

            if not stages:
                if isinstance(table_type, str) and table_type.startswith("metadata."):
                    print("Auto-applying metadata selection via get-tag")
                    stages.append(["get-tag"])
                elif auto_stage:
                    try:
                        print(f"Auto-running selection via {auto_stage[0]}")
                    except Exception:
                        pass
                    # Append the auto stage now. If the user also provided a selection
                    # (e.g., @1 | add-file ...), we want to attach the row selection
                    # args *to the auto-inserted stage* so the download command receives
                    # the selected row information immediately.
                    stages.append(list(auto_stage) + (source_args or []))
                    debug(f"Inserted auto stage before row action: {stages[-1]}")
                    # If the caller included a selection (e.g., @1) try to attach
                    # the selection args immediately to the inserted auto stage so
                    # the expansion is effective in a single pass.
                    if selection_indices:
                        try:
                            if not _apply_row_action_to_stage(len(stages) - 1):
                                # Only support single-row selection for auto-attach here
                                if len(selection_indices) == 1:
                                    idx = selection_indices[0]
                                    row_args = ctx.get_current_stage_table_row_selection_args(idx)
                                    if not row_args:
                                        try:
                                            items = ctx.get_last_result_items() or []
                                            if 0 <= idx < len(items):
                                                maybe = items[idx]
                                                if isinstance(maybe, dict):
                                                    candidate = maybe.get("_selection_args")
                                                    if isinstance(candidate, (list, tuple)):
                                                        row_args = [str(x) for x in candidate if x is not None]
                                        except Exception:
                                            row_args = row_args or None
                                    if row_args:
                                        # Place selection args before any existing source args
                                        inserted = stages[-1]
                                        if inserted:
                                            cmd = inserted[0]
                                            tail = [str(x) for x in inserted[1:]]
                                            stages[-1] = [cmd] + [str(x) for x in row_args] + tail
                        except Exception:
                            pass

            else:
                first_cmd = stages[0][0] if stages and stages[0] else None
                if isinstance(table_type, str) and table_type.startswith("metadata.") and first_cmd not in (
                    "get-tag",
                    "get_tag",
                    ".pipe",
                    ".mpv",
                ):
                    print("Auto-inserting get-tag after metadata selection")
                    stages.insert(0, ["get-tag"])
                elif auto_stage:
                    first_cmd_norm = _norm_cmd(first_cmd)
                    auto_cmd_norm = _norm_cmd(auto_stage[0])
                    if first_cmd_norm not in (auto_cmd_norm, ".pipe", ".mpv"):
                        debug(f"Auto-inserting {auto_cmd_norm} after selection")
                        # Insert the auto stage before the user-specified stage
                        stages.insert(0, list(auto_stage) + (source_args or []))
                        debug(f"Inserted auto stage before existing pipeline: {stages[0]}")
                        # If a selection is present, attach the row selection args to the
                        # newly-inserted stage so the download stage runs with the
                        # selected row information.
                        if selection_indices:
                            try:
                                if not _apply_row_action_to_stage(0):
                                    if len(selection_indices) == 1:
                                        idx = selection_indices[0]
                                        row_args = ctx.get_current_stage_table_row_selection_args(idx)
                                        if not row_args:
                                            try:
                                                items = ctx.get_last_result_items() or []
                                                if 0 <= idx < len(items):
                                                    maybe = items[idx]
                                                    if isinstance(maybe, dict):
                                                        candidate = maybe.get("_selection_args")
                                                        if isinstance(candidate, (list, tuple)):
                                                            row_args = [str(x) for x in candidate if x is not None]
                                            except Exception:
                                                row_args = row_args or None
                                        if row_args:
                                            inserted = stages[0]
                                            if inserted:
                                                cmd = inserted[0]
                                                tail = [str(x) for x in inserted[1:]]
                                                stages[0] = [cmd] + [str(x) for x in row_args] + tail
                            except Exception:
                                pass
            # After inserting/appending an auto-stage, continue processing so later
            # selection-expansion logic can still run (e.g., for example selectors).
            return True, piped_result
        else:
            print("No previous results to select from\n")
            return False, None

        return True, None
    @staticmethod
    def _maybe_start_live_progress(config: Any,
                                   stages: List[List[str]]) -> tuple[Any,
                                                                     Dict[int,
                                                                          int]]:
        """Start the multi-level Live progress UI when appropriate.

        Returns ``(progress_ui, pipe_index_by_stage)`` where
        ``pipe_index_by_stage`` maps a stage's index in ``stages`` to its
        progress-pipe index. Both come back empty/None when progress is
        disabled: quiet/background mode, a non-TTY stderr, no eligible
        stages, or any setup failure (all failures are swallowed).
        """
        progress_ui = None
        pipe_index_by_stage: Dict[int,
                                  int] = {}

        try:
            quiet_mode = (
                bool(config.get("_quiet_background_output"))
                if isinstance(config,
                              dict) else False
            )
        except Exception:
            quiet_mode = False

        try:
            import sys as _sys
            # Only show Live progress on an interactive stderr.
            if (not quiet_mode) and bool(getattr(_sys.stderr,
                                                 "isatty", lambda: False)()):
                from SYS.models import PipelineLiveProgress

                pipe_stage_indices: List[int] = []
                pipe_labels: List[str] = []
                for idx, stage_tokens in enumerate(stages):
                    if not stage_tokens:
                        continue
                    name = str(stage_tokens[0]).replace("_", "-").lower()
                    # Selection stages (@, @N, ...) never get a progress pipe.
                    if name == "@" or name.startswith("@"):
                        continue
                    # add-file directory selector stage: avoid Live progress so the
                    # selection table renders cleanly.
                    if name in {"add-file",
                                "add_file"}:
                        try:
                            from pathlib import Path as _Path
                            toks = list(stage_tokens[1:])
                            i = 0
                            while i < len(toks):
                                t = str(toks[i])
                                low = t.lower().strip()
                                if low in {"-path",
                                           "--path",
                                           "-p"} and i + 1 < len(toks):
                                    nxt = str(toks[i + 1])
                                    if nxt and ("," not in nxt):
                                        p = _Path(nxt)
                                        if p.exists() and p.is_dir():
                                            name = ""  # mark as skipped
                                            break
                                    i += 2
                                    continue
                                i += 1
                        except Exception:
                            pass
                        if not name:
                            continue
                    # Display-only: avoid Live progress for relationship viewing.
                    # This keeps `@1 | get-relationship` clean and prevents progress UI
                    # from interfering with Rich tables/panels.
                    if name in {"get-relationship",
                                "get-rel"}:
                        continue
                    # `.pipe` (MPV) is an interactive launcher; disable pipeline Live progress
                    # for it because it doesn't meaningfully "complete" (mpv may keep running)
                    # and Live output interferes with MPV playlist UI.
                    if name in {".pipe", ".mpv"}:
                        continue
                    # `.matrix` uses a two-phase picker (@N then .matrix -send). Pipeline Live
                    # progress can linger across those phases and interfere with interactive output.
                    if name == ".matrix":
                        continue
                    # `delete-file` prints a Rich table directly; Live progress interferes and
                    # can truncate/overwrite the output.
                    if name in {"delete-file",
                                "del-file"}:
                        continue
                    pipe_stage_indices.append(idx)
                    pipe_labels.append(name)

                if pipe_labels:
                    progress_ui = PipelineLiveProgress(pipe_labels, enabled=True)
                    progress_ui.start()
                    try:
                        from SYS import pipeline as _pipeline_ctx

                        if hasattr(_pipeline_ctx, "set_live_progress"):
                            _pipeline_ctx.set_live_progress(progress_ui)
                    except Exception:
                        pass
                    pipe_index_by_stage = {
                        stage_idx: pipe_idx
                        for pipe_idx, stage_idx in enumerate(pipe_stage_indices)
                    }
        except Exception:
            progress_ui = None
            pipe_index_by_stage = {}

        return progress_ui, pipe_index_by_stage
def execute_tokens(self, tokens: List[str]) -> None:
from cmdlet import REGISTRY
from SYS import pipeline as ctx
2025-12-21 05:10:09 -08:00
2025-12-22 02:11:53 -08:00
try:
self._try_clear_pipeline_stop(ctx)
2025-12-25 04:49:22 -08:00
2025-12-27 14:50:59 -08:00
# REPL guard: stage-local tables should not persist across independent
# commands. Selection stages can always seed from last/display tables.
try:
if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(None)
except Exception:
pass
2025-12-25 04:49:22 -08:00
# Preflight (URL-duplicate prompts, etc.) should be cached within a single
# pipeline run, not across independent pipelines.
try:
ctx.store_value("preflight",
{})
2025-12-25 04:49:22 -08:00
except Exception:
pass
2025-12-22 02:11:53 -08:00
stages = self._split_stages(tokens)
if not stages:
print("Invalid pipeline syntax\n")
return
self._maybe_seed_current_stage_table(ctx)
stages = self._maybe_apply_pending_pipeline_tail(ctx, stages)
config = self._config_loader.load()
config = self._apply_quiet_background_flag(config)
2025-12-21 05:10:09 -08:00
2025-12-29 17:05:03 -08:00
(
stages,
first_stage_selection_indices,
first_stage_had_extra_args,
first_stage_select_all,
) = self._extract_first_stage_selection_tokens(stages)
2025-12-22 02:11:53 -08:00
first_stage_selection_indices = self._apply_select_all_if_requested(
ctx,
first_stage_selection_indices,
first_stage_select_all
2025-12-22 02:11:53 -08:00
)
2025-12-21 05:10:09 -08:00
2025-12-22 02:11:53 -08:00
piped_result: Any = None
worker_manager = WorkerManagerRegistry.ensure(config)
pipeline_text = " | ".join(" ".join(stage) for stage in stages)
2025-12-29 17:05:03 -08:00
pipeline_session = WorkerStages.begin_pipeline(
worker_manager,
pipeline_text=pipeline_text,
config=config
2025-12-29 17:05:03 -08:00
)
2026-01-01 20:37:27 -08:00
if pipeline_session and worker_manager:
self._log_pipeline_event(
worker_manager,
pipeline_session.worker_id,
f"Pipeline start: {pipeline_text or '(empty pipeline)'}",
)
2025-12-22 02:11:53 -08:00
raw_stage_texts = self._get_raw_stage_texts(ctx)
self._maybe_enable_background_notifier(
worker_manager,
config,
pipeline_session
)
2025-12-22 02:11:53 -08:00
pipeline_status = "completed"
pipeline_error = ""
progress_ui = None
pipe_index_by_stage: Dict[int,
int] = {}
2025-12-22 02:11:53 -08:00
try:
ok, initial_piped = self._maybe_apply_initial_selection(
ctx,
config,
stages,
selection_indices=first_stage_selection_indices,
first_stage_had_extra_args=first_stage_had_extra_args,
worker_manager=worker_manager,
pipeline_session=pipeline_session,
)
if not ok:
return
if initial_piped is not None:
piped_result = initial_piped
2026-01-01 20:37:27 -08:00
# REPL guard: prevent add-relationship before add-file for download-file pipelines.
if not self._validate_download_file_relationship_order(stages):
2025-12-23 16:36:39 -08:00
pipeline_status = "failed"
pipeline_error = "Invalid pipeline order"
return
2025-12-22 02:11:53 -08:00
# ------------------------------------------------------------------
# Multi-level pipeline progress (pipes = stages, tasks = items)
# ------------------------------------------------------------------
progress_ui, pipe_index_by_stage = self._maybe_start_live_progress(config, stages)
2025-12-21 05:10:09 -08:00
2025-12-20 02:12:45 -08:00
for stage_index, stage_tokens in enumerate(stages):
if not stage_tokens:
continue
2025-12-30 04:47:13 -08:00
raw_stage_name = str(stage_tokens[0])
cmd_name = raw_stage_name.replace("_", "-").lower()
2025-12-20 02:12:45 -08:00
stage_args = stage_tokens[1:]
if cmd_name == "@":
2025-12-25 04:49:22 -08:00
# Prefer piping the last emitted/visible items (e.g. add-file results)
# over the result-table subject. The subject can refer to older context
# (e.g. a playlist row) and may not contain store+hash.
last_items = None
try:
last_items = ctx.get_last_result_items()
except Exception:
last_items = None
if last_items:
from cmdlet._shared import coerce_to_pipe_object
try:
pipe_items = [
coerce_to_pipe_object(x) for x in list(last_items)
]
2025-12-25 04:49:22 -08:00
except Exception:
pipe_items = list(last_items)
piped_result = pipe_items if len(pipe_items
) > 1 else pipe_items[0]
2025-12-25 04:49:22 -08:00
try:
ctx.set_last_items(pipe_items)
except Exception:
pass
if pipeline_session and worker_manager:
try:
2025-12-29 17:05:03 -08:00
worker_manager.log_step(
pipeline_session.worker_id,
"@ used last result items"
2025-12-29 17:05:03 -08:00
)
2025-12-25 04:49:22 -08:00
except Exception:
pass
continue
2025-12-20 02:12:45 -08:00
subject = ctx.get_last_result_subject()
if subject is None:
print("No current result context available for '@'\n")
pipeline_status = "failed"
2025-12-25 04:49:22 -08:00
pipeline_error = "No result items/subject for @"
2025-12-20 02:12:45 -08:00
return
piped_result = subject
try:
subject_items = subject if isinstance(subject,
list) else [subject]
2025-12-20 02:12:45 -08:00
ctx.set_last_items(subject_items)
except Exception:
pass
if pipeline_session and worker_manager:
try:
2025-12-29 17:05:03 -08:00
worker_manager.log_step(
pipeline_session.worker_id,
"@ used current table subject"
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
except Exception:
pass
continue
if cmd_name.startswith("@"): # selection stage
2025-12-30 04:47:13 -08:00
selection_token = raw_stage_name
selection = SelectionSyntax.parse(selection_token)
filter_spec = SelectionFilterSyntax.parse(selection_token)
is_select_all = selection_token.strip() == "@*"
if selection is None and filter_spec is None and not is_select_all:
print(f"Invalid selection: {selection_token}\n")
2025-12-20 02:12:45 -08:00
pipeline_status = "failed"
2025-12-30 04:47:13 -08:00
pipeline_error = f"Invalid selection {selection_token}"
2025-12-20 02:12:45 -08:00
return
selected_indices = []
2025-12-25 04:49:22 -08:00
# Prefer selecting from the last selectable *table* (search/playlist)
# rather than from display-only emitted items, unless we're explicitly
# selecting from an overlay table.
display_table = None
try:
2025-12-29 17:05:03 -08:00
display_table = (
ctx.get_display_table()
if hasattr(ctx,
"get_display_table") else None
2025-12-29 17:05:03 -08:00
)
2025-12-25 04:49:22 -08:00
except Exception:
display_table = None
2025-12-20 02:12:45 -08:00
stage_table = ctx.get_current_stage_table()
2026-01-03 21:23:55 -08:00
# Selection should operate on the table the user sees.
# If a display overlay table exists, force it as the current-stage table
# so provider selectors (e.g. hifi.album -> tracks) behave consistently.
try:
if display_table is not None and hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(display_table)
stage_table = display_table
except Exception:
pass
2025-12-25 04:49:22 -08:00
if not stage_table and display_table is not None:
stage_table = display_table
2025-12-20 02:12:45 -08:00
if not stage_table:
stage_table = ctx.get_last_result_table()
2025-12-25 04:49:22 -08:00
2026-01-03 21:23:55 -08:00
try:
if hasattr(ctx, "debug_table_state"):
ctx.debug_table_state(f"selection {selection_token}")
except Exception:
pass
2025-12-25 04:49:22 -08:00
if display_table is not None and stage_table is display_table:
items_list = ctx.get_last_result_items() or []
else:
if hasattr(ctx, "get_last_selectable_result_items"):
items_list = ctx.get_last_selectable_result_items(
) or []
2025-12-25 04:49:22 -08:00
else:
items_list = ctx.get_last_result_items() or []
if is_select_all:
selected_indices = list(range(len(items_list)))
2025-12-30 04:47:13 -08:00
elif filter_spec is not None:
selected_indices = [
i for i, item in enumerate(items_list)
if SelectionFilterSyntax.matches(item, filter_spec)
]
2025-12-25 04:49:22 -08:00
else:
selected_indices = sorted(
[i - 1 for i in selection]
) # type: ignore[arg-type]
2025-12-25 04:49:22 -08:00
2025-12-22 02:11:53 -08:00
resolved_items = items_list if items_list else []
2025-12-29 17:05:03 -08:00
filtered = [
resolved_items[i] for i in selected_indices
2025-12-29 17:05:03 -08:00
if 0 <= i < len(resolved_items)
]
2025-12-20 02:12:45 -08:00
if not filtered:
print("No items matched selection\n")
pipeline_status = "failed"
pipeline_error = "Empty selection"
return
2025-12-30 04:47:13 -08:00
# Filter UX: if the stage token is a filter and it's terminal,
# render a filtered table overlay rather than selecting/auto-downloading.
stage_is_last = (stage_index + 1 >= len(stages))
if filter_spec is not None and stage_is_last:
try:
base_table = stage_table
if base_table is None:
base_table = ctx.get_last_result_table()
if base_table is not None and hasattr(base_table, "copy_with_title"):
new_table = base_table.copy_with_title(getattr(base_table, "title", "") or "Results")
else:
new_table = ResultTable(getattr(base_table, "title", "") if base_table is not None else "Results")
try:
if base_table is not None and getattr(base_table, "table", None):
new_table.set_table(str(getattr(base_table, "table")))
except Exception:
pass
try:
# Attach a one-line header so users see the active filter.
safe = str(selection_token)[1:].strip()
new_table.set_header_line(f'filter: "{safe}"')
except Exception:
pass
for item in filtered:
new_table.add_result(item)
try:
ctx.set_last_result_table_overlay(new_table, items=list(filtered), subject=ctx.get_last_result_subject())
except Exception:
pass
try:
stdout_console().print()
stdout_console().print(new_table)
except Exception:
pass
except Exception:
pass
continue
2025-12-25 04:49:22 -08:00
# UX: selecting a single URL row from get-url tables should open it.
# Only do this when the selection stage is terminal to avoid surprising
# side-effects in pipelines like `@1 | download-file`.
current_table = ctx.get_current_stage_table(
) or ctx.get_last_result_table()
2025-12-25 04:49:22 -08:00
if (not is_select_all) and (len(filtered) == 1):
try:
PipelineExecutor._maybe_open_url_selection(
current_table,
filtered,
stage_is_last=(stage_index + 1 >= len(stages)),
)
except Exception:
pass
2025-12-29 17:05:03 -08:00
if PipelineExecutor._maybe_run_class_selector(
2025-12-30 04:47:13 -08:00
ctx,
config,
filtered,
stage_is_last=(stage_index + 1 >= len(stages))):
2025-12-20 02:12:45 -08:00
return
# Special case: selecting multiple tags from get-tag and piping into delete-tag
# should batch into a single operation (one backend call).
next_cmd = None
try:
if stage_index + 1 < len(stages) and stages[stage_index +
1]:
next_cmd = str(stages[stage_index + 1][0]
).replace("_",
"-").lower()
2025-12-20 02:12:45 -08:00
except Exception:
next_cmd = None
def _is_tag_row(obj: Any) -> bool:
try:
if (hasattr(obj,
"__class__")
and obj.__class__.__name__ == "TagItem"
and hasattr(obj,
"tag_name")):
2025-12-20 02:12:45 -08:00
return True
except Exception:
pass
try:
if isinstance(obj, dict) and obj.get("tag_name"):
return True
except Exception:
pass
return False
if (next_cmd in {"delete-tag",
"delete_tag"} and len(filtered) > 1
and all(_is_tag_row(x) for x in filtered)):
2025-12-20 02:12:45 -08:00
from cmdlet._shared import get_field
tags: List[str] = []
first_hash = None
first_store = None
first_path = None
for item in filtered:
tag_name = get_field(item, "tag_name")
if tag_name:
tags.append(str(tag_name))
if first_hash is None:
first_hash = get_field(item, "hash")
if first_store is None:
first_store = get_field(item, "store")
if first_path is None:
first_path = get_field(item,
"path") or get_field(
item,
"target"
)
2025-12-20 02:12:45 -08:00
if tags:
grouped = {
"table": "tag.selection",
"media_kind": "tag",
"hash": first_hash,
"store": first_store,
"path": first_path,
"tag": tags,
}
piped_result = grouped
continue
from cmdlet._shared import coerce_to_pipe_object
filtered_pipe_objs = [
coerce_to_pipe_object(item) for item in filtered
]
2025-12-29 17:05:03 -08:00
piped_result = (
filtered_pipe_objs
if len(filtered_pipe_objs) > 1 else filtered_pipe_objs[0]
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
current_table = ctx.get_current_stage_table(
) or ctx.get_last_result_table()
2025-12-29 17:05:03 -08:00
table_type = (
current_table.table
if current_table and hasattr(current_table,
"table") else None
2025-12-29 17:05:03 -08:00
)
2025-12-30 04:47:13 -08:00
def _norm_stage_cmd(name: Any) -> str:
return str(name or "").replace("_", "-").strip().lower()
next_cmd = None
if stage_index + 1 < len(stages) and stages[stage_index + 1]:
next_cmd = _norm_stage_cmd(stages[stage_index + 1][0])
2026-01-03 03:37:48 -08:00
auto_stage = None
if isinstance(table_type, str) and table_type:
try:
from ProviderCore.registry import selection_auto_stage_for_table
# Preserve historical behavior: only forward selection-stage args
# to the auto stage when we are appending a new last stage.
at_end = bool(stage_index + 1 >= len(stages))
auto_stage = selection_auto_stage_for_table(
table_type,
stage_args if at_end else None,
)
except Exception:
auto_stage = None
2025-12-30 04:47:13 -08:00
# Auto-insert downloader stages for provider tables.
# IMPORTANT: do not auto-download for filter selections; they may match many rows.
if filter_spec is None:
if stage_index + 1 >= len(stages):
2026-01-03 03:37:48 -08:00
if auto_stage:
try:
print(f"Auto-running selection via {auto_stage[0]}")
except Exception:
pass
stages.append(list(auto_stage))
2025-12-30 04:47:13 -08:00
else:
2026-01-03 03:37:48 -08:00
if auto_stage:
auto_cmd = _norm_stage_cmd(auto_stage[0])
if next_cmd not in (auto_cmd, ".pipe", ".mpv"):
debug(f"Auto-inserting {auto_cmd} after selection")
stages.insert(stage_index + 1, list(auto_stage))
2025-12-20 02:12:45 -08:00
continue
cmd_fn = REGISTRY.get(cmd_name)
2026-01-01 20:37:27 -08:00
if not cmd_fn:
try:
mod = import_cmd_module(cmd_name)
data = getattr(mod, "CMDLET", None) if mod else None
if data and hasattr(data, "exec") and callable(getattr(data, "exec")):
run_fn = getattr(data, "exec")
REGISTRY[cmd_name] = run_fn
cmd_fn = run_fn
except Exception:
cmd_fn = None
2025-12-20 02:12:45 -08:00
if not cmd_fn:
print(f"Unknown command: {cmd_name}\n")
pipeline_status = "failed"
pipeline_error = f"Unknown command: {cmd_name}"
return
stage_session = WorkerStages.begin_stage(
worker_manager,
cmd_name=cmd_name,
stage_tokens=stage_tokens,
config=config,
command_text=" ".join(stage_tokens).strip(),
)
stage_worker_id = stage_session.worker_id if stage_session else None
2026-01-01 20:37:27 -08:00
stage_summary = self._summarize_stage_text(stage_tokens)
if pipeline_session and worker_manager:
summary_text = stage_summary or cmd_name
self._log_pipeline_event(
worker_manager,
pipeline_session.worker_id,
f"Stage {stage_index + 1}/{len(stages)} start: {summary_text}",
)
2025-12-21 05:10:09 -08:00
# Estimate how many per-item tasks this pipe will run.
pipe_idx = pipe_index_by_stage.get(stage_index)
if progress_ui is not None and pipe_idx is not None:
try:
# Prefer piped input for task counts.
if isinstance(piped_result, list):
total_items = len(piped_result)
preview_items: Optional[List[Any]] = list(piped_result)
elif piped_result is not None:
total_items = 1
preview_items = [piped_result]
else:
# First stage without piped input: infer from URL-ish args.
preview: List[Any] = []
toks = list(stage_tokens[1:])
i = 0
while i < len(toks):
t = str(toks[i])
low = t.lower().strip()
if (cmd_name == "add-file" and low in {"-path",
"--path",
"-p"}
and i + 1 < len(toks)):
2025-12-27 06:05:07 -08:00
nxt = str(toks[i + 1])
if nxt:
if "," in nxt:
2025-12-29 17:05:03 -08:00
parts = [
p.strip().strip("\"'")
for p in nxt.split(",")
2025-12-29 17:05:03 -08:00
]
2025-12-27 06:05:07 -08:00
parts = [p for p in parts if p]
if parts:
preview.extend(parts)
i += 2
continue
else:
preview.append(nxt)
i += 2
continue
if low in {"-url",
"--url"} and i + 1 < len(toks):
2025-12-21 05:10:09 -08:00
nxt = str(toks[i + 1])
if nxt and not nxt.startswith("-"):
preview.append(nxt)
i += 2
continue
if (not t.startswith("-")) and ("://" in low
or low.startswith(
("magnet:",
"torrent:"))):
2025-12-21 05:10:09 -08:00
preview.append(t)
i += 1
preview_items = preview if preview else None
total_items = len(preview) if preview else 1
2025-12-29 17:05:03 -08:00
progress_ui.begin_pipe(
pipe_idx,
total_items=int(total_items),
items_preview=preview_items
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
pass
on_emit = None
if progress_ui is not None and pipe_idx is not None:
2025-12-22 02:11:53 -08:00
_ui = cast(Any, progress_ui)
def _on_emit(
    obj: Any,
    _idx: int = int(pipe_idx),
    _progress=_ui
) -> None:
    """Forward one emitted pipeline item to the live progress UI.

    The pipe index and progress handle are frozen as argument defaults
    at definition time so the callback stays valid after the loop
    variables move on. Progress reporting is best-effort: any failure
    in the UI is swallowed so it can never break the pipeline stage.
    """
    try:
        _progress.on_emit(_idx, obj)
    except Exception:
        pass
2025-12-29 17:05:03 -08:00
2025-12-21 05:10:09 -08:00
on_emit = _on_emit
2025-12-20 02:12:45 -08:00
pipeline_ctx = ctx.PipelineStageContext(
stage_index=stage_index,
total_stages=len(stages),
2025-12-21 16:59:37 -08:00
pipe_index=pipe_idx,
2025-12-20 02:12:45 -08:00
worker_id=stage_worker_id,
2025-12-21 05:10:09 -08:00
on_emit=on_emit,
2025-12-20 02:12:45 -08:00
)
ctx.set_stage_context(pipeline_ctx)
stage_status = "completed"
stage_error = ""
stage_label = f"stage {stage_index + 1}/{len(stages)} ({cmd_name})"
try:
# Avoid leaking interactive selection tables across stages.
# (Selection/expansion happens before this loop, so clearing here is safe.)
try:
if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(None)
except Exception:
pass
2025-12-20 23:57:44 -08:00
try:
if hasattr(ctx, "set_current_cmdlet_name"):
ctx.set_current_cmdlet_name(cmd_name)
except Exception:
pass
try:
if hasattr(ctx, "set_current_stage_text"):
stage_text = ""
if raw_stage_texts and stage_index < len(raw_stage_texts
):
candidate = str(raw_stage_texts[stage_index]
or "").strip()
2025-12-20 23:57:44 -08:00
if candidate:
try:
cand_tokens = shlex.split(candidate)
except Exception:
cand_tokens = candidate.split()
if cand_tokens:
first = str(cand_tokens[0]
).replace("_",
"-").lower()
2025-12-20 23:57:44 -08:00
if first == cmd_name:
stage_text = candidate
if not stage_text:
stage_text = " ".join(stage_tokens).strip()
ctx.set_current_stage_text(stage_text)
except Exception:
pass
2026-01-03 03:37:48 -08:00
# `.pipe`/`.mpv` is typically the terminal interactive stage (MPV UI).
2025-12-21 16:59:37 -08:00
# Stop Live progress before running it so output doesn't get stuck behind Live.
2026-01-03 03:37:48 -08:00
if (cmd_name in {".pipe", ".mpv"} and progress_ui is not None
and (stage_index + 1 >= len(stages))):
2025-12-21 16:59:37 -08:00
try:
progress_ui.stop()
except Exception:
pass
try:
from SYS import pipeline as _pipeline_ctx
2025-12-29 17:05:03 -08:00
2025-12-21 16:59:37 -08:00
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
except Exception:
pass
progress_ui = None
2025-12-20 02:12:45 -08:00
ret_code = cmd_fn(piped_result, list(stage_args), config)
stage_is_last = stage_index + 1 >= len(stages)
2025-12-21 05:10:09 -08:00
# Graceful early-stop: preflight declined, etc.
try:
2025-12-29 17:05:03 -08:00
stop_req = (
ctx.get_pipeline_stop()
if hasattr(ctx,
"get_pipeline_stop") else None
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
stop_req = None
if stop_req is not None:
# Do not treat as an error; just end the pipeline quietly.
pipeline_status = "completed"
pipeline_error = ""
return
2025-12-20 02:12:45 -08:00
emits: List[Any] = []
if getattr(pipeline_ctx, "emits", None) is not None:
emits = list(pipeline_ctx.emits or [])
2025-12-23 16:36:39 -08:00
# Shared `-path` behavior: persist temp/PATH artifacts to destination.
if emits:
try:
from cmdlet import _shared as sh
emits = sh.apply_output_path_from_pipeobjects(
cmd_name=cmd_name,
args=list(stage_args),
emits=emits,
)
try:
pipeline_ctx.emits = list(emits)
except Exception:
pass
except Exception:
pass
2025-12-20 02:12:45 -08:00
if emits:
# If the cmdlet already installed an overlay table (e.g. get-tag),
# don't overwrite it: set_last_result_items_only() would clear the
# overlay table/subject and break '@' subject piping.
try:
2025-12-29 17:05:03 -08:00
has_overlay = (
bool(ctx.get_display_table())
if hasattr(ctx,
"get_display_table") else False
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
except Exception:
has_overlay = False
if not has_overlay:
ctx.set_last_result_items_only(emits)
piped_result = emits
else:
piped_result = None
2026-01-01 20:37:27 -08:00
# Some cmdlets (notably download-file format selection) populate a selectable
2025-12-20 02:12:45 -08:00
# current-stage table without emitting pipeline items. In these cases, render
# the table and pause the pipeline so the user can pick @N.
2025-12-29 17:05:03 -08:00
stage_table = (
ctx.get_current_stage_table()
if hasattr(ctx,
"get_current_stage_table") else None
2025-12-29 17:05:03 -08:00
)
stage_table_type = (
str(getattr(stage_table,
"table",
"") or "").strip().lower()
if stage_table else ""
2025-12-29 17:05:03 -08:00
)
2025-12-25 04:49:22 -08:00
try:
2025-12-29 17:05:03 -08:00
stage_table_source = (
str(getattr(stage_table,
"source_command",
"") or "").strip().replace("_",
"-").lower()
if stage_table else ""
2025-12-29 17:05:03 -08:00
)
2025-12-25 04:49:22 -08:00
except Exception:
stage_table_source = ""
if ((not stage_is_last) and (not emits) and cmd_name in {
2026-01-01 20:37:27 -08:00
"download-file",
"download-data",
"download_data",
} and stage_table is not None
2026-01-01 20:37:27 -08:00
and (stage_table_type in {
"ytdlp.formatlist",
"download-file",
"bandcamp",
"youtube",
} or stage_table_source in {"download-file"}
2026-01-03 03:37:48 -08:00
or stage_table_type in {"internetarchive.format", "internetarchive.formats"}
2026-01-01 20:37:27 -08:00
or stage_table_source in {"download-file"})):
2025-12-20 02:12:45 -08:00
try:
is_selectable = not bool(
getattr(stage_table,
"no_choice",
False)
)
2025-12-20 02:12:45 -08:00
except Exception:
is_selectable = True
if is_selectable:
try:
2025-12-29 17:05:03 -08:00
already_rendered = bool(
getattr(
stage_table,
"_rendered_by_cmdlet",
False
)
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
except Exception:
already_rendered = False
if not already_rendered:
2025-12-21 05:10:09 -08:00
# Stop the Live progress display before printing a selectable table.
# Printing while Live is active can cause the table to be truncated/overwritten.
if progress_ui is not None:
try:
if pipe_idx is not None:
2025-12-29 17:05:03 -08:00
progress_ui.finish_pipe(
int(pipe_idx),
force_complete=True
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
pass
try:
progress_ui.stop()
except Exception:
pass
try:
from SYS import pipeline as _pipeline_ctx
2025-12-29 17:05:03 -08:00
if hasattr(_pipeline_ctx,
"set_live_progress"):
2025-12-21 05:10:09 -08:00
_pipeline_ctx.set_live_progress(None)
except Exception:
pass
progress_ui = None
2025-12-20 23:57:44 -08:00
stdout_console().print()
stdout_console().print(stage_table)
2025-12-20 02:12:45 -08:00
2025-12-25 04:49:22 -08:00
# Always pause the pipeline when a selectable table was produced.
# The user will continue by running @N/@* which will re-attach the
# pending downstream stages.
2025-12-20 02:12:45 -08:00
try:
remaining = stages[stage_index + 1:]
2025-12-20 02:12:45 -08:00
source_cmd = (
ctx.get_current_stage_table_source_command()
if hasattr(
ctx,
"get_current_stage_table_source_command"
) else None
2025-12-20 02:12:45 -08:00
)
if remaining and hasattr(
ctx,
"set_pending_pipeline_tail"):
2025-12-29 17:05:03 -08:00
ctx.set_pending_pipeline_tail(
remaining,
source_command=source_cmd or cmd_name
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
except Exception:
pass
return
# For the final stage, many cmdlets rely on the runner to render the
# table they placed into pipeline context (e.g. get-tag). Prefer a
# display table if one exists, otherwise the current-stage table.
if stage_is_last:
2025-12-21 05:10:09 -08:00
# Stop the Live progress display before printing the final table.
# This avoids cursor-control interactions that can truncate output.
if progress_ui is not None:
try:
if pipe_idx is not None:
2025-12-29 17:05:03 -08:00
progress_ui.finish_pipe(
int(pipe_idx),
force_complete=(
stage_status == "completed"
),
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
pass
try:
progress_ui.stop()
except Exception:
pass
try:
from SYS import pipeline as _pipeline_ctx
2025-12-29 17:05:03 -08:00
2025-12-21 05:10:09 -08:00
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
except Exception:
pass
progress_ui = None
2025-12-20 02:12:45 -08:00
final_table = None
try:
2025-12-29 17:05:03 -08:00
final_table = (
ctx.get_display_table()
if hasattr(ctx,
"get_display_table") else None
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
except Exception:
final_table = None
if final_table is None:
final_table = stage_table
2025-12-21 05:10:09 -08:00
# If the cmdlet emitted results but didn't supply a fresh table, it's
# common for `stage_table` to still point at the previous stage's table
# (e.g. add-file's canonical store table). In that case, prefer rendering
# the emitted results so the user sees the actual output of this stage.
if (emits and (ctx.get_display_table() if hasattr(
ctx,
"get_display_table") else None) is None):
2025-12-21 05:10:09 -08:00
try:
2025-12-29 17:05:03 -08:00
src_cmd = (
str(
getattr(final_table,
"source_command",
"") or ""
).strip().lower() if final_table else ""
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
src_cmd = ""
try:
cur_cmd = str(cmd_name
or "").strip().replace("_",
"-").lower()
2025-12-21 05:10:09 -08:00
except Exception:
cur_cmd = ""
if ((final_table is None) or (not src_cmd)
or (src_cmd.replace("_",
"-") != cur_cmd)):
2025-12-21 05:10:09 -08:00
try:
2025-12-29 17:05:03 -08:00
table_title = CmdletExecutor._get_table_title_for_command(
cmd_name,
emits,
list(stage_args)
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
table_title = "Results"
table = ResultTable(table_title)
for item in emits:
table.add_result(item)
try:
if hasattr(ctx,
"set_last_result_table_overlay"):
ctx.set_last_result_table_overlay(
table,
emits
)
2025-12-21 05:10:09 -08:00
if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(table)
except Exception:
pass
final_table = table
2025-12-20 23:57:44 -08:00
if final_table is not None:
2025-12-20 02:12:45 -08:00
try:
2025-12-29 17:05:03 -08:00
already_rendered = bool(
getattr(
final_table,
"_rendered_by_cmdlet",
False
)
2025-12-29 17:05:03 -08:00
)
2025-12-20 02:12:45 -08:00
except Exception:
already_rendered = False
if not already_rendered:
2025-12-20 23:57:44 -08:00
stdout_console().print()
stdout_console().print(final_table)
2025-12-20 02:12:45 -08:00
2025-12-21 05:10:09 -08:00
# (Fallback handled above by synthesizing an overlay ResultTable.)
2025-12-20 02:12:45 -08:00
if isinstance(ret_code, int) and ret_code != 0:
stage_status = "failed"
stage_error = f"exit code {ret_code}"
print(f"[{stage_label} exit code: {ret_code}]\n")
pipeline_status = "failed"
pipeline_error = f"{stage_label} failed ({stage_error})"
return
except Exception as exc:
stage_status = "failed"
stage_error = f"{type(exc).__name__}: {exc}"
print(f"[error in {stage_label}]: {stage_error}\n")
pipeline_status = "failed"
pipeline_error = f"{stage_label} error: {exc}"
return
finally:
2026-01-01 20:37:27 -08:00
if pipeline_session and worker_manager:
status_label = (
"completed" if stage_status == "completed" else "failed"
)
msg = f"{stage_label} {status_label}"
if stage_error and stage_status != "completed":
msg += f": {stage_error}"
self._log_pipeline_event(
worker_manager,
pipeline_session.worker_id,
msg,
)
2025-12-21 05:10:09 -08:00
if progress_ui is not None and pipe_idx is not None:
try:
2025-12-29 17:05:03 -08:00
progress_ui.finish_pipe(
int(pipe_idx),
force_complete=(stage_status == "completed")
2025-12-29 17:05:03 -08:00
)
2025-12-21 05:10:09 -08:00
except Exception:
pass
2025-12-20 23:57:44 -08:00
try:
if hasattr(ctx, "clear_current_cmdlet_name"):
ctx.clear_current_cmdlet_name()
except Exception:
pass
try:
if hasattr(ctx, "clear_current_stage_text"):
ctx.clear_current_stage_text()
except Exception:
pass
2025-12-20 02:12:45 -08:00
if stage_session:
stage_session.close(
status=stage_status,
error_msg=stage_error
)
2025-12-20 02:12:45 -08:00
elif pipeline_session and worker_manager:
try:
worker_manager.log_step(
pipeline_session.worker_id,
f"{stage_label} {'completed' if stage_status == 'completed' else 'failed'}",
)
except Exception:
pass
if not stages and piped_result is not None:
2025-12-31 05:17:37 -08:00
# Special-case: selecting metadata rows (e.g., get-tag -scrape) should
# immediately apply tags to the target item instead of just echoing a
# selection table.
try:
items = piped_result if isinstance(piped_result, list) else [piped_result]
applied_any = False
from cmdlet._shared import normalize_hash # type: ignore
from cmdlet.get_tag import _filter_scraped_tags, _emit_tags_as_table # type: ignore
from Store import Store # type: ignore
cfg_loader = ConfigLoader(root=Path.cwd())
config = cfg_loader.load()
for item in items:
if not isinstance(item, dict):
continue
provider = item.get("provider")
tags = item.get("tag")
if not provider or not isinstance(tags, list) or not tags:
continue
file_hash = normalize_hash(
item.get("hash")
or item.get("hash_hex")
or item.get("file_hash")
or item.get("sha256")
)
store_name = item.get("store") or item.get("storage")
subject_path = (
item.get("path")
or item.get("target")
or item.get("filename")
)
if str(provider).strip().lower() == "ytdlp":
apply_tags = [str(t) for t in tags if t is not None]
else:
apply_tags = _filter_scraped_tags([str(t) for t in tags if t is not None])
if not apply_tags:
continue
if store_name and file_hash:
try:
backend = Store(config)[str(store_name)]
backend.add_tag(file_hash, apply_tags, config=config)
try:
updated_tags, _src = backend.get_tag(file_hash, config=config)
except Exception:
updated_tags = apply_tags
_emit_tags_as_table(
tags_list=list(updated_tags or apply_tags),
file_hash=file_hash,
store=str(store_name),
service_name=None,
config=config,
item_title=str(item.get("title") or provider),
path=str(subject_path) if subject_path else None,
subject=item,
)
applied_any = True
continue
except Exception:
pass
# No store/hash: just emit the tags to the pipeline/view.
_emit_tags_as_table(
tags_list=list(apply_tags),
file_hash=file_hash,
store=str(store_name or "local"),
service_name=None,
config=config,
item_title=str(item.get("title") or provider),
path=str(subject_path) if subject_path else None,
subject=item,
)
applied_any = True
if applied_any:
# Selection handled; skip default selection echo.
return
except Exception:
# Fall back to default selection rendering on any failure.
pass
2026-01-02 02:28:59 -08:00
items = piped_result if isinstance(piped_result, list) else [piped_result]
# Special-case: selecting notes should show the text content directly.
note_like_items = [
i for i in items
if isinstance(i, dict) and ("note_text" in i or "note" in i)
]
if note_like_items:
for idx, item in enumerate(note_like_items, 1):
note_name = str(
item.get("note_name")
or item.get("name")
or f"note {idx}"
).strip()
note_text = str(item.get("note_text") or item.get("note") or "")
note_text = note_text[:999]
stdout_console().print()
stdout_console().print(f"{note_name}:\n{note_text}")
ctx.set_last_result_items_only(items)
return
2026-01-11 02:26:39 -08:00
# Special-case: selecting a single image should show it directly.
if len(items) == 1:
item = items[0]
# Try to get hash and store to resolve through the backend
file_hash = None
store_name = None
if isinstance(item, dict):
file_hash = item.get("hash")
store_name = item.get("store")
else:
if hasattr(item, "hash"):
file_hash = getattr(item, "hash", None)
if hasattr(item, "store"):
store_name = getattr(item, "store", None)
# Try to resolve the file through the Store backend if we have hash + store
resolved_file_path = None
if file_hash and store_name:
try:
from Store import Store
storage = Store(config=config or {})
backend = storage[str(store_name)]
# Call get_file to resolve the hash to an actual file path
maybe_path = backend.get_file(str(file_hash))
if isinstance(maybe_path, Path):
resolved_file_path = maybe_path
elif isinstance(maybe_path, str) and maybe_path:
# Only treat as a Path if it doesn't look like a URL
if not maybe_path.startswith(("http://", "https://")):
resolved_file_path = Path(maybe_path)
except Exception:
# Fallback: try using the path field from the item
pass
# If backend resolution failed, try the path field
if not resolved_file_path:
path_str = None
if isinstance(item, dict):
path_str = (
item.get("path")
or item.get("PATH")
or item.get("target")
or item.get("filename")
)
else:
# Try attributes for PipeObject/SearchResult/etc.
for attr in ("path", "PATH", "target", "filename"):
if hasattr(item, attr):
val = getattr(item, attr)
if val and isinstance(val, (str, Path)):
path_str = val
break
if path_str:
from SYS.utils import expand_path
resolved_file_path = expand_path(path_str).resolve()
# Now check if it's an image and render it
is_image = False
if resolved_file_path:
try:
if resolved_file_path.suffix.lower() in IMAGE_EXTENSIONS and resolved_file_path.exists():
# Use our image renderer
stdout_console().print()
render_image_to_console(resolved_file_path)
is_image = True
elif resolved_file_path.suffix.lower() in IMAGE_EXTENSIONS and not resolved_file_path.exists():
stdout_console().print(f"[yellow]Warning: Image file not found at {resolved_file_path}[/yellow]")
except Exception:
pass
# Render the comprehensive details panel for the item in either case
item_to_details = item if isinstance(item, dict) else (
item.to_dict() if hasattr(item, "to_dict") else vars(item)
)
# Ensure we include the resolved path if we found one
if resolved_file_path and "path" not in item_to_details:
item_to_details["path"] = str(resolved_file_path)
render_item_details_panel(item_to_details)
ctx.set_last_result_items_only(items)
return
2025-12-20 02:12:45 -08:00
table = ResultTable("Selection Result")
for item in items:
table.add_result(item)
ctx.set_last_result_items_only(items)
2025-12-20 23:57:44 -08:00
stdout_console().print()
stdout_console().print(table)
2025-12-20 02:12:45 -08:00
except Exception as exc:
pipeline_status = "failed"
pipeline_error = str(exc)
print(f"[error] Failed to execute pipeline: {exc}\n")
finally:
2025-12-21 05:10:09 -08:00
if progress_ui is not None:
try:
progress_ui.stop()
except Exception:
pass
try:
from SYS import pipeline as _pipeline_ctx
2025-12-29 17:05:03 -08:00
2025-12-21 05:10:09 -08:00
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
except Exception:
pass
2025-12-27 14:50:59 -08:00
# End-of-command cleanup: avoid leaking current stage tables into
# the next REPL command (causes stale @ selection sources).
try:
if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(None)
except Exception:
pass
2026-01-01 20:37:27 -08:00
if pipeline_session and worker_manager:
final_msg = f"Pipeline {pipeline_status}"
if pipeline_error:
final_msg += f": {pipeline_error}"
else:
final_msg += " (ok)"
self._log_pipeline_event(
worker_manager,
pipeline_session.worker_id,
final_msg,
)
2025-12-20 02:12:45 -08:00
if pipeline_session:
pipeline_session.close(
status=pipeline_status,
error_msg=pipeline_error
)
2025-12-20 02:12:45 -08:00
except Exception as exc:
print(f"[error] Failed to execute pipeline: {exc}\n")
2025-12-21 05:10:09 -08:00
2025-12-20 23:57:44 -08:00
from rich.markdown import Markdown
from rich.console import Console
2025-12-20 02:12:45 -08:00
2025-12-20 23:57:44 -08:00
# Module-level Rich console instance (separate from the SYS.rich_display
# stdout/stderr console helpers imported at the top of the file).
console = Console()
2025-12-23 16:36:39 -08:00
2025-12-29 17:05:03 -08:00
2025-12-20 02:12:45 -08:00
class MedeiaCLI:
"""Main CLI application object."""
ROOT = Path(__file__).resolve().parent
def __init__(self) -> None:
    """Initialize the CLI: config loader, startup hooks, and executors."""
    self._config_loader = ConfigLoader(root=self.ROOT)

    # Best-effort startup hook: auto-install optional tools named in config.
    try:
        from SYS.optional_deps import maybe_auto_install_configured_tools

        maybe_auto_install_configured_tools(self._config_loader.load())
    except Exception:
        pass

    # Best-effort startup hook: warm the store-choices cache so disabled
    # stores are filtered out of completions from the first command.
    try:
        from cmdlet._shared import SharedArgs

        SharedArgs._refresh_store_choices_cache(self._config_loader.load())
    except Exception:
        pass

    self._cmdlet_executor = CmdletExecutor(config_loader=self._config_loader)
    self._pipeline_executor = PipelineExecutor(config_loader=self._config_loader)
@staticmethod
def parse_selection_syntax(token: str) -> Optional[Set[int]]:
return SelectionSyntax.parse(token)
@classmethod
def get_store_choices(cls) -> List[str]:
loader = ConfigLoader(root=cls.ROOT)
return CmdletIntrospection.store_choices(loader.load())
    def build_app(self) -> typer.Typer:
        """Build the Typer application.

        Registers the `pipeline` and `repl` commands, a no-subcommand callback
        that falls through to the REPL, and one pass-through command per known
        cmdlet so `mm <cmdlet> [args]` works directly from the shell.
        """
        app = typer.Typer(help="Medeia-Macina CLI")

        def _validate_pipeline_option(
            ctx: typer.Context,
            param: typer.CallbackParam,
            value: str
        ):
            # Typer option callback: reject syntactically invalid pipelines
            # early, but never fail on validator import/runtime problems.
            try:
                from SYS.cli_syntax import validate_pipeline_text

                syntax_error = validate_pipeline_text(value)
                if syntax_error:
                    raise typer.BadParameter(syntax_error.message)
            except typer.BadParameter:
                raise
            except Exception:
                pass
            return value

        @app.command("pipeline")
        def pipeline(
            command: str = typer.Option(
                ...,
                "--pipeline",
                "-p",
                help="Pipeline command string to execute",
                callback=_validate_pipeline_option,
            ),
            seeds_json: Optional[str] = typer.Option(
                None,
                "--seeds-json",
                "-s",
                help="JSON string of seed items"
            ),
        ) -> None:
            # One-shot pipeline execution: optional JSON seeds, syntax check,
            # tokenize, then run through the shared pipeline executor.
            from SYS import pipeline as ctx

            config = self._config_loader.load()
            debug_enabled = bool(config.get("debug", False))
            set_debug(debug_enabled)
            if seeds_json:
                try:
                    seeds = json.loads(seeds_json)
                    if not isinstance(seeds, list):
                        seeds = [seeds]
                    ctx.set_last_result_items_only(seeds)
                except Exception as exc:
                    print(f"Error parsing seeds JSON: {exc}")
                    return
            try:
                from SYS.cli_syntax import validate_pipeline_text

                syntax_error = validate_pipeline_text(command)
                if syntax_error:
                    print(syntax_error.message, file=sys.stderr)
                    return
            except Exception:
                pass
            try:
                tokens = shlex.split(command)
            except ValueError as exc:
                print(f"Syntax error: {exc}", file=sys.stderr)
                return
            if not tokens:
                return
            self._pipeline_executor.execute_tokens(tokens)

        @app.command("repl")
        def repl() -> None:
            # Explicit `repl` subcommand.
            self.run_repl()

        @app.callback(invoke_without_command=True)
        def main_callback(ctx: typer.Context) -> None:
            # No subcommand given: drop into the interactive REPL.
            if ctx.invoked_subcommand is None:
                self.run_repl()

        # Keep references so linters don't flag the decorated closures as unused.
        _ = (pipeline, repl, main_callback)

        # Dynamically register all cmdlets as top-level Typer commands so users can
        # invoke `mm <cmdlet> [args]` directly from the shell. We use Click/Typer
        # context settings to allow arbitrary flags and options to pass through to
        # the cmdlet system without Typer trying to parse them.
        try:
            names = list_cmdlet_names()
            skip = {"pipeline", "repl"}
            for nm in names:
                if not nm or nm in skip:
                    continue

                # create a scoped handler to capture the command name
                def _make_handler(cmd_name: str):
                    @app.command(
                        cmd_name,
                        context_settings={
                            "ignore_unknown_options": True,
                            "allow_extra_args": True,
                        },
                    )
                    def _handler(ctx: typer.Context):
                        try:
                            args = list(ctx.args or [])
                        except Exception:
                            args = []
                        self._cmdlet_executor.execute(cmd_name, args)

                    return _handler

                _make_handler(nm)
        except Exception:
            # Don't let failure to register dynamic commands break startup
            pass

        return app
def run(self) -> None:
2025-12-20 23:57:44 -08:00
# Ensure Rich tracebacks are active even when invoking subcommands.
try:
config = self._config_loader.load()
debug_enabled = bool(config.get("debug",
False)
) if isinstance(config,
dict) else False
2025-12-20 23:57:44 -08:00
except Exception:
debug_enabled = False
set_debug(debug_enabled)
_install_rich_traceback(show_locals=debug_enabled)
2025-12-20 02:12:45 -08:00
self.build_app()()
    def run_repl(self) -> None:
        """Run the interactive REPL.

        Prints the banner layout, performs best-effort startup availability
        checks (MPV, stores, providers, tools, cookies), wires the
        prompt_toolkit session with completion/lexing/toolbar, then loops
        reading commands and dispatching them to the cmdlet or pipeline
        executors. `@,,` / `@..` navigate result-table history.
        """
        # console = Console(width=100)
        # Valid Rich rainbow colors
        RAINBOW = [
            "red",
            "dark_orange",
            "yellow",
            "green",
            "blue",
            "purple",
            "magenta",
        ]

        def rainbow_pillar(colors, height=21, bar_width=36):
            # One full-width Bar per row, cycling through the palette.
            table = Table.grid(padding=0)
            table.add_column(no_wrap=True)
            for i in range(height):
                color = colors[i % len(colors)]
                table.add_row(Bar(size=1, begin=0, end=1, width=bar_width, color=color))
            return table

        # Build root layout
        root = Layout(name="root")
        root.split_row(
            Layout(name="left", ratio=2),
            Layout(name="center", ratio=8),
            Layout(name="right", ratio=2),
        )

        # Left pillar → forward rainbow
        root["left"].update(
            Panel(rainbow_pillar(RAINBOW, height=21, bar_width=36), title="DELTA")
        )

        # Right pillar → reverse rainbow
        root["right"].update(
            Panel(
                rainbow_pillar(list(reversed(RAINBOW)), height=21, bar_width=36),
                title="LAMBDA"
            )
        )

        # Center content
        center_md = Markdown(
            """
# ****************** Medios Macina ******************
take what you want | keep what you like | share what you love
_____________________________________________________________
_____________________________________________________________
_____________________________________________________________
For suddenly you may be let loose from the net, and thrown out to sea.
Waving around clutching at gnats, unable to lift the heavy anchor. Lost
and without a map, forgotten things from the past by distracting wind storms.
_____________________________________________________________
_____________________________________________________________
_____________________________________________________________
Light shines a straight path to the golden shores.
Come to love it when others take what you share, as there is no greater joy
"""
        )
        root["center"].update(Panel(center_md, title="KAPPA", height=21))
        console.print(root)
        prompt_text = "<🜂🜄|🜁🜃>"

        # Startup status table: fixed row order, no selection choices, all caps.
        startup_table = ResultTable(
            "*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"
        )
        startup_table.set_no_choice(True).set_preserve_order(True)
        startup_table.set_value_case("upper")

        def _upper(value: Any) -> str:
            # Render any value as an upper-cased string ("" for None).
            text = "" if value is None else str(value)
            return text.upper()

        def _add_startup_check(
            status: str,
            name: str,
            *,
            provider: str = "",
            store: str = "",
            files: int | str | None = None,
            detail: str = "",
        ) -> None:
            # Append one status row to the startup table.
            row = startup_table.add_row()
            row.add_column("STATUS", _upper(status))
            row.add_column("NAME", _upper(name))
            row.add_column("PROVIDER", _upper(provider or ""))
            row.add_column("STORE", _upper(store or ""))
            row.add_column("FILES", "" if files is None else str(files))
            row.add_column("DETAIL", _upper(detail or ""))

        def _has_store_subtype(cfg: dict, subtype: str) -> bool:
            # True when store.<subtype> contains at least one non-empty instance dict.
            store_cfg = cfg.get("store")
            if not isinstance(store_cfg, dict):
                return False
            bucket = store_cfg.get(subtype)
            if not isinstance(bucket, dict):
                return False
            return any(isinstance(v, dict) and bool(v) for v in bucket.values())

        def _has_provider(cfg: dict, name: str) -> bool:
            # True when provider.<name> exists and is a non-empty dict.
            provider_cfg = cfg.get("provider")
            if not isinstance(provider_cfg, dict):
                return False
            block = provider_cfg.get(str(name).strip().lower())
            return isinstance(block, dict) and bool(block)

        def _has_tool(cfg: dict, name: str) -> bool:
            # True when tool.<name> exists and is a non-empty dict.
            tool_cfg = cfg.get("tool")
            if not isinstance(tool_cfg, dict):
                return False
            block = tool_cfg.get(str(name).strip().lower())
            return isinstance(block, dict) and bool(block)

        def _ping_url(url: str, timeout: float = 3.0) -> tuple[bool, str]:
            # HTTP GET probe; any status < 500 counts as "reachable".
            try:
                from API.HTTP import HTTPClient
                with HTTPClient(timeout=timeout, retries=1) as client:
                    resp = client.get(url, allow_redirects=True)
                    code = int(getattr(resp, "status_code", 0) or 0)
                    ok = 200 <= code < 500
                    return ok, f"{url} (HTTP {code})"
            except Exception as exc:
                return False, f"{url} ({type(exc).__name__})"

        config = self._config_loader.load()
        debug_enabled = bool(config.get("debug", False))
        set_debug(debug_enabled)
        _install_rich_traceback(show_locals=debug_enabled)
        _add_startup_check("ENABLED" if debug_enabled else "DISABLED", "DEBUGGING")

        # Best-effort startup availability checks; never block the REPL on them.
        try:
            # MPV playback support.
            try:
                from MPV.mpv_ipc import MPV
                import shutil
                MPV()
                mpv_path = shutil.which("mpv")
                _add_startup_check("ENABLED", "MPV", detail=mpv_path or "Available")
            except Exception as exc:
                _add_startup_check("DISABLED", "MPV", detail=str(exc))

            # Shared store registry used by the store checks below.
            store_registry = None
            if config:
                try:
                    from Store import Store as StoreRegistry
                    store_registry = StoreRegistry(config=config, suppress_debug=True)
                except Exception:
                    store_registry = None

            # Hydrus network store instances.
            if _has_store_subtype(config, "hydrusnetwork"):
                store_cfg = config.get("store")
                hydrus_cfg = (
                    store_cfg.get("hydrusnetwork", {}) if isinstance(store_cfg, dict) else {}
                )
                if isinstance(hydrus_cfg, dict):
                    for instance_name, instance_cfg in hydrus_cfg.items():
                        if not isinstance(instance_cfg, dict):
                            continue
                        name_key = str(instance_cfg.get("NAME") or instance_name)
                        url_val = str(instance_cfg.get("URL") or "").strip()
                        ok = bool(
                            store_registry
                            and store_registry.is_available(name_key)
                        )
                        status = "ENABLED" if ok else "DISABLED"
                        if ok:
                            # Try to fetch a file count (attribute or getter).
                            total = None
                            try:
                                if store_registry:
                                    backend = store_registry[name_key]
                                    total = getattr(backend, "total_count", None)
                                    if total is None:
                                        getter = getattr(
                                            backend,
                                            "get_total_count",
                                            None
                                        )
                                        if callable(getter):
                                            total = getter()
                            except Exception:
                                total = None
                            detail = url_val
                            files = total if isinstance(
                                total,
                                int
                            ) and total >= 0 else None
                        else:
                            err = None
                            if store_registry:
                                err = store_registry.get_backend_error(
                                    instance_name
                                ) or store_registry.get_backend_error(name_key)
                            detail = (url_val + (" - " if url_val else "")
                                      ) + (err or "Unavailable")
                            files = None
                        _add_startup_check(
                            status,
                            name_key,
                            store="hydrusnetwork",
                            files=files,
                            detail=detail
                        )

            # Provider checks (registered providers + a default set).
            provider_cfg = config.get("provider") if isinstance(config, dict) else None
            if isinstance(provider_cfg, dict) and provider_cfg:
                from Provider.metadata_provider import list_metadata_providers
                from ProviderCore.registry import (
                    list_file_providers,
                    list_providers,
                    list_search_providers,
                )
                provider_availability = list_providers(config) or {}
                search_availability = list_search_providers(config) or {}
                file_availability = list_file_providers(config) or {}
                meta_availability = list_metadata_providers(config) or {}

                def _provider_display_name(key: str) -> str:
                    # Friendly capitalization for known provider keys.
                    k = (key or "").strip()
                    low = k.lower()
                    if low == "openlibrary":
                        return "OpenLibrary"
                    if low == "alldebrid":
                        return "AllDebrid"
                    if low == "youtube":
                        return "YouTube"
                    return k[:1].upper() + k[1:] if k else "Provider"

                # "matrix" is handled by its own dedicated check below.
                already_checked = {"matrix"}

                def _default_provider_ping_targets(provider_key: str) -> list[str]:
                    # URLs to probe for reachability, per provider.
                    prov = (provider_key or "").strip().lower()
                    if prov == "openlibrary":
                        return ["https://openlibrary.org"]
                    if prov == "youtube":
                        return ["https://www.youtube.com"]
                    if prov == "bandcamp":
                        return ["https://bandcamp.com"]
                    if prov == "libgen":
                        from Provider.libgen import MIRRORS
                        mirrors = [
                            str(x).rstrip("/") for x in (MIRRORS or [])
                            if str(x).strip()
                        ]
                        return [m + "/json.php" for m in mirrors]
                    return []

                def _ping_first(urls: list[str]) -> tuple[bool, str]:
                    # First reachable URL wins; otherwise report the first probe.
                    for u in urls:
                        ok, detail = _ping_url(u)
                        if ok:
                            return True, detail
                    if urls:
                        ok, detail = _ping_url(urls[0])
                        return ok, detail
                    return False, "No ping target"

                for provider_name in provider_cfg.keys():
                    prov = str(provider_name or "").strip().lower()
                    if not prov or prov in already_checked:
                        continue
                    display = _provider_display_name(prov)
                    if prov == "alldebrid":
                        # AllDebrid: verified via API key rather than ping.
                        try:
                            from Provider.alldebrid import _get_debrid_api_key
                            from API.alldebrid import AllDebridClient
                            api_key = _get_debrid_api_key(config)
                            if not api_key:
                                _add_startup_check(
                                    "DISABLED",
                                    display,
                                    provider=prov,
                                    detail="Not configured"
                                )
                            else:
                                client = AllDebridClient(api_key)
                                base_url = str(
                                    getattr(client,
                                            "base_url",
                                            "") or ""
                                ).strip()
                                _add_startup_check(
                                    "ENABLED",
                                    display,
                                    provider=prov,
                                    detail=base_url or "Connected",
                                )
                        except Exception as exc:
                            _add_startup_check(
                                "DISABLED",
                                display,
                                provider=prov,
                                detail=str(exc)
                            )
                        continue
                    # Look the provider up in each registry bucket in turn.
                    is_known = False
                    ok_val: Optional[bool] = None
                    if prov in provider_availability:
                        is_known = True
                        ok_val = bool(provider_availability.get(prov))
                    elif prov in search_availability:
                        is_known = True
                        ok_val = bool(search_availability.get(prov))
                    elif prov in file_availability:
                        is_known = True
                        ok_val = bool(file_availability.get(prov))
                    elif prov in meta_availability:
                        is_known = True
                        ok_val = bool(meta_availability.get(prov))
                    if not is_known:
                        _add_startup_check(
                            "UNKNOWN",
                            display,
                            provider=prov,
                            detail="Not registered"
                        )
                    else:
                        detail = "Configured" if ok_val else "Not configured"
                        ping_targets = _default_provider_ping_targets(prov)
                        if ping_targets:
                            ping_ok, ping_detail = _ping_first(ping_targets)
                            if ok_val:
                                detail = ping_detail
                            else:
                                detail = (
                                    (detail + " | " +
                                     ping_detail) if ping_detail else detail
                                )
                        _add_startup_check(
                            "ENABLED" if ok_val else "DISABLED",
                            display,
                            provider=prov,
                            detail=detail,
                        )
                    already_checked.add(prov)

                # Always report the default search providers, even if unconfigured.
                default_search_providers = [
                    "openlibrary",
                    "libgen",
                    "youtube",
                    "bandcamp"
                ]
                for prov in default_search_providers:
                    if prov in already_checked:
                        continue
                    display = _provider_display_name(prov)
                    ok_val = (
                        bool(search_availability.get(prov))
                        if prov in search_availability else False
                    )
                    ping_targets = _default_provider_ping_targets(prov)
                    ping_ok, ping_detail = (
                        _ping_first(ping_targets) if ping_targets else (False, "No ping target")
                    )
                    detail = ping_detail or (
                        "Available" if ok_val else "Unavailable"
                    )
                    if not ok_val:
                        detail = "Unavailable" + (
                            f" | {ping_detail}" if ping_detail else ""
                        )
                    _add_startup_check(
                        "ENABLED" if (ok_val and ping_ok) else "DISABLED",
                        display,
                        provider=prov,
                        detail=detail,
                    )
                    already_checked.add(prov)

                # 0x0.st file host.
                if "0x0" not in already_checked:
                    ok_val = (
                        bool(file_availability.get("0x0"))
                        if "0x0" in file_availability else False
                    )
                    ping_ok, ping_detail = _ping_url("https://0x0.st")
                    detail = ping_detail
                    if not ok_val:
                        detail = "Unavailable" + (
                            f" | {ping_detail}" if ping_detail else ""
                        )
                    _add_startup_check(
                        "ENABLED" if (ok_val and ping_ok) else "DISABLED",
                        "0x0",
                        provider="0x0",
                        detail=detail,
                    )

                # Matrix provider (dedicated validate() check).
                if _has_provider(config, "matrix"):
                    try:
                        from Provider.matrix import Matrix
                        provider = Matrix(config)
                        matrix_conf = (
                            config.get("provider",
                                       {}).get("matrix",
                                               {}) if isinstance(config,
                                                                 dict) else {}
                        )
                        homeserver = str(matrix_conf.get("homeserver") or "").strip()
                        room_id = str(matrix_conf.get("room_id") or "").strip()
                        if homeserver and not homeserver.startswith("http"):
                            homeserver = f"https://{homeserver}"
                        target = homeserver.rstrip("/")
                        if room_id:
                            target = (
                                target + (" " if target else "")
                            ) + f"room:{room_id}"
                        _add_startup_check(
                            "ENABLED" if provider.validate() else "DISABLED",
                            "Matrix",
                            provider="matrix",
                            detail=target or
                            ("Connected" if provider.validate() else "Not configured"),
                        )
                    except Exception as exc:
                        _add_startup_check(
                            "DISABLED",
                            "Matrix",
                            provider="matrix",
                            detail=str(exc)
                        )

            # Folder store instances (local filesystem scans).
            if _has_store_subtype(config, "folder"):
                store_cfg = config.get("store")
                folder_cfg = store_cfg.get("folder",
                                           {}) if isinstance(store_cfg,
                                                             dict) else {}
                if isinstance(folder_cfg, dict) and folder_cfg:
                    for instance_name, instance_cfg in folder_cfg.items():
                        if not isinstance(instance_cfg, dict):
                            continue
                        name_key = str(instance_cfg.get("NAME") or instance_name)
                        path_val = str(
                            instance_cfg.get("PATH") or instance_cfg.get("path")
                            or ""
                        ).strip()
                        ok = bool(
                            store_registry
                            and store_registry.is_available(name_key)
                        )
                        if ok and store_registry:
                            backend = store_registry[name_key]
                            scan_ok = bool(getattr(backend, "scan_ok", True))
                            scan_detail = str(
                                getattr(backend,
                                        "scan_detail",
                                        "") or ""
                            )
                            stats = getattr(backend, "scan_stats", None)
                            files = None
                            if isinstance(stats, dict):
                                total_db = stats.get("files_total_db")
                                if isinstance(total_db, (int, float)):
                                    files = int(total_db)
                            status = "SCANNED" if scan_ok else "ERROR"
                            detail = (path_val + (" - " if path_val else "")
                                      ) + (scan_detail or "Up to date")
                            _add_startup_check(
                                status,
                                name_key,
                                store="folder",
                                files=files,
                                detail=detail
                            )
                        else:
                            err = None
                            if store_registry:
                                err = store_registry.get_backend_error(
                                    instance_name
                                ) or store_registry.get_backend_error(name_key)
                            detail = (path_val + (" - " if path_val else "")
                                      ) + (err or "Unavailable")
                            _add_startup_check(
                                "ERROR",
                                name_key,
                                store="folder",
                                detail=detail
                            )

            # Debrid store (AllDebrid-backed).
            if _has_store_subtype(config, "debrid"):
                try:
                    from SYS.config import get_debrid_api_key
                    from API.alldebrid import AllDebridClient
                    api_key = get_debrid_api_key(config)
                    if not api_key:
                        _add_startup_check(
                            "DISABLED",
                            "Debrid",
                            store="debrid",
                            detail="Not configured"
                        )
                    else:
                        client = AllDebridClient(api_key)
                        base_url = str(getattr(client,
                                               "base_url",
                                               "") or "").strip()
                        _add_startup_check(
                            "ENABLED",
                            "Debrid",
                            store="debrid",
                            detail=base_url or "Connected"
                        )
                except Exception as exc:
                    _add_startup_check(
                        "DISABLED",
                        "Debrid",
                        store="debrid",
                        detail=str(exc)
                    )

            # yt-dlp cookie file discovery.
            try:
                from tool.ytdlp import YtDlpTool
                cookiefile = YtDlpTool(config).resolve_cookiefile()
                if cookiefile is not None:
                    _add_startup_check("FOUND", "Cookies", detail=str(cookiefile))
                else:
                    _add_startup_check("MISSING", "Cookies", detail="Not found")
            except Exception as exc:
                _add_startup_check("ERROR", "Cookies", detail=str(exc))

            # Tool checks (configured via [tool=...])
            if _has_tool(config, "florencevision"):
                try:
                    tool_cfg = config.get("tool")
                    fv_cfg = tool_cfg.get("florencevision") if isinstance(tool_cfg, dict) else None
                    enabled = bool(fv_cfg.get("enabled")) if isinstance(fv_cfg, dict) else False
                    if not enabled:
                        _add_startup_check(
                            "DISABLED",
                            "FlorenceVision",
                            provider="tool",
                            detail="Not enabled",
                        )
                    else:
                        from SYS.optional_deps import florencevision_missing_modules
                        missing = florencevision_missing_modules()
                        if missing:
                            _add_startup_check(
                                "DISABLED",
                                "FlorenceVision",
                                provider="tool",
                                detail="Missing: " + ", ".join(missing),
                            )
                        else:
                            _add_startup_check(
                                "ENABLED",
                                "FlorenceVision",
                                provider="tool",
                                detail="Ready",
                            )
                except Exception as exc:
                    _add_startup_check(
                        "DISABLED",
                        "FlorenceVision",
                        provider="tool",
                        detail=str(exc),
                    )

            if startup_table.rows:
                stdout_console().print()
                stdout_console().print(startup_table)
        except Exception as exc:
            if debug_enabled:
                debug(f"⚠ Could not check service availability: {exc}")

        # Prompt styling for the REPL lexer tokens.
        style = Style.from_dict(
            {
                "cmdlet": "#ffffff",
                "argument": "#3b8eea",
                "value": "#9a3209",
                "string": "#6d0d93",
                "pipe": "#4caf50",
                "selection_at": "#f1c40f",
                "selection_range": "#4caf50",
                "bottom-toolbar": "noreverse",
            }
        )

        class ToolbarState:
            # Mutable holder for the bottom-toolbar message + expiry timer.
            text: str = ""
            last_update_time: float = 0.0
            clear_timer: Optional[threading.Timer] = None

        toolbar_state = ToolbarState()
        session: Optional[PromptSession] = None

        def get_toolbar() -> Optional[str]:
            # Toolbar provider: show the message for ~3 seconds, then clear.
            if not toolbar_state.text or not toolbar_state.text.strip():
                return None
            if time.time() - toolbar_state.last_update_time > 3:
                toolbar_state.text = ""
                return None
            return toolbar_state.text

        def update_toolbar(text: str) -> None:
            # Set a new toolbar message and (re)arm the 3s clear timer.
            nonlocal session
            text = text.strip()
            toolbar_state.text = text
            toolbar_state.last_update_time = time.time()
            if toolbar_state.clear_timer:
                toolbar_state.clear_timer.cancel()
                toolbar_state.clear_timer = None
            if text:

                def clear_toolbar() -> None:
                    toolbar_state.text = ""
                    toolbar_state.clear_timer = None
                    if session is not None and hasattr(
                            session,
                            "app") and session.app.is_running:
                        session.app.invalidate()

                toolbar_state.clear_timer = threading.Timer(3.0, clear_toolbar)
                toolbar_state.clear_timer.daemon = True
                toolbar_state.clear_timer.start()
            if session is not None and hasattr(session,
                                               "app") and session.app.is_running:
                session.app.invalidate()

        self._pipeline_executor.set_toolbar_output(update_toolbar)
        completer = CmdletCompleter(config_loader=self._config_loader)
        session = PromptSession(
            completer=cast(Any,
                           completer),
            lexer=MedeiaLexer(),
            style=style,
            bottom_toolbar=get_toolbar,
            refresh_interval=0.5,
        )

        # Main REPL loop.
        while True:
            try:
                user_input = session.prompt(prompt_text).strip()
            except (EOFError, KeyboardInterrupt):
                print("He who is victorious through deceit is defeated by the truth.")
                break
            if not user_input:
                continue
            low = user_input.lower()
            if low in {"exit",
                       "quit",
                       "q"}:
                print("He who is victorious through deceit is defeated by the truth.")
                break
            if low in {"help",
                       "?"}:
                CmdletHelp.show_cmdlet_list()
                continue
            # Record the raw command text for pipeline context (best-effort).
            pipeline_ctx_ref = None
            try:
                from SYS import pipeline as ctx

                ctx.set_current_command_text(user_input)
                pipeline_ctx_ref = ctx
            except Exception:
                pipeline_ctx_ref = None
            try:
                from SYS.cli_syntax import validate_pipeline_text
                syntax_error = validate_pipeline_text(user_input)
                if syntax_error:
                    print(syntax_error.message, file=sys.stderr)
                    continue
            except Exception:
                pass
            try:
                tokens = shlex.split(user_input)
            except ValueError as exc:
                print(f"Syntax error: {exc}", file=sys.stderr)
                continue
            if not tokens:
                continue
            # `@,,` — restore the NEXT (forward) result table from history.
            if len(tokens) == 1 and tokens[0] == "@,,":
                try:
                    from SYS import pipeline as ctx

                    if ctx.restore_next_result_table():
                        last_table = (
                            ctx.get_display_table()
                            if hasattr(ctx,
                                       "get_display_table") else None
                        )
                        if last_table is None:
                            last_table = ctx.get_last_result_table()
                        if last_table:
                            stdout_console().print()
                            ctx.set_current_stage_table(last_table)
                            stdout_console().print(last_table)
                        else:
                            items = ctx.get_last_result_items()
                            if items:
                                ctx.set_current_stage_table(None)
                                print(
                                    f"Restored {len(items)} items (no table format available)"
                                )
                    else:
                        print("No forward history available", file=sys.stderr)
                except Exception as exc:
                    print(f"Error restoring next table: {exc}", file=sys.stderr)
                continue
            # `@..` — restore the PREVIOUS result table from history.
            if len(tokens) == 1 and tokens[0] == "@..":
                try:
                    from SYS import pipeline as ctx

                    if ctx.restore_previous_result_table():
                        last_table = (
                            ctx.get_display_table()
                            if hasattr(ctx,
                                       "get_display_table") else None
                        )
                        if last_table is None:
                            last_table = ctx.get_last_result_table()

                        # Auto-refresh search-file tables when navigating back,
                        # so row payloads (titles/tags) reflect latest store state.
                        try:
                            src_cmd = (
                                getattr(last_table,
                                        "source_command",
                                        None) if last_table else None
                            )
                            if (isinstance(src_cmd,
                                           str)
                                    and src_cmd.lower().replace("_",
                                                                "-") == "search-file"):
                                src_args = (
                                    getattr(last_table,
                                            "source_args",
                                            None) if last_table else None
                                )
                                base_args = list(src_args
                                                 ) if isinstance(src_args,
                                                                 list) else []
                                cleaned_args = [
                                    str(a) for a in base_args if str(a).strip().lower()
                                    not in {"--refresh", "-refresh"}
                                ]
                                if hasattr(ctx, "set_current_command_text"):
                                    try:
                                        title_text = (
                                            getattr(last_table,
                                                    "title",
                                                    None) if last_table else None
                                        )
                                        if isinstance(title_text,
                                                      str) and title_text.strip():
                                            ctx.set_current_command_text(
                                                title_text.strip()
                                            )
                                        else:
                                            ctx.set_current_command_text(
                                                " ".join(
                                                    ["search-file",
                                                     *cleaned_args]
                                                ).strip()
                                            )
                                    except Exception:
                                        pass
                                try:
                                    self._cmdlet_executor.execute(
                                        "search-file",
                                        cleaned_args + ["--refresh"]
                                    )
                                finally:
                                    if hasattr(ctx, "clear_current_command_text"):
                                        try:
                                            ctx.clear_current_command_text()
                                        except Exception:
                                            pass
                                continue
                        except Exception as exc:
                            print(
                                f"Error refreshing search-file table: {exc}",
                                file=sys.stderr
                            )

                        if last_table:
                            stdout_console().print()
                            ctx.set_current_stage_table(last_table)
                            stdout_console().print(last_table)
                        else:
                            items = ctx.get_last_result_items()
                            if items:
                                ctx.set_current_stage_table(None)
                                print(
                                    f"Restored {len(items)} items (no table format available)"
                                )
                            else:
                                print("No previous result table in history")
                    else:
                        print("Result table history is empty")
                except Exception as exc:
                    print(f"Error restoring previous result table: {exc}")
                continue
            # Dispatch: pipelines / @-selections go to the pipeline executor,
            # everything else is a single cmdlet invocation.
            try:
                if "|" in tokens or (tokens and tokens[0].startswith("@")):
                    self._pipeline_executor.execute_tokens(tokens)
                else:
                    cmd_name = tokens[0].replace("_", "-").lower()
                    is_help = any(
                        arg in {"-help",
                                "--help",
                                "-h"} for arg in tokens[1:]
                    )
                    if is_help:
                        CmdletHelp.show_cmdlet_help(cmd_name)
                    else:
                        self._cmdlet_executor.execute(cmd_name, tokens[1:])
            finally:
                if pipeline_ctx_ref:
                    pipeline_ctx_ref.clear_current_command_text()
2025-11-25 20:09:33 -08:00
if __name__ == "__main__":
    # Script entry point: launch the CLI (defaults to the REPL).
    cli = MedeiaCLI()
    cli.run()