CLI: require an explicit @ selection before piping previous results, add multi-stage Live pipeline progress, and validate --pipeline syntax
CLI.py
@@ -1078,15 +1078,16 @@ class CmdletExecutor:
filtered_args.append(str(arg))

# IMPORTANT: Do not implicitly feed the previous command's results into
# a new command unless the user explicitly selected items via @ syntax.
# Piping should require `|` (or an explicit @ selection).
piped_items = ctx.get_last_result_items()
result: Any = None
if piped_items:
if piped_items and (select_all or selected_indices):
    if select_all:
        result = piped_items
    elif selected_indices:
        result = [piped_items[idx] for idx in selected_indices if 0 <= idx < len(piped_items)]
    else:
        result = piped_items
        result = [piped_items[idx] for idx in selected_indices if 0 <= idx < len(piped_items)]

worker_manager = WorkerManagerRegistry.ensure(config)
stage_session = WorkerStages.begin_stage(
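Read on its own, the new guard means piped input is only consumed when the user asked for it. A minimal standalone sketch of that selection rule (the names `piped_items`, `select_all`, and `selected_indices` are taken from the hunk above; this is an illustration, not the exact executor code):

```python
from typing import Any, List, Optional

def resolve_piped_selection(
    piped_items: Optional[List[Any]],
    select_all: bool,
    selected_indices: List[int],
) -> Optional[List[Any]]:
    """Return piped items only when the user made an explicit @ selection."""
    if not piped_items or not (select_all or selected_indices):
        return None  # no implicit piping without | or an @ selection
    if select_all:
        return list(piped_items)
    # Keep only in-range indices, preserving the order the user gave.
    return [piped_items[i] for i in selected_indices if 0 <= i < len(piped_items)]
```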
@@ -1249,6 +1250,12 @@ class PipelineExecutor:
import pipeline as ctx

try:
    try:
        if hasattr(ctx, "clear_pipeline_stop"):
            ctx.clear_pipeline_stop()
    except Exception:
        pass

    stages = self._split_stages(tokens)
    if not stages:
        print("Invalid pipeline syntax\n")
@@ -1283,7 +1290,10 @@ class PipelineExecutor:
config = self._config_loader.load()
if isinstance(config, dict):
    config["_quiet_background_output"] = True
    # This executor is used by both the REPL and the `pipeline` subcommand.
    # Quiet/background mode is helpful for detached/background runners, but
    # it suppresses interactive UX (like the pipeline Live progress UI).
    config["_quiet_background_output"] = bool(self._toolbar_output is None)

def _resolve_items_for_selection(table_obj, items_list):
    return items_list if items_list else []
@@ -1322,12 +1332,19 @@ class PipelineExecutor:
_add(getattr(item, "table", None))

try:
    from ProviderCore.registry import get_provider
    from ProviderCore.registry import get_provider, is_known_provider_name
except Exception:
    get_provider = None  # type: ignore
    is_known_provider_name = None  # type: ignore

if get_provider is not None:
    for key in candidates:
        try:
            if is_known_provider_name is not None and (not is_known_provider_name(key)):
                continue
        except Exception:
            # If the predicate fails for any reason, fall back to legacy behavior.
            pass
        try:
            provider = get_provider(key, config)
        except Exception:
@@ -1441,6 +1458,9 @@ class PipelineExecutor:
pipeline_status = "completed"
pipeline_error = ""

progress_ui = None
pipe_index_by_stage: Dict[int, int] = {}

try:
    if first_stage_selection_indices:
        if not ctx.get_current_stage_table_source_command():
@@ -1594,6 +1614,45 @@ class PipelineExecutor:
print("No previous results to select from\n")
return

# ------------------------------------------------------------------
# Multi-level pipeline progress (pipes = stages, tasks = items)
# ------------------------------------------------------------------
try:
    quiet_mode = bool(config.get("_quiet_background_output")) if isinstance(config, dict) else False
except Exception:
    quiet_mode = False

try:
    import sys as _sys

    if (not quiet_mode) and bool(getattr(_sys.stderr, "isatty", lambda: False)()):
        from models import PipelineLiveProgress

        pipe_stage_indices: List[int] = []
        pipe_labels: List[str] = []
        for idx, tokens in enumerate(stages):
            if not tokens:
                continue
            name = str(tokens[0]).replace("_", "-").lower()
            if name == "@" or name.startswith("@"):
                continue
            pipe_stage_indices.append(idx)
            pipe_labels.append(name)

        if pipe_labels:
            progress_ui = PipelineLiveProgress(pipe_labels, enabled=True)
            progress_ui.start()
            try:
                import pipeline as _pipeline_ctx
                if hasattr(_pipeline_ctx, "set_live_progress"):
                    _pipeline_ctx.set_live_progress(progress_ui)
            except Exception:
                pass
            pipe_index_by_stage = {stage_idx: pipe_idx for pipe_idx, stage_idx in enumerate(pipe_stage_indices)}
except Exception:
    progress_ui = None
    pipe_index_by_stage = {}

for stage_index, stage_tokens in enumerate(stages):
    if not stage_tokens:
        continue
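The label derivation above only counts real cmdlet stages: empty stages and `@` selections get no progress bar. A standalone sketch of that mapping, assuming `stages` is a list of token lists as in the hunk:

```python
from typing import Any, List, Tuple

def derive_pipe_labels(stages: List[List[Any]]) -> Tuple[List[int], List[str]]:
    """Collect (stage index, display label) pairs, skipping empty and @-selection stages."""
    indices: List[int] = []
    labels: List[str] = []
    for idx, tokens in enumerate(stages):
        if not tokens:
            continue
        name = str(tokens[0]).replace("_", "-").lower()
        if name.startswith("@"):
            continue  # @ selections are not pipes, so they get no progress bar
        indices.append(idx)
        labels.append(name)
    return indices, labels

# Example: a three-stage pipeline where the middle stage is an @ selection.
# derive_pipe_labels([["search-file"], ["@1-3"], ["add-file"]])
# -> ([0, 2], ["search-file", "add-file"])
```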
@@ -1735,10 +1794,60 @@ class PipelineExecutor:
)

stage_worker_id = stage_session.worker_id if stage_session else None

# Estimate how many per-item tasks this pipe will run.
pipe_idx = pipe_index_by_stage.get(stage_index)
if progress_ui is not None and pipe_idx is not None:
    try:
        # Prefer piped input for task counts.
        if isinstance(piped_result, list):
            total_items = len(piped_result)
            preview_items: Optional[List[Any]] = list(piped_result)
        elif piped_result is not None:
            total_items = 1
            preview_items = [piped_result]
        else:
            # First stage without piped input: infer from URL-ish args.
            preview: List[Any] = []

            toks = list(stage_tokens[1:])
            i = 0
            while i < len(toks):
                t = str(toks[i])
                low = t.lower().strip()
                if low in {"-url", "--url"} and i + 1 < len(toks):
                    nxt = str(toks[i + 1])
                    if nxt and not nxt.startswith("-"):
                        preview.append(nxt)
                    i += 2
                    continue
                if (not t.startswith("-")) and (
                    "://" in low or low.startswith(("magnet:", "torrent:"))
                ):
                    preview.append(t)
                i += 1

            preview_items = preview if preview else None
            total_items = len(preview) if preview else 1

        progress_ui.begin_pipe(pipe_idx, total_items=int(total_items), items_preview=preview_items)
    except Exception:
        pass

on_emit = None
if progress_ui is not None and pipe_idx is not None:
    def _on_emit(obj: Any, _idx: int = int(pipe_idx)) -> None:
        try:
            progress_ui.on_emit(_idx, obj)
        except Exception:
            pass
    on_emit = _on_emit

pipeline_ctx = ctx.PipelineStageContext(
    stage_index=stage_index,
    total_stages=len(stages),
    worker_id=stage_worker_id,
    on_emit=on_emit,
)
ctx.set_stage_context(pipeline_ctx)
stage_status = "completed"
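The task-count estimate for an un-piped first stage boils down to counting URL-like arguments. A sketch of that heuristic in isolation (it assumes the same token shape as `stage_tokens[1:]` above):

```python
from typing import List

def preview_url_args(args: List[str]) -> List[str]:
    """Pick out values that look like downloadable targets: -url values, URLs, magnet/torrent links."""
    preview: List[str] = []
    i = 0
    while i < len(args):
        tok = str(args[i])
        low = tok.lower().strip()
        if low in {"-url", "--url"} and i + 1 < len(args):
            nxt = str(args[i + 1])
            if nxt and not nxt.startswith("-"):
                preview.append(nxt)
            i += 2
            continue
        if not tok.startswith("-") and ("://" in low or low.startswith(("magnet:", "torrent:"))):
            preview.append(tok)
        i += 1
    return preview

# preview_url_args(["-url", "https://example.org/a.mp4", "magnet:?xt=urn:btih:abc"])
# -> ["https://example.org/a.mp4", "magnet:?xt=urn:btih:abc"]
```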
@@ -1784,6 +1893,17 @@ class PipelineExecutor:
stage_is_last = stage_index + 1 >= len(stages)

# Graceful early-stop: preflight declined, etc.
try:
    stop_req = ctx.get_pipeline_stop() if hasattr(ctx, "get_pipeline_stop") else None
except Exception:
    stop_req = None
if stop_req is not None:
    # Do not treat as an error; just end the pipeline quietly.
    pipeline_status = "completed"
    pipeline_error = ""
    return

emits: List[Any] = []
if getattr(pipeline_ctx, "emits", None) is not None:
    emits = list(pipeline_ctx.emits or [])
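This early-stop check pairs with the `clear_pipeline_stop()` call at the top of the run: a cmdlet can request a quiet stop and the executor ends the pipeline without flagging an error. A toy sketch of that flag protocol (module-level state and the `request_pipeline_stop` name are assumptions for illustration; only `get_pipeline_stop` and `clear_pipeline_stop` appear in the diff):

```python
from typing import Optional

_pipeline_stop: Optional[str] = None  # None means "keep going"

def request_pipeline_stop(reason: str = "stopped") -> None:
    # Hypothetical setter a cmdlet could call (e.g. when a preflight prompt is declined).
    global _pipeline_stop
    _pipeline_stop = reason

def get_pipeline_stop() -> Optional[str]:
    return _pipeline_stop

def clear_pipeline_stop() -> None:
    global _pipeline_stop
    _pipeline_stop = None
```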
@@ -1825,6 +1945,25 @@ class PipelineExecutor:
already_rendered = False

if not already_rendered:
    # Stop the Live progress display before printing a selectable table.
    # Printing while Live is active can cause the table to be truncated/overwritten.
    if progress_ui is not None:
        try:
            if pipe_idx is not None:
                progress_ui.finish_pipe(int(pipe_idx), force_complete=True)
        except Exception:
            pass
        try:
            progress_ui.stop()
        except Exception:
            pass
        try:
            import pipeline as _pipeline_ctx
            if hasattr(_pipeline_ctx, "set_live_progress"):
                _pipeline_ctx.set_live_progress(None)
        except Exception:
            pass
        progress_ui = None
    stdout_console().print()
    stdout_console().print(stage_table)
@@ -1845,6 +1984,26 @@ class PipelineExecutor:
# table they placed into pipeline context (e.g. get-tag). Prefer a
# display table if one exists, otherwise the current-stage table.
if stage_is_last:
    # Stop the Live progress display before printing the final table.
    # This avoids cursor-control interactions that can truncate output.
    if progress_ui is not None:
        try:
            if pipe_idx is not None:
                progress_ui.finish_pipe(int(pipe_idx), force_complete=(stage_status == "completed"))
        except Exception:
            pass
        try:
            progress_ui.stop()
        except Exception:
            pass
        try:
            import pipeline as _pipeline_ctx
            if hasattr(_pipeline_ctx, "set_live_progress"):
                _pipeline_ctx.set_live_progress(None)
        except Exception:
            pass
        progress_ui = None

    final_table = None
    try:
        final_table = ctx.get_display_table() if hasattr(ctx, "get_display_table") else None
@@ -1853,6 +2012,36 @@ class PipelineExecutor:
if final_table is None:
    final_table = stage_table

# If the cmdlet emitted results but didn't supply a fresh table, it's
# common for `stage_table` to still point at the previous stage's table
# (e.g. add-file's canonical store table). In that case, prefer rendering
# the emitted results so the user sees the actual output of this stage.
if emits and (ctx.get_display_table() if hasattr(ctx, "get_display_table") else None) is None:
    try:
        src_cmd = str(getattr(final_table, "source_command", "") or "").strip().lower() if final_table else ""
    except Exception:
        src_cmd = ""
    try:
        cur_cmd = str(cmd_name or "").strip().replace("_", "-").lower()
    except Exception:
        cur_cmd = ""
    if (final_table is None) or (not src_cmd) or (src_cmd.replace("_", "-") != cur_cmd):
        try:
            table_title = CmdletExecutor._get_table_title_for_command(cmd_name, emits, list(stage_args))
        except Exception:
            table_title = "Results"
        table = ResultTable(table_title)
        for item in emits:
            table.add_result(item)
        try:
            if hasattr(ctx, "set_last_result_table_overlay"):
                ctx.set_last_result_table_overlay(table, emits)
            if hasattr(ctx, "set_current_stage_table"):
                ctx.set_current_stage_table(table)
        except Exception:
            pass
        final_table = table

if final_table is not None:
    try:
        already_rendered = bool(getattr(final_table, "_rendered_by_cmdlet", False))
@@ -1863,18 +2052,7 @@ class PipelineExecutor:
stdout_console().print()
stdout_console().print(final_table)

# Fallback: if a cmdlet emitted results but did not provide a table,
# render a standard ResultTable so last-stage pipelines still show output.
if final_table is None and emits:
    try:
        table_title = CmdletExecutor._get_table_title_for_command(cmd_name, emits, list(stage_args))
    except Exception:
        table_title = "Results"
    table = ResultTable(table_title)
    for item in emits:
        table.add_result(item)
    stdout_console().print()
    stdout_console().print(table)
# (Fallback handled above by synthesizing an overlay ResultTable.)

if isinstance(ret_code, int) and ret_code != 0:
    stage_status = "failed"
@@ -1891,6 +2069,11 @@ class PipelineExecutor:
    pipeline_error = f"{stage_label} error: {exc}"
    return
finally:
    if progress_ui is not None and pipe_idx is not None:
        try:
            progress_ui.finish_pipe(int(pipe_idx), force_complete=(stage_status == "completed"))
        except Exception:
            pass
    try:
        if hasattr(ctx, "clear_current_cmdlet_name"):
            ctx.clear_current_cmdlet_name()
@@ -1925,6 +2108,17 @@ class PipelineExecutor:
    pipeline_error = str(exc)
    print(f"[error] Failed to execute pipeline: {exc}\n")
finally:
    if progress_ui is not None:
        try:
            progress_ui.stop()
        except Exception:
            pass
        try:
            import pipeline as _pipeline_ctx
            if hasattr(_pipeline_ctx, "set_live_progress"):
                _pipeline_ctx.set_live_progress(None)
        except Exception:
            pass
    if pipeline_session:
        pipeline_session.close(status=pipeline_status, error_msg=pipeline_error)
except Exception as exc:
@@ -1933,6 +2127,11 @@ class PipelineExecutor:
Welcome = """
# MEDIOS-MACINA

Romans 1:22 Professing themselves to be wise, they became fools,


dfd
==
Rich can do a pretty *decent* job of rendering markdown.

1. This is a list item
@@ -1966,6 +2165,19 @@ class MedeiaCLI:
def build_app(self) -> typer.Typer:
    app = typer.Typer(help="Medeia-Macina CLI")

    def _validate_pipeline_option(ctx: typer.Context, param: typer.CallbackParam, value: str):
        try:
            from cli_syntax import validate_pipeline_text

            syntax_error = validate_pipeline_text(value)
            if syntax_error:
                raise typer.BadParameter(syntax_error.message)
        except typer.BadParameter:
            raise
        except Exception:
            pass
        return value

    def _complete_search_provider(ctx, param, incomplete: str):  # pragma: no cover
        try:
            from click.shell_completion import CompletionItem
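For context, this is the standard Typer/Click option-callback pattern: the callback receives the raw option value and raises `typer.BadParameter` to reject it before the command body runs. A minimal self-contained example (the empty-string rule is invented for illustration and is not the real `validate_pipeline_text` logic):

```python
import typer

app = typer.Typer()

def _check_not_empty(ctx: typer.Context, param: typer.CallbackParam, value: str) -> str:
    # Reject obviously malformed input early; Typer prints a usage error and exits.
    if not value.strip():
        raise typer.BadParameter("pipeline string must not be empty")
    return value

@app.command()
def pipeline(command: str = typer.Option(..., "--pipeline", "-p", callback=_check_not_empty)) -> None:
    typer.echo(f"would run: {command}")

if __name__ == "__main__":
    app()
```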
@@ -1996,7 +2208,9 @@ class MedeiaCLI:
@app.command("pipeline")
def pipeline(
    command: str = typer.Option(..., "--pipeline", "-p", help="Pipeline command string to execute"),
    command: str = typer.Option(
        ..., "--pipeline", "-p", help="Pipeline command string to execute", callback=_validate_pipeline_option
    ),
    seeds_json: Optional[str] = typer.Option(None, "--seeds-json", "-s", help="JSON string of seed items"),
) -> None:
    import pipeline as ctx
@@ -2064,7 +2278,7 @@ class MedeiaCLI:
def run_repl(self) -> None:
    # (Startup banner is optional; keep the REPL quiet by default.)

    prompt_text = "🜂🜄🜁🜃|"
    prompt_text = "<🜂🜄🜁🜃>"

    startup_table = ResultTable(
        "*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"