Migrate imports to SYS package (pipeline/result_table) and update related imports
Checks: some checks failed; smoke-mm / Install & smoke test mm --help (push) was cancelled.

commit ef01ca03a0 (parent 30d3bf480b)
2025-12-29 23:28:15 -08:00
60 changed files with 162 additions and 149 deletions
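The change is mechanical throughout the diff: imports that previously targeted top-level modules (pipeline, result_table, models, rich_display) now resolve through the SYS package. A minimal sketch of the before/after pattern, using names taken from the hunks below:

# Before: top-level module imports
import pipeline as ctx
from result_table import ResultTable
from models import PipelineLiveProgress

# After: the same modules, now imported from the SYS package
from SYS import pipeline as ctx
from SYS.result_table import ResultTable
from SYS.models import PipelineLiveProgress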

CLI.py (30 changed lines)

@@ -32,7 +32,7 @@ from rich.panel import Panel
from rich.markdown import Markdown
from rich.bar import Bar
from rich.table import Table
-from rich_display import stderr_console, stdout_console
+from SYS.rich_display import stderr_console, stdout_console
def _install_rich_traceback(*, show_locals: bool = False) -> None:
@@ -66,7 +66,7 @@ from SYS.cmdlet_catalog import (
list_cmdlet_names,
)
from SYS.config import get_local_storage_path, load_config
-from result_table import ResultTable
+from SYS.result_table import ResultTable
class SelectionSyntax:
@@ -1134,7 +1134,7 @@ class CmdletExecutor:
return "Results"
def execute(self, cmd_name: str, args: List[str]) -> None:
-import pipeline as ctx
+from SYS import pipeline as ctx
from cmdlet import REGISTRY
ensure_registry_loaded()
@@ -1241,7 +1241,7 @@ class CmdletExecutor:
return
try:
-from models import PipelineLiveProgress
+from SYS.models import PipelineLiveProgress
progress_ui = PipelineLiveProgress([cmd_name_norm], enabled=True)
progress_ui.start()
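The PipelineLiveProgress import stays lazy after the migration: it is resolved inside the executing method, so SYS.models is only loaded once a command actually runs. A hedged sketch of that pattern, with _start_progress as a hypothetical helper rather than code from CLI.py:

def _start_progress(cmd_label: str):
    # Lazy, package-qualified import: hypothetical helper illustrating the pattern above.
    from SYS.models import PipelineLiveProgress

    progress_ui = PipelineLiveProgress([cmd_label], enabled=True)
    progress_ui.start()
    return progress_ui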
@@ -2436,7 +2436,7 @@ class PipelineExecutor:
if (not quiet_mode) and bool(getattr(_sys.stderr, "isatty", lambda: False)()):
-from models import PipelineLiveProgress
+from SYS.models import PipelineLiveProgress
pipe_stage_indices: List[int] = []
pipe_labels: List[str] = []
@@ -2502,7 +2502,7 @@ class PipelineExecutor:
progress_ui = PipelineLiveProgress(pipe_labels, enabled=True)
progress_ui.start()
try:
-import pipeline as _pipeline_ctx
+from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(progress_ui)
@@ -2520,7 +2520,7 @@ class PipelineExecutor:
def execute_tokens(self, tokens: List[str]) -> None:
from cmdlet import REGISTRY
-import pipeline as ctx
+from SYS import pipeline as ctx
try:
self._try_clear_pipeline_stop(ctx)
@@ -2999,7 +2999,7 @@ class PipelineExecutor:
except Exception:
pass
try:
-import pipeline as _pipeline_ctx
+from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
@@ -3142,7 +3142,7 @@ class PipelineExecutor:
except Exception:
pass
try:
-import pipeline as _pipeline_ctx
+from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
@@ -3199,7 +3199,7 @@ class PipelineExecutor:
except Exception:
pass
try:
-import pipeline as _pipeline_ctx
+from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
@@ -3354,7 +3354,7 @@ class PipelineExecutor:
except Exception:
pass
try:
-import pipeline as _pipeline_ctx
+from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
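Several hunks above repeat the same defensive teardown: the pipeline context is re-imported under SYS, and set_live_progress is only called when the module actually exposes the hook. A condensed sketch of that guard, as it appears in the diff:

try:
    from SYS import pipeline as _pipeline_ctx
    # Clear the live-progress handle only if this pipeline module provides the hook.
    if hasattr(_pipeline_ctx, "set_live_progress"):
        _pipeline_ctx.set_live_progress(None)
except Exception:
    pass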
@@ -3485,7 +3485,7 @@ class MedeiaCLI:
help="JSON string of seed items"
),
) -> None:
-import pipeline as ctx
+from SYS import pipeline as ctx
config = self._config_loader.load()
debug_enabled = bool(config.get("debug", False))
@@ -4229,7 +4229,7 @@ Come to love it when others take what you share, as there is no greater joy
pipeline_ctx_ref = None
try:
-import pipeline as ctx
+from SYS import pipeline as ctx
ctx.set_current_command_text(user_input)
pipeline_ctx_ref = ctx
@@ -4257,7 +4257,7 @@ Come to love it when others take what you share, as there is no greater joy
if len(tokens) == 1 and tokens[0] == "@,,":
try:
-import pipeline as ctx
+from SYS import pipeline as ctx
if ctx.restore_next_result_table():
last_table = (
@@ -4286,7 +4286,7 @@ Come to love it when others take what you share, as there is no greater joy
if len(tokens) == 1 and tokens[0] == "@..":
try:
-import pipeline as ctx
+from SYS import pipeline as ctx
if ctx.restore_previous_result_table():
last_table = (