2469 lines
93 KiB
Python
2469 lines
93 KiB
Python
"""Modern Textual UI for driving Medeia-Macina pipelines."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import json
|
|
import os
|
|
import re
|
|
import sys
|
|
import subprocess
|
|
import time
|
|
from pathlib import Path
|
|
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple
|
|
from rich.text import Text
|
|
|
|
from textual import on, work
|
|
from textual.app import App, ComposeResult
|
|
from textual.binding import Binding
|
|
from textual.events import Key
|
|
from textual.containers import Container, Horizontal, Vertical
|
|
from textual.screen import ModalScreen
|
|
from textual.widgets import (
|
|
Button,
|
|
DataTable,
|
|
Footer,
|
|
Header,
|
|
Input,
|
|
Label,
|
|
OptionList,
|
|
Select,
|
|
Static,
|
|
TextArea,
|
|
Tree,
|
|
)
|
|
from textual.widgets.option_list import Option
|
|
try:
|
|
from textual.suggester import SuggestFromList
|
|
except Exception: # pragma: no cover - Textual version dependent
|
|
SuggestFromList = None # type: ignore[assignment]
|
|
|
|
import logging
|
|
logger = logging.getLogger(__name__)
|
|
|
|
BASE_DIR = Path(__file__).resolve().parent
|
|
REPO_ROOT = BASE_DIR
|
|
TUI_DIR = REPO_ROOT / "TUI"
|
|
for path in (REPO_ROOT, TUI_DIR):
|
|
str_path = str(path)
|
|
if str_path not in sys.path:
|
|
sys.path.insert(0, str_path)
|
|
|
|
from TUI.pipeline_runner import PipelineRunResult # type: ignore # noqa: E402
|
|
from SYS.result_table import Table, extract_hash_value, extract_store_value, get_result_table_row_style # type: ignore # noqa: E402
|
|
|
|
from SYS.config import load_config # type: ignore # noqa: E402
|
|
from SYS.database import db
|
|
from Store.registry import Store as StoreRegistry # type: ignore # noqa: E402
|
|
from SYS.cmdlet_catalog import ensure_registry_loaded, list_cmdlet_names # type: ignore # noqa: E402
|
|
from SYS.cli_syntax import validate_pipeline_text # type: ignore # noqa: E402
|
|
|
|
from TUI.pipeline_runner import PipelineRunner # type: ignore # noqa: E402
|
|
|
|
|
|
def _dedup_preserve_order(items: List[str]) -> List[str]:
|
|
out: List[str] = []
|
|
seen: set[str] = set()
|
|
for raw in items:
|
|
s = str(raw or "").strip()
|
|
if not s:
|
|
continue
|
|
key = s.lower()
|
|
if key in seen:
|
|
continue
|
|
seen.add(key)
|
|
out.append(s)
|
|
return out
|
|
|
|
|
|
def _extract_tag_names(emitted: Sequence[Any]) -> List[str]:
    """Collect tag names from a heterogeneous sequence of emitted objects.

    Supports objects exposing a string ``tag_name`` attribute, and dicts
    carrying either a ``tag`` list/tuple (or a single string), or one of
    the fallback string keys ``tag_name``/``value``/``name``.  The result
    is de-duplicated case-insensitively, preserving first-seen order.
    """
    tags: List[str] = []
    for obj in emitted or []:
        try:
            if hasattr(obj, "tag_name"):
                val = getattr(obj, "tag_name")
                if val and isinstance(val, str):
                    tags.append(val)
                    continue
        except Exception:
            logger.exception("Error extracting tag_name in _extract_tag_names")

        if isinstance(obj, dict):
            # Prefer explicit tag lists
            tag_list = obj.get("tag")
            if isinstance(tag_list, (list, tuple)):
                for t in tag_list:
                    if isinstance(t, str) and t.strip():
                        tags.append(t.strip())
            elif isinstance(tag_list, str) and tag_list.strip():
                # BUGFIX: a bare string "tag" value used to be silently
                # dropped (the old code only handled list/tuple values but
                # still skipped the fallback keys below because the value
                # was truthy). Treat it as a single tag.
                tags.append(tag_list.strip())
            if tag_list:
                continue
            # Fall back to individual tag_name/value/name strings
            for k in ("tag_name", "value", "name"):
                v = obj.get(k)
                if isinstance(v, str) and v.strip():
                    tags.append(v.strip())
                    break
            continue
    return _dedup_preserve_order(tags)
|
|
|
|
|
|
def _extract_tag_names_from_table(table: Any) -> List[str]:
    """Best-effort extraction of tag names from a result-table object.

    Gathers candidate payloads from ``table.get_payloads()`` (when present)
    plus any row column literally named "tag", then defers to
    ``_extract_tag_names`` for normalization.
    """
    if not table:
        return []

    candidates: List[Any] = []

    payload_getter = getattr(table, "get_payloads", None)
    if callable(payload_getter):
        try:
            found = payload_getter()
            if found:
                candidates.extend(found)
        except Exception:
            logger.exception("Error while calling table.get_payloads")

    for row in getattr(table, "rows", []) or []:
        for col in getattr(row, "columns", []) or []:
            col_name = str(getattr(col, "name", "") or "").strip().lower()
            if col_name != "tag":
                continue
            value = getattr(col, "value", None)
            if value:
                candidates.append({"tag_name": value})

    if not candidates:
        return []
    return _extract_tag_names(candidates)
|
|
|
|
|
|
class TextPopup(ModalScreen[None]):
    """Simple modal showing a title plus a read-only block of text."""

    def __init__(self, *, title: str, text: str) -> None:
        super().__init__()
        self._title = str(title)
        self._text = str(text or "")

    def compose(self) -> ComposeResult:
        """Title, read-only text area, and a single Close button."""
        yield Static(self._title, id="popup-title")
        yield TextArea(self._text, id="popup-text", read_only=True)
        yield Button("Close", id="popup-close")

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Dismiss the popup when Close is pressed."""
        if event.button.id != "popup-close":
            return
        self.dismiss(None)
|
|
|
|
|
|
class ActionMenuPopup(ModalScreen[Optional[str]]):
    """Modal menu of (label, key) actions; dismisses with the chosen key.

    Dismisses with ``None`` on Close, a malformed button id, or an
    out-of-range index.
    """

    def __init__(self, *, actions: List[Tuple[str, str]]) -> None:
        super().__init__()
        self._actions = list(actions or [])

    def compose(self) -> ComposeResult:
        """One button per action (or a placeholder), plus a Close footer."""
        yield Static("Actions", id="popup-title")
        with Vertical(id="actions-list"):
            if not self._actions:
                yield Static("No actions available", id="actions-empty")
            else:
                for index, (label, _key) in enumerate(self._actions):
                    yield Button(str(label), id=f"actions-btn-{index}")
        with Horizontal(id="actions-footer"):
            yield Button("Close", id="actions-close")

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Map the pressed button back to its action key and dismiss."""
        if event.button.id == "actions-close":
            self.dismiss(None)
            return
        btn_id = str(getattr(event.button, "id", "") or "")
        if not btn_id.startswith("actions-btn-"):
            return
        try:
            index = int(btn_id.rsplit("-", 1)[-1])
        except Exception:
            self.dismiss(None)
            return
        within_range = 0 <= index < len(self._actions)
        self.dismiss(str(self._actions[index][1]) if within_range else None)
|
|
|
|
|
|
class TagEditorPopup(ModalScreen[None]):
    """Modal tag editor for a single result item.

    Loads the item's tags (store backend first, `get-tag` pipeline as a
    fallback), lets the user edit them one-per-line in a TextArea, and
    saves the diff via ``delete-tag``/``add-tag`` pipelines executed on a
    background worker thread.
    """

    def __init__(
        self,
        *,
        seeds: Any,
        store_name: str,
        file_hash: Optional[str]
    ) -> None:
        """Capture pipeline seeds plus the store/hash identifying the file."""
        super().__init__()
        self._seeds = seeds
        # Normalized store name; empty string when unknown.
        self._store = str(store_name or "").strip()
        # File hash; empty string when the item has no usable hash.
        self._hash = str(file_hash or "").strip() if file_hash else ""
        # Tags as last loaded/saved; diffed against the editor on save.
        self._original_tags: List[str] = []
        # Widget handles, populated in on_mount().
        self._status: Optional[Static] = None
        self._editor: Optional[TextArea] = None

    def compose(self) -> ComposeResult:
        """Title, tag editor, Save/Close buttons, and a status line."""
        yield Static("Tags", id="popup-title")
        yield TextArea("", id="tags-editor")
        with Horizontal(id="tags-buttons"):
            yield Button("Save", id="tags-save")
            yield Button("Close", id="tags-close")
        yield Static("", id="tags-status")

    def on_mount(self) -> None:
        """Cache widget references and kick off the background tag load."""
        self._status = self.query_one("#tags-status", Static)
        self._editor = self.query_one("#tags-editor", TextArea)
        self._set_status("Loading tags…")
        self._load_tags_background()

    def _set_status(self, msg: str) -> None:
        """Update the status line (no-op before on_mount has run)."""
        if self._status:
            self._status.update(str(msg or ""))

    @work(thread=True)
    def _load_tags_background(self) -> None:
        """Worker thread: fetch tags from the store, falling back to `get-tag`.

        UI updates are marshalled back via ``call_from_thread``; if that
        fails, the update is attempted directly as a last resort.
        """
        app = self.app  # PipelineHubApp
        tags = self._fetch_tags_from_store()
        if not tags:
            try:
                runner: PipelineRunner = getattr(app, "executor")
                # "@1" addresses the first seeded item in an isolated run.
                cmd = "@1 | get-tag"
                res = runner.run_pipeline(cmd, seeds=self._seeds, isolate=True)
                tags = _extract_tag_names_from_table(getattr(res, "result_table", None))
                if not tags:
                    tags = _extract_tag_names(getattr(res, "emitted", []))
            except Exception as exc:
                tags = []
                # Report the failure on the UI thread when possible.
                try:
                    app.call_from_thread(
                        self._set_status,
                        f"Error: {type(exc).__name__}: {exc}"
                    )
                except Exception:
                    self._set_status(f"Error: {type(exc).__name__}: {exc}")
        self._original_tags = tags
        try:
            app.call_from_thread(self._apply_loaded_tags, tags)
        except Exception:
            self._apply_loaded_tags(tags)

    def _apply_loaded_tags(self, tags: List[str]) -> None:
        """Replace the editor contents with *tags* (one per line)."""
        if self._editor:
            self._editor.text = "\n".join(tags)
        self._set_status(f"Loaded {len(tags)} tag(s)")

    def _fetch_tags_from_store(self) -> Optional[List[str]]:
        """Read tags directly from the configured store backend.

        Returns:
            None when the store/hash is missing, the backend cannot be
            resolved, or the read fails — callers treat None as "fall back
            to the pipeline".  An empty list means the backend answered
            with no tags.
        """
        if not self._store or not self._hash:
            return None
        try:
            cfg = load_config() or {}
        except Exception:
            cfg = {}
        store_key = str(self._store or "").strip()
        hash_value = str(self._hash or "").strip().lower()
        if not store_key or not hash_value:
            return None
        try:
            registry = StoreRegistry(config=cfg, suppress_debug=True)
        except Exception:
            return []
        # Case-insensitive backend name lookup.
        match = None
        normalized = store_key.lower()
        for name in registry.list_backends():
            if str(name or "").strip().lower() == normalized:
                match = name
                break
        if match is None:
            return None
        try:
            backend = registry[match]
        except KeyError:
            return None
        try:
            tags, _src = backend.get_tag(hash_value, config=cfg)
            if not tags:
                return []
            filtered = [str(t).strip() for t in tags if str(t).strip()]
            return _dedup_preserve_order(filtered)
        except Exception:
            return None

    def _parse_editor_tags(self) -> List[str]:
        """Parse the editor text into a de-duplicated list of tag lines."""
        raw = ""
        try:
            raw = str(self._editor.text or "") if self._editor else ""
        except Exception:
            raw = ""
        lines = [t.strip() for t in raw.replace("\r\n", "\n").split("\n")]
        return _dedup_preserve_order([t for t in lines if t])

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Handle the Save and Close buttons."""
        if event.button.id == "tags-close":
            self.dismiss(None)
            return
        if event.button.id == "tags-save":
            self._save_tags()

    def _save_tags(self) -> None:
        """Diff the editor tags against the loaded set and persist changes."""
        desired = self._parse_editor_tags()
        current = _dedup_preserve_order(list(self._original_tags or []))

        # Case-insensitive comparison; original casing kept for the ops.
        desired_set = {t.lower()
                       for t in desired}
        current_set = {t.lower()
                       for t in current}

        to_add = [t for t in desired if t.lower() not in current_set]
        to_del = [t for t in current if t.lower() not in desired_set]

        if not to_add and not to_del:
            self._set_status("No changes")
            return

        self._set_status("Saving…")
        self._save_tags_background(to_add, to_del, desired)

    @work(thread=True)
    def _save_tags_background(
        self,
        to_add: List[str],
        to_del: List[str],
        desired: List[str]
    ) -> None:
        """Worker thread: run delete-tag/add-tag pipelines and refresh the UI.

        Args:
            to_add: Tags present in the editor but not in the loaded set.
            to_del: Tags in the loaded set removed from the editor.
            desired: Full desired tag list (used if the store re-read fails).
        """
        app = self.app  # PipelineHubApp

        def _log_message(msg: str) -> None:
            # Forward a line to the app log, marshalled onto the UI thread.
            if not msg:
                return
            try:
                app.call_from_thread(app._append_log_line, msg)
            except Exception:
                logger.exception("Failed to append log line from background thread")

        def _log_pipeline_command(stage: str, cmd: str) -> None:
            if not cmd:
                return
            _log_message(f"tags-save: {stage}: {cmd}")

        def _log_pipeline_result(stage: str, result: PipelineRunResult | None) -> None:
            # Log status, error, and every stdout/stderr line for one stage.
            if result is None:
                return
            status = "success" if getattr(result, "success", False) else "failed"
            _log_message(f"tags-save: {stage} result: {status}")
            error = str(getattr(result, "error", "") or "").strip()
            if error:
                _log_message(f"tags-save: {stage} error: {error}")
            for attr in ("stdout", "stderr"):
                raw = str(getattr(result, attr, "") or "").strip()
                if not raw:
                    continue
                for line in raw.splitlines():
                    _log_message(f"tags-save: {stage} {attr}: {line}")

        try:
            runner: PipelineRunner = getattr(app, "executor")
            # JSON-quote tokens so store names / tags with spaces survive
            # the CLI re-parse.
            store_tok = json.dumps(self._store)
            query_chunk = f" -query {json.dumps(f'hash:{self._hash}')}" if self._hash else ""

            failures: List[str] = []

            if to_del:
                del_args = " ".join(json.dumps(t) for t in to_del)
                del_cmd = f"delete-tag -store {store_tok}{query_chunk} {del_args}"
                _log_pipeline_command("delete-tag", del_cmd)
                del_res = runner.run_pipeline(del_cmd, seeds=self._seeds, isolate=True)
                _log_pipeline_result("delete-tag", del_res)
                if not getattr(del_res, "success", False):
                    failures.append(
                        str(
                            getattr(del_res,
                                    "error",
                                    "") or getattr(del_res,
                                                   "stderr",
                                                   "") or "delete-tag failed"
                        ).strip()
                    )

            if to_add:
                add_args = " ".join(json.dumps(t) for t in to_add)
                add_cmd = f"add-tag -store {store_tok}{query_chunk} {add_args}"
                _log_pipeline_command("add-tag", add_cmd)
                add_res = runner.run_pipeline(add_cmd, seeds=self._seeds, isolate=True)
                _log_pipeline_result("add-tag", add_res)
                if not getattr(add_res, "success", False):
                    failures.append(
                        str(
                            getattr(add_res,
                                    "error",
                                    "") or getattr(add_res,
                                                   "stderr",
                                                   "") or "add-tag failed"
                        ).strip()
                    )

            if failures:
                # Surface only the first failure; details already logged.
                msg = failures[0]
                try:
                    app.call_from_thread(self._set_status, f"Error: {msg}")
                except Exception:
                    self._set_status(f"Error: {msg}")
                return

            # Prefer re-reading from the store (None means re-read failed,
            # in which case fall back to showing the desired list).
            reloaded = self._fetch_tags_from_store()
            refreshed = reloaded is not None
            tags_to_show = list(reloaded or []) if refreshed else list(desired)
            self._original_tags = list(tags_to_show)
            try:
                app.call_from_thread(self._apply_loaded_tags, tags_to_show)
            except Exception:
                self._apply_loaded_tags(tags_to_show)

            def _refresh_overlay() -> None:
                # Keep the main screen's inline tag overlay in sync.
                try:
                    app.refresh_tag_overlay(
                        self._store,
                        self._hash,
                        tags_to_show,
                        self._seeds,
                    )
                except Exception:
                    logger.exception("Failed to refresh tag overlay")

            try:
                app.call_from_thread(_refresh_overlay)
            except Exception:
                _refresh_overlay()

            status_msg = f"Saved (+{len(to_add)}, -{len(to_del)})"
            if refreshed:
                status_msg += f"; loaded {len(tags_to_show)} tag(s)"
            try:
                app.call_from_thread(self._set_status, status_msg)
            except Exception:
                self._set_status(status_msg)
        except Exception as exc:
            try:
                app.call_from_thread(
                    self._set_status,
                    f"Error: {type(exc).__name__}: {exc}"
                )
            except Exception:
                self._set_status(f"Error: {type(exc).__name__}: {exc}")
|
|
|
|
|
|
class PipelineHubApp(App):
    """Textual front-end that executes cmdlet pipelines inline."""

    # Stylesheet lives alongside the TUI package.
    CSS_PATH = str(TUI_DIR / "tui.tcss")
    # Key bindings; entries with show=False stay active but are hidden
    # from the footer.
    BINDINGS = [
        Binding("ctrl+enter",
                "run_pipeline",
                "Run Pipeline"),
        Binding("ctrl+s",
                "save_inline_tags",
                "Save Tags",
                show=False),
        Binding("f5",
                "refresh_workers",
                "Refresh Workers"),
        Binding("ctrl+l",
                "focus_command",
                "Focus Input",
                show=False),
        Binding("ctrl+g",
                "focus_logs",
                "Focus Logs",
                show=False),
    ]
|
|
|
|
    def __init__(self) -> None:
        """Initialize widget references and per-session pipeline state."""
        super().__init__()
        # Executes cmdlet pipelines on behalf of the UI.
        self.executor = PipelineRunner()
        # Items backing the results table, in row order.
        self.result_items: List[Any] = []
        self.log_lines: List[str] = []
        # Widget handles; populated in on_mount().
        self.command_input: Optional[Input] = None
        self.store_select: Optional[Select] = None
        self.path_input: Optional[Input] = None
        self.log_output: Optional[TextArea] = None
        self.results_table: Optional[DataTable] = None
        self.worker_table: Optional[DataTable] = None
        self.status_panel: Optional[Static] = None
        self.current_result_table: Optional[Table] = None
        self.inline_tags_output: Optional[TextArea] = None
        self.metadata_tree: Optional[Tree[Any]] = None
        self.suggestion_list: Optional[OptionList] = None
        # Autocomplete state.
        self._cmdlet_names: List[str] = []
        self._inline_autocomplete_enabled = False
        # Inline tag editor state (mirrors the currently selected item).
        self._inline_tags_original: List[str] = []
        self._inline_tags_store: str = ""
        self._inline_tags_hash: str = ""
        self._inline_tags_subject: Any = None
        # Draft tags saved before any item is selected (applied to a
        # future pipeline run).
        self._pending_pipeline_tags: List[str] = []
        self._pending_pipeline_tags_applied: bool = False
        # Pipeline execution state.
        self._pipeline_running = False
        self._pipeline_worker: Any = None
        self._keep_results_for_current_run: bool = False
        # Row selection tracking.
        self._selected_row_index: int = 0
        self._last_row_select_index: int = -1
        self._last_row_select_at: float = 0.0
        # Handle/config for an optional server subprocess.
        self._zt_server_proc: Optional[subprocess.Popen] = None
        self._zt_server_last_config: Optional[str] = None
|
|
|
|
    # ------------------------------------------------------------------
    # Layout
    # ------------------------------------------------------------------
    def compose(self) -> ComposeResult:  # noqa: D401 - Textual compose hook
        """Build the app layout: command bar, results panes, store row, logs/workers."""
        yield Header(show_clock=True)
        with Container(id="app-shell"):
            # Top: command input, action buttons, and a status line.
            with Vertical(id="command-pane"):
                with Horizontal(id="command-row"):
                    yield Input(
                        placeholder="Enter pipeline command...",
                        id="pipeline-input"
                    )
                    yield Button("Run", id="run-button")
                    yield Button("Actions", id="actions-button")
                    yield Button("Tags", id="tags-button")
                    yield Button("Metadata", id="metadata-button")
                    yield Button("Relationships", id="relationships-button")
                    yield Button("Config", id="config-button")
                yield Static("Ready", id="status-panel")

            # Middle: results table with inline tags and metadata panes.
            with Vertical(id="results-pane"):
                with Horizontal(id="results-layout"):
                    with Vertical(id="results-list-pane"):
                        yield Label("Items", classes="section-title")
                        yield DataTable(id="results-table")
                    with Vertical(id="results-tags-pane"):
                        yield Label("Tags", classes="section-title")
                        yield TextArea(id="inline-tags-output")
                    with Vertical(id="results-meta-pane"):
                        yield Label("Metadata", classes="section-title")
                        yield Tree("Metadata", id="metadata-tree")

            # Bottom: store/output selectors plus logs and worker status.
            with Vertical(id="bottom-pane"):
                yield Label("Store + Output", classes="section-title")
                with Horizontal(id="store-row"):
                    yield Select([], id="store-select")
                    yield Input(placeholder="Output path (optional)", id="output-path")

                with Horizontal(id="logs-workers-row"):
                    with Vertical(id="logs-pane"):
                        yield Label("Logs", classes="section-title")
                        yield TextArea(id="log-output", read_only=True)

                    with Vertical(id="workers-pane"):
                        yield Label("Workers", classes="section-title")
                        yield DataTable(id="workers-table")
        yield Footer()
|
|
|
|
    def on_mount(self) -> None:
        """Wire up widgets, seed caches, start timers, and run the startup check."""
        self.command_input = self.query_one("#pipeline-input", Input)
        self.status_panel = self.query_one("#status-panel", Static)
        self.results_table = self.query_one("#results-table", DataTable)
        self.worker_table = self.query_one("#workers-table", DataTable)
        self.log_output = self.query_one("#log-output", TextArea)
        self.store_select = self.query_one("#store-select", Select)
        self.path_input = self.query_one("#output-path", Input)
        # The suggestions popup is optional (may be absent from the layout).
        try:
            self.suggestion_list = self.query_one("#cmd-suggestions", OptionList)
        except Exception:
            self.suggestion_list = None
        self.inline_tags_output = self.query_one("#inline-tags-output", TextArea)
        self.metadata_tree = self.query_one("#metadata-tree", Tree)

        if self.suggestion_list:
            self.suggestion_list.display = False

        if self.results_table:
            self.results_table.cursor_type = "row"
            self.results_table.zebra_stripes = False
            # cell_padding may not exist on older Textual versions.
            try:
                self.results_table.cell_padding = 0
            except Exception:
                pass
            self.results_table.add_columns("Row", "Title", "Source", "File")
        if self.worker_table:
            self.worker_table.add_columns("ID", "Type", "Status", "Details")

        if self.inline_tags_output:
            self.inline_tags_output.text = ""

        if self.metadata_tree:
            try:
                self.metadata_tree.root.label = "Metadata"
                self.metadata_tree.root.remove_children()
                self.metadata_tree.root.add("Select an item to view metadata")
                self.metadata_tree.root.expand()
            except Exception:
                pass

        # Initialize the store choices cache at startup (filters disabled stores)
        try:
            from cmdlet._shared import SharedArgs
            config = load_config()
            SharedArgs._refresh_store_choices_cache(config)
        except Exception:
            logger.exception("Failed to refresh store choices cache")

        self._populate_store_options()
        self._load_cmdlet_names()
        self._configure_inline_autocomplete()
        # Poll worker status every 2 seconds when a worker manager exists.
        if self.executor.worker_manager:
            self.set_interval(2.0, self.refresh_workers)
            self.refresh_workers()
        if self.command_input:
            self.command_input.focus()

        # Run startup check automatically
        self._run_pipeline_background(".status")

        # Provide a visible startup summary of configured providers/stores for debugging
        try:
            cfg = load_config() or {}
            provs = list(cfg.get("provider", {}).keys()) if isinstance(cfg.get("provider"), dict) else []
            stores = list(cfg.get("store", {}).keys()) if isinstance(cfg.get("store"), dict) else []
            # Truncate long lists to the first 10 entries for display.
            prov_display = ", ".join(provs[:10]) + ("..." if len(provs) > 10 else "")
            store_display = ", ".join(stores[:10]) + ("..." if len(stores) > 10 else "")
            self._append_log_line(f"Startup config: providers={len(provs)} ({prov_display or '(none)'}), stores={len(stores)} ({store_display or '(none)'}), db={db.db_path.name}")
        except Exception:
            logger.exception("Failed to produce startup config summary")
|
|
|
|
    def on_unmount(self) -> None:
        """Textual unmount hook; no teardown is currently required."""
        pass
|
|
|
|
# ------------------------------------------------------------------
|
|
# Actions
|
|
# ------------------------------------------------------------------
|
|
def action_focus_command(self) -> None:
|
|
if self.command_input:
|
|
self.command_input.focus()
|
|
|
|
def action_focus_logs(self) -> None:
|
|
if self.log_output:
|
|
self.log_output.focus()
|
|
|
|
    def action_run_pipeline(self) -> None:
        """Validate and launch the pipeline currently typed in the command box.

        Intercepts the special ``.config`` command, folds the store/path
        selectors and any pending draft tags into the text, then hands off
        to ``_start_pipeline_execution``.
        """
        if self._pipeline_running:
            # Self-heal if the background worker already stopped (e.g. error in thread).
            worker = self._pipeline_worker
            try:
                is_running = bool(getattr(worker, "is_running", False))
            except Exception:
                is_running = True
            if (worker is None) or (not is_running):
                self._pipeline_running = False
                self._pipeline_worker = None
            else:
                self.notify("Pipeline already running", severity="warning", timeout=3)
                return
        if not self.command_input:
            return
        pipeline_text = self.command_input.value.strip()
        if not pipeline_text:
            self.notify("Enter a pipeline to run", severity="warning", timeout=3)
            return

        # Special interception for .config
        if pipeline_text.lower().strip() == ".config":
            self._open_config_popup()
            self.command_input.value = ""
            return

        pipeline_text = self._apply_store_path_and_tags(pipeline_text)
        pipeline_text = self._apply_pending_pipeline_tags(pipeline_text)
        self._start_pipeline_execution(pipeline_text)
|
|
|
|
    def action_save_inline_tags(self) -> None:
        """Persist edits made in the inline Tags pane (Ctrl+S).

        Three outcomes:
        - No selected item and no hash: stash the lines as pending pipeline
          tags for a future run ("draft mode").
        - A selected item lacking a usable hash or store: warn and abort.
        - Otherwise: diff against the previously loaded tags and save the
          changes in the background.
        """
        if self._pipeline_running:
            self.notify("Pipeline already running", severity="warning", timeout=3)
            return

        # Only act when the inline tags editor actually has focus.
        editor = self.inline_tags_output
        if editor is None or not bool(getattr(editor, "has_focus", False)):
            return

        store_name = str(self._inline_tags_store or "").strip()
        file_hash = str(self._inline_tags_hash or "").strip()
        seeds = self._inline_tags_subject
        selected_item = self._item_for_row_index(self._selected_row_index)

        # Fill any missing context from the currently selected row.
        item, store_name_fallback, file_hash_fallback = self._resolve_selected_item()
        if not seeds:
            seeds = item
        if not store_name and store_name_fallback:
            store_name = str(store_name_fallback)
        if not file_hash and file_hash_fallback:
            file_hash = str(file_hash_fallback)
        file_hash = self._normalize_hash_text(file_hash)

        raw_text = ""
        try:
            raw_text = str(editor.text or "")
        except Exception:
            raw_text = ""

        # One tag per line, trimmed, de-duplicated case-insensitively.
        desired = _dedup_preserve_order(
            [
                str(line).strip()
                for line in raw_text.replace("\r\n", "\n").split("\n")
                if str(line).strip()
            ]
        )

        # Contextual draft mode: no selected result context yet (e.g., pre-download tagging).
        if selected_item is None and not file_hash:
            self._pending_pipeline_tags = list(desired)
            self._pending_pipeline_tags_applied = False
            self._inline_tags_original = list(desired)
            self._inline_tags_store = str(self._get_selected_store() or "")
            self._inline_tags_hash = ""
            self._inline_tags_subject = None
            self._set_inline_tags(list(desired))
            self._set_status("Saved pending pipeline tags", level="success")
            self.notify(f"Saved {len(desired)} pending tag(s)", timeout=3)
            return

        if selected_item is not None and not file_hash:
            self.notify(
                "Selected item is missing a usable hash; cannot save tags to store",
                severity="warning",
                timeout=5,
            )
            return

        if not store_name:
            self.notify("Selected item missing store", severity="warning", timeout=4)
            return

        # Compute the add/delete sets against the last loaded tags.
        current = _dedup_preserve_order(list(self._inline_tags_original or []))
        desired_set = {t.lower() for t in desired}
        current_set = {t.lower() for t in current}
        to_add = [t for t in desired if t.lower() not in current_set]
        to_del = [t for t in current if t.lower() not in desired_set]

        if not to_add and not to_del:
            self.notify("No tag changes", timeout=2)
            return

        self._set_status("Saving tags…", level="info")
        self._save_inline_tags_background(
            store_name=store_name,
            file_hash=file_hash,
            seeds=seeds,
            to_add=to_add,
            to_del=to_del,
            desired=desired,
        )
|
|
|
|
    def _start_pipeline_execution(
        self,
        pipeline_text: str,
        *,
        seeds: Optional[Any] = None,
        seed_table: Optional[Any] = None,
        clear_log: bool = True,
        clear_results: bool = True,
        keep_existing_results: bool = False,
    ) -> None:
        """Mark the app busy, reset panes as requested, and spawn the run.

        Args:
            pipeline_text: Pipeline command to execute (blank is rejected).
            seeds: Optional seed objects forwarded to the runner.
            seed_table: Optional result table forwarded to the runner.
            clear_log: Clear the log pane before running.
            clear_results: Clear the results table before running.
            keep_existing_results: Preserve current results during this run.
        """
        command = str(pipeline_text or "").strip()
        if not command:
            self.notify("Empty pipeline", severity="warning", timeout=3)
            return

        self._pipeline_running = True
        self._keep_results_for_current_run = bool(keep_existing_results)
        self._set_status("Running…", level="info")
        # Hide and empty the autocomplete popup while the pipeline runs.
        if self.suggestion_list:
            try:
                self.suggestion_list.display = False
                self.suggestion_list.clear_options()  # type: ignore[attr-defined]
            except Exception:
                pass
        if clear_log:
            self._clear_log()
        self._append_log_line(f"$ {command}")
        if clear_results:
            self._clear_results()
        self._pipeline_worker = self._run_pipeline_background(command, seeds, seed_table)
|
|
|
|
@on(Input.Changed, "#pipeline-input")
|
|
def on_pipeline_input_changed(self, event: Input.Changed) -> None:
|
|
text = str(event.value or "")
|
|
self._update_suggestions(text)
|
|
self._update_syntax_status(text)
|
|
|
|
@on(OptionList.OptionSelected, "#cmd-suggestions")
|
|
def on_suggestion_selected(self, event: OptionList.OptionSelected) -> None:
|
|
if not self.command_input or not self.suggestion_list:
|
|
return
|
|
try:
|
|
suggestion = str(event.option.prompt)
|
|
except Exception:
|
|
return
|
|
new_text = self._apply_suggestion_to_text(
|
|
str(self.command_input.value or ""),
|
|
suggestion
|
|
)
|
|
self.command_input.value = new_text
|
|
self._move_command_cursor_to_end()
|
|
self.suggestion_list.display = False
|
|
self.command_input.focus()
|
|
|
|
    def action_refresh_workers(self) -> None:
        """Manually refresh the workers table (bound to F5)."""
        self.refresh_workers()
|
|
|
|
# ------------------------------------------------------------------
|
|
# Event handlers
|
|
# ------------------------------------------------------------------
|
|
def on_button_pressed(self, event: Button.Pressed) -> None:
|
|
if event.button.id == "run-button":
|
|
self.action_run_pipeline()
|
|
elif event.button.id == "actions-button":
|
|
self._open_actions_popup()
|
|
elif event.button.id == "tags-button":
|
|
self._open_tags_popup()
|
|
elif event.button.id == "metadata-button":
|
|
self._open_metadata_popup()
|
|
elif event.button.id == "relationships-button":
|
|
self._open_relationships_popup()
|
|
elif event.button.id == "config-button":
|
|
self._open_config_popup()
|
|
|
|
    def _open_config_popup(self) -> None:
        """Open the configuration modal; state reloads when it closes."""
        # Imported lazily to avoid loading the modal at startup.
        from TUI.modalscreen.config_modal import ConfigModal
        self.push_screen(ConfigModal(), callback=self.on_config_closed)
|
|
|
|
    def on_config_closed(self, result: Any = None) -> None:
        """Call when the config modal is dismissed to reload session data.

        Re-reads the config from disk, clears UI state and shared caches,
        rebuilds the store dropdown and cmdlet name list, then re-runs the
        ``.status`` startup check.
        """
        try:
            from SYS.config import load_config, clear_config_cache
            from cmdlet._shared import SharedArgs
            # Force a fresh load from disk
            clear_config_cache()
            cfg = load_config()

            # Clear UI state to show a "fresh" start
            self._clear_results()
            self._clear_log()
            self._append_log_line(">>> RESTARTING SESSION (Config updated)")
            self._set_status("Reloading config…", level="info")

            # Clear shared caches (especially store selection choices)
            SharedArgs._refresh_store_choices_cache(cfg)
            # Update the global SharedArgs choices so cmdlets pick up new stores
            SharedArgs.STORE.choices = SharedArgs.get_store_choices(cfg, force=True)

            # Re-build our local dropdown
            self._populate_store_options()
            # Reload cmdlet names (in case new ones were added or indexed)
            self._load_cmdlet_names(force=True)
            # Optionally update executor config if needed
            cfg_loader = getattr(self.executor, "_config_loader", None)
            if cfg_loader is not None and hasattr(cfg_loader, "load"):
                cfg_loader.load()

            self.notify("Configuration reloaded")

            # Use the existing background runner to show the status table
            # This will append the IGNITIO table to the logs/results
            self._run_pipeline_background(".status")

        except Exception as exc:
            self.notify(f"Error refreshing config: {exc}", severity="error")
|
|
|
|
def on_input_submitted(self, event: Input.Submitted) -> None:
|
|
if event.input.id == "pipeline-input":
|
|
if self.suggestion_list:
|
|
try:
|
|
self.suggestion_list.display = False
|
|
except Exception:
|
|
pass
|
|
self.action_run_pipeline()
|
|
|
|
def on_key(self, event: Key) -> None:
|
|
# Make Tab accept autocomplete when typing commands.
|
|
if event.key != "tab":
|
|
return
|
|
if not self.command_input or not self.command_input.has_focus:
|
|
return
|
|
suggestion = self._get_first_suggestion()
|
|
if not suggestion:
|
|
suggestion = self._best_cmdlet_match(
|
|
self._current_cmd_prefix(str(self.command_input.value or ""))
|
|
)
|
|
if not suggestion:
|
|
return
|
|
|
|
self.command_input.value = self._apply_suggestion_to_text(
|
|
str(self.command_input.value or ""),
|
|
suggestion
|
|
)
|
|
self._move_command_cursor_to_end()
|
|
if self.suggestion_list:
|
|
self.suggestion_list.display = False
|
|
event.prevent_default()
|
|
event.stop()
|
|
|
|
def _get_first_suggestion(self) -> str:
|
|
if not self.suggestion_list or not bool(getattr(self.suggestion_list,
|
|
"display",
|
|
False)):
|
|
return ""
|
|
# Textual OptionList API differs across versions; handle best-effort.
|
|
try:
|
|
options = list(getattr(self.suggestion_list, "options", []) or [])
|
|
if options:
|
|
first = options[0]
|
|
return str(getattr(first, "prompt", "") or "")
|
|
except Exception:
|
|
logger.exception("Error retrieving first suggestion from suggestion list")
|
|
return ""
|
|
|
|
def _move_command_cursor_to_end(self) -> None:
|
|
if not self.command_input:
|
|
return
|
|
value = str(self.command_input.value or "")
|
|
end_pos = len(value)
|
|
|
|
try:
|
|
self.command_input.cursor_position = end_pos
|
|
return
|
|
except Exception:
|
|
pass
|
|
|
|
try:
|
|
self.command_input.cursor_pos = end_pos # type: ignore[attr-defined]
|
|
return
|
|
except Exception:
|
|
pass
|
|
|
|
for method_name in ("action_end", "action_cursor_end", "end"):
|
|
method = getattr(self.command_input, method_name, None)
|
|
if callable(method):
|
|
try:
|
|
method()
|
|
return
|
|
except Exception:
|
|
continue
|
|
|
|
def _best_cmdlet_match(self, prefix: str) -> str:
|
|
pfx = str(prefix or "").strip().lower()
|
|
if not pfx:
|
|
return ""
|
|
for name in self._cmdlet_names:
|
|
try:
|
|
candidate = str(name)
|
|
except Exception:
|
|
continue
|
|
if candidate.lower().startswith(pfx):
|
|
return candidate
|
|
return ""
|
|
|
|
def _configure_inline_autocomplete(self) -> None:
|
|
self._inline_autocomplete_enabled = False
|
|
if not self.command_input:
|
|
return
|
|
if SuggestFromList is None:
|
|
return
|
|
try:
|
|
self.command_input.suggester = SuggestFromList(
|
|
list(self._cmdlet_names),
|
|
case_sensitive=False,
|
|
)
|
|
self._inline_autocomplete_enabled = True
|
|
except Exception:
|
|
self._inline_autocomplete_enabled = False
|
|
|
|
    def _populate_store_options(self) -> None:
        """Populate the store dropdown from the configured Store registry."""
        if not self.store_select:
            return
        # Config load is best-effort; an empty config still yields defaults below.
        try:
            cfg = load_config() or {}
        except Exception:
            cfg = {}

        stores: List[str] = []
        try:
            stores = StoreRegistry(config=cfg, suppress_debug=True).list_backends()
        except Exception:
            logger.exception("Failed to list store backends from StoreRegistry")
            stores = []

        # Always offer a reasonable default even if config is missing.
        if "local" not in [s.lower() for s in stores]:
            stores = ["local", *stores]

        # Select expects (label, value) pairs; we use the name for both.
        options = [(name, name) for name in stores]
        try:
            self.store_select.set_options(options)
            if options:
                current = getattr(self.store_select, "value", None)
                # Textual Select uses a sentinel for "no selection".
                if (current is None) or (current == "") or (current is Select.BLANK):
                    self.store_select.value = options[0][1]
        except Exception:
            logger.exception("Failed to set store select options")
|
|
|
|
def _get_selected_store(self) -> Optional[str]:
|
|
if not self.store_select:
|
|
return None
|
|
try:
|
|
value = getattr(self.store_select, "value", None)
|
|
except Exception:
|
|
return None
|
|
|
|
if value is None or value is Select.BLANK:
|
|
return None
|
|
try:
|
|
text = str(value).strip()
|
|
except Exception:
|
|
return None
|
|
|
|
if not text or text == "Select.BLANK":
|
|
return None
|
|
return text
|
|
|
|
    def _apply_store_path_and_tags(self, pipeline_text: str) -> str:
        """Apply the store/path UI fields to the pipeline text.

        Rules (simple + non-destructive):
        - If an output path is set and the first stage is download-file without
          a -path/--path flag, append -path to that stage.
        - If a store is selected and the pipeline has no add-file stage, append
          ``add-file -store <store>`` (download pipelines only).

        Pending tags are handled separately by _apply_pending_pipeline_tags.
        """
        base = str(pipeline_text or "").strip()
        if not base:
            return base

        selected_store = self._get_selected_store()

        # Output path from the optional path input widget (best effort).
        output_path = ""
        if self.path_input:
            try:
                output_path = str(self.path_input.value or "").strip()
            except Exception:
                output_path = ""

        stages = [s.strip() for s in base.split("|") if s.strip()]
        if not stages:
            return base

        # Identify first stage command name for conservative auto-augmentation.
        first_stage_cmd = ""
        try:
            first_stage_cmd = (
                str(stages[0].split()[0]).replace("_", "-").strip().lower()
                if stages[0].split() else ""
            )
        except Exception:
            first_stage_cmd = ""

        # Apply -path to download-file first stage (only if missing)
        if output_path:
            first = stages[0]
            low = first.lower()
            if low.startswith("download-file") and " -path" not in low and " --path" not in low:
                # json.dumps quotes the path so spaces survive argument parsing.
                stages[0] = f"{first} -path {json.dumps(output_path)}"

        joined = " | ".join(stages)

        low_joined = joined.lower()

        # Only auto-append add-file for download pipelines.
        should_auto_add_file = bool(
            selected_store and ("add-file" not in low_joined) and (
                first_stage_cmd in {"download-file"}
            )
        )

        if should_auto_add_file:
            store_token = json.dumps(selected_store)
            joined = f"{joined} | add-file -store {store_token}"

        return joined
|
|
|
|
    def _apply_pending_pipeline_tags(self, pipeline_text: str) -> str:
        """Append draft tags as an ``add-tag`` stage when the pipeline stores files.

        Sets ``self._pending_pipeline_tags_applied`` so the completion handler
        knows whether to clear the drafts (on success) or retain them (on
        failure).  Returns the (possibly augmented) pipeline text.
        """
        command = str(pipeline_text or "").strip()
        pending = list(self._pending_pipeline_tags or [])
        if not command or not pending:
            self._pending_pipeline_tags_applied = False
            return command

        low = command.lower()
        if "add-tag" in low:
            # User already controls tag stage explicitly.
            self._pending_pipeline_tags_applied = False
            return command

        # Apply draft tags when pipeline stores/emits files via add-file.
        if "add-file" not in low:
            self._pending_pipeline_tags_applied = False
            return command

        # json.dumps quotes each tag so embedded spaces survive parsing.
        tag_args = " ".join(json.dumps(t) for t in pending if str(t).strip())
        if not tag_args:
            self._pending_pipeline_tags_applied = False
            return command

        self._pending_pipeline_tags_applied = True
        self.notify(f"Applying {len(pending)} pending tag(s) after pipeline", timeout=3)
        return f"{command} | add-tag {tag_args}"
|
|
|
|
def on_data_table_row_highlighted(self, event: DataTable.RowHighlighted) -> None:
|
|
if not self.results_table or event.control is not self.results_table:
|
|
return
|
|
index = int(event.cursor_row or 0)
|
|
if index < 0:
|
|
index = 0
|
|
self._selected_row_index = index
|
|
self._refresh_inline_detail_panels(index)
|
|
|
|
def on_data_table_row_selected(self, event: DataTable.RowSelected) -> None:
|
|
if not self.results_table or event.control is not self.results_table:
|
|
return
|
|
index = int(event.cursor_row or 0)
|
|
if index < 0:
|
|
index = 0
|
|
self._selected_row_index = index
|
|
self._refresh_inline_detail_panels(index)
|
|
if self._is_probable_row_double_click(index):
|
|
self._handle_row_double_click(index)
|
|
|
|
def _is_probable_row_double_click(self, index: int) -> bool:
|
|
now = time.monotonic()
|
|
same_row = (int(index) == int(self._last_row_select_index))
|
|
close_in_time = (now - float(self._last_row_select_at)) <= 0.45
|
|
is_double = bool(same_row and close_in_time)
|
|
self._last_row_select_index = int(index)
|
|
self._last_row_select_at = now
|
|
return is_double
|
|
|
|
    def _handle_row_double_click(self, index: int) -> None:
        """On double-click of a Tidal artist row, drill into its albums via @N."""
        item = self._item_for_row_index(index)
        if item is None:
            return

        metadata = self._normalize_item_metadata(item)
        table_hint = self._extract_table_hint(metadata)
        # Double-click drill-down only applies to Tidal artist rows.
        if not self._is_tidal_artist_context(metadata, table_hint=table_hint):
            return

        seed_items = self._current_seed_items()
        if not seed_items:
            self.notify("No current result items available for artist selection", severity="warning", timeout=3)
            return

        # @N row selection is 1-based in the CLI syntax.
        row_num = int(index or 0) + 1
        if row_num < 1:
            row_num = 1

        # Mirror CLI behavior: selecting artist row runs @N and opens album list.
        self._start_pipeline_execution(
            f"@{row_num}",
            seeds=seed_items,
            seed_table=self.current_result_table,
            clear_log=False,
            clear_results=False,
            keep_existing_results=False,
        )
|
|
|
|
# ------------------------------------------------------------------
|
|
# Pipeline execution helpers
|
|
# ------------------------------------------------------------------
|
|
    @work(exclusive=True, thread=True)
    def _run_pipeline_background(
        self,
        pipeline_text: str,
        seeds: Optional[Any] = None,
        seed_table: Optional[Any] = None,
    ) -> None:
        """Execute the pipeline on a Textual worker thread.

        Runs under ``@work(thread=True)``, so all UI interaction must be
        marshalled back via ``call_from_thread``.  The completion handler is
        always invoked, even when the executor raises.
        """
        try:
            run_result = self.executor.run_pipeline(
                pipeline_text,
                seeds=seeds,
                seed_table=seed_table,
                on_log=self._log_from_worker
            )
        except Exception as exc:
            # Ensure the UI never gets stuck in "running" state.
            run_result = PipelineRunResult(
                pipeline=str(pipeline_text or ""),
                success=False,
                error=f"{type(exc).__name__}: {exc}",
                stderr=f"{type(exc).__name__}: {exc}",
            )
        self.call_from_thread(self._on_pipeline_finished, run_result)
|
|
|
|
    def _on_pipeline_finished(self, run_result: PipelineRunResult) -> None:
        """UI-thread completion handler: report status, logs and new results."""
        self._pipeline_running = False
        self._pipeline_worker = None
        # "Keep results" is a one-shot flag consumed by this run.
        keep_existing_results = bool(self._keep_results_for_current_run)
        self._keep_results_for_current_run = False
        pending_applied = bool(self._pending_pipeline_tags_applied)
        pending_count = len(self._pending_pipeline_tags)
        # Hide any leftover autocomplete dropdown from the finished run.
        if self.suggestion_list:
            try:
                self.suggestion_list.display = False
                self.suggestion_list.clear_options()  # type: ignore[attr-defined]
            except Exception:
                pass
        status_level = "success" if run_result.success else "error"
        status_text = "Completed" if run_result.success else "Failed"
        self._set_status(status_text, level=status_level)

        if not run_result.success:
            self.notify(
                run_result.error or "Pipeline failed",
                severity="error",
                timeout=6
            )
            if pending_applied and pending_count:
                # Keep the drafts so the user can retry the failed run.
                self.notify("Pending tags were retained (pipeline failed)", severity="warning", timeout=4)
        else:
            if pending_applied and pending_count:
                # Success: the drafts were persisted by the pipeline, clear them.
                self._pending_pipeline_tags = []
                self.notify(f"Pipeline completed; applied {pending_count} pending tag(s)", timeout=4)
            else:
                self.notify("Pipeline completed", timeout=3)

        self._pending_pipeline_tags_applied = False

        # Mirror captured process output into the log pane.
        if run_result.stdout.strip():
            self._append_log_line("stdout:")
            self._append_block(run_result.stdout)
        if run_result.stderr.strip():
            self._append_log_line("stderr:")
            self._append_block(run_result.stderr)

        # One summary line per pipeline stage.
        for stage in run_result.stages:
            summary = f"[{stage.status}] {stage.name} -> {len(stage.emitted)} item(s)"
            if stage.error:
                summary += f" ({stage.error})"
            self._append_log_line(summary)

        if not keep_existing_results:
            # Replace the results view with whatever the pipeline emitted.
            emitted = run_result.emitted
            if isinstance(emitted, list):
                self.result_items = emitted
            elif emitted:
                self.result_items = [emitted]
            else:
                self.result_items = []

            self.current_result_table = run_result.result_table
            self._selected_row_index = 0
            self._populate_results_table()
        self.refresh_workers()
|
|
|
|
def _current_seed_items(self) -> List[Any]:
|
|
if self.current_result_table and getattr(self.current_result_table, "rows", None):
|
|
items: List[Any] = []
|
|
for idx in range(len(self.current_result_table.rows)):
|
|
item = self._item_for_row_index(idx)
|
|
if item is not None:
|
|
items.append(item)
|
|
if items:
|
|
return items
|
|
return list(self.result_items or [])
|
|
|
|
def _log_from_worker(self, message: str) -> None:
|
|
self.call_from_thread(self._append_log_line, message)
|
|
|
|
# ------------------------------------------------------------------
|
|
# UI helpers
|
|
# ------------------------------------------------------------------
|
|
@staticmethod
|
|
def _extract_title_for_item(item: Any) -> str:
|
|
if isinstance(item, dict):
|
|
for key in ("title", "name", "path", "url", "hash"):
|
|
value = item.get(key)
|
|
if value is not None:
|
|
text = str(value).strip()
|
|
if text:
|
|
return text
|
|
metadata = item.get("metadata")
|
|
if isinstance(metadata, dict):
|
|
for key in ("title", "name"):
|
|
value = metadata.get(key)
|
|
if value is not None:
|
|
text = str(value).strip()
|
|
if text:
|
|
return text
|
|
return str(item)
|
|
try:
|
|
for key in ("title", "name", "path", "url", "hash"):
|
|
value = getattr(item, key, None)
|
|
if value is not None:
|
|
text = str(value).strip()
|
|
if text:
|
|
return text
|
|
except Exception:
|
|
pass
|
|
return str(item)
|
|
|
|
def _item_for_row_index(self, index: int) -> Any:
|
|
idx = int(index or 0)
|
|
if idx < 0:
|
|
return None
|
|
|
|
if self.current_result_table and 0 <= idx < len(getattr(self.current_result_table, "rows", []) or []):
|
|
row = self.current_result_table.rows[idx]
|
|
payload = getattr(row, "payload", None)
|
|
if payload is not None:
|
|
return payload
|
|
src_idx = getattr(row, "source_index", None)
|
|
if isinstance(src_idx, int) and 0 <= src_idx < len(self.result_items):
|
|
return self.result_items[src_idx]
|
|
|
|
if 0 <= idx < len(self.result_items):
|
|
return self.result_items[idx]
|
|
|
|
return None
|
|
|
|
@staticmethod
|
|
def _split_tag_text(raw: Any) -> List[str]:
|
|
text = str(raw or "").strip()
|
|
if not text:
|
|
return []
|
|
if "\n" in text or "," in text:
|
|
out = []
|
|
for part in re.split(r"[\n,]", text):
|
|
p = str(part or "").strip()
|
|
if p:
|
|
out.append(p)
|
|
return out
|
|
return [text]
|
|
|
|
@staticmethod
|
|
def _normalize_hash_text(raw_hash: Any) -> str:
|
|
value = str(raw_hash or "").strip().lower()
|
|
if len(value) == 64 and all(ch in "0123456789abcdef" for ch in value):
|
|
return value
|
|
return ""
|
|
|
|
def _extract_hash_from_nested(self, value: Any) -> str:
|
|
target_keys = {"hash", "hash_hex", "file_hash", "sha256"}
|
|
|
|
def _scan(node: Any, depth: int = 0) -> str:
|
|
if depth > 8:
|
|
return ""
|
|
if isinstance(node, dict):
|
|
for key in target_keys:
|
|
if key in node:
|
|
normalized = self._normalize_hash_text(node.get(key))
|
|
if normalized:
|
|
return normalized
|
|
for child in node.values():
|
|
found = _scan(child, depth + 1)
|
|
if found:
|
|
return found
|
|
return ""
|
|
if isinstance(node, (list, tuple, set)):
|
|
for child in node:
|
|
found = _scan(child, depth + 1)
|
|
if found:
|
|
return found
|
|
return ""
|
|
|
|
return _scan(value)
|
|
|
|
    def _fetch_store_tags(self, store_name: str, file_hash: str) -> Optional[List[str]]:
        """Read the tags for *file_hash* from the named store backend.

        Returns a deduplicated tag list, ``[]`` when the file has no tags, or
        ``None`` when the store/hash is invalid or the backend lookup fails.
        """
        store_key = str(store_name or "").strip()
        hash_key = self._normalize_hash_text(file_hash)
        if not store_key or not hash_key:
            return None

        # Config load is best-effort; an empty config is acceptable.
        try:
            cfg = load_config() or {}
        except Exception:
            cfg = {}

        try:
            registry = StoreRegistry(config=cfg, suppress_debug=True)
        except Exception:
            return None

        # Match the requested store case-insensitively against known backends.
        match = None
        normalized_store = store_key.lower()
        for name in registry.list_backends():
            if str(name or "").strip().lower() == normalized_store:
                match = name
                break

        if match is None:
            return None

        try:
            backend = registry[match]
        except KeyError:
            return None

        try:
            tags, _src = backend.get_tag(hash_key, config=cfg)
            if not tags:
                return []
            # Strip whitespace and drop empty entries before deduplication.
            filtered = [str(t).strip() for t in tags if str(t).strip()]
            return _dedup_preserve_order(filtered)
        except Exception:
            return None
|
|
|
|
    def _extract_tags_from_nested(self, value: Any) -> List[str]:
        """Collect every tag found under 'tag'/'tags' keys anywhere in *value*.

        Returns the tags in encounter order, deduplicated.
        """
        tags: List[str] = []

        def _add_tag(candidate: Any) -> None:
            # Flatten nested containers and split comma/newline-joined strings.
            if candidate is None:
                return
            if isinstance(candidate, (list, tuple, set)):
                for entry in candidate:
                    _add_tag(entry)
                return
            if isinstance(candidate, str):
                tags.extend(self._split_tag_text(candidate))
                return
            text = str(candidate).strip()
            if text:
                tags.append(text)

        def _walk(node: Any) -> None:
            # Depth-first traversal over dicts and sequences.
            if isinstance(node, dict):
                for key, child in node.items():
                    k = str(key or "").strip().lower()
                    if k in {"tag", "tags"}:
                        _add_tag(child)
                    # Keep descending: tags may also appear deeper inside child.
                    _walk(child)
                return
            if isinstance(node, (list, tuple, set)):
                for child in node:
                    _walk(child)

        _walk(value)
        return _dedup_preserve_order(tags)
|
|
|
|
@staticmethod
|
|
def _normalize_item_metadata(item: Any) -> Dict[str, Any]:
|
|
if isinstance(item, dict):
|
|
data: Dict[str, Any] = {}
|
|
for key, value in item.items():
|
|
k = str(key or "").strip()
|
|
if k in {"columns", "_selection_args", "_selection_action"}:
|
|
continue
|
|
data[k] = value
|
|
return data
|
|
|
|
try:
|
|
as_dict = getattr(item, "to_dict", None)
|
|
if callable(as_dict):
|
|
value = as_dict()
|
|
if isinstance(value, dict):
|
|
return dict(value)
|
|
except Exception:
|
|
pass
|
|
|
|
return {"value": str(item)}
|
|
|
|
@staticmethod
|
|
def _collect_metadata_keys(value: Any, out: Optional[set[str]] = None, depth: int = 0) -> set[str]:
|
|
target = out if out is not None else set()
|
|
if depth > 10:
|
|
return target
|
|
if isinstance(value, dict):
|
|
for key, child in value.items():
|
|
key_text = str(key or "").strip().lower()
|
|
if key_text:
|
|
target.add(key_text)
|
|
PipelineHubApp._collect_metadata_keys(child, target, depth + 1)
|
|
return target
|
|
if isinstance(value, (list, tuple, set)):
|
|
for child in value:
|
|
PipelineHubApp._collect_metadata_keys(child, target, depth + 1)
|
|
return target
|
|
|
|
@staticmethod
|
|
def _extract_nested_value_for_keys(value: Any, target_keys: set[str], depth: int = 0) -> str:
|
|
if depth > 10:
|
|
return ""
|
|
if isinstance(value, dict):
|
|
for key, child in value.items():
|
|
key_text = str(key or "").strip().lower()
|
|
if key_text in target_keys:
|
|
val_text = str(child or "").strip()
|
|
if val_text:
|
|
return val_text
|
|
nested = PipelineHubApp._extract_nested_value_for_keys(child, target_keys, depth + 1)
|
|
if nested:
|
|
return nested
|
|
return ""
|
|
if isinstance(value, (list, tuple, set)):
|
|
for child in value:
|
|
nested = PipelineHubApp._extract_nested_value_for_keys(child, target_keys, depth + 1)
|
|
if nested:
|
|
return nested
|
|
return ""
|
|
|
|
def _extract_table_hint(self, metadata: Dict[str, Any]) -> str:
|
|
hint = self._extract_nested_value_for_keys(metadata, {"table", "source_table", "table_name"})
|
|
if hint:
|
|
return str(hint).strip().lower()
|
|
try:
|
|
current = self.current_result_table
|
|
table_attr = str(getattr(current, "table", "") or "").strip().lower() if current else ""
|
|
if table_attr:
|
|
return table_attr
|
|
except Exception:
|
|
pass
|
|
return ""
|
|
|
|
def _is_youtube_context(self, metadata: Dict[str, Any], *, table_hint: str, url_value: str) -> bool:
|
|
if "youtube" in str(table_hint or ""):
|
|
return True
|
|
if isinstance(url_value, str):
|
|
low_url = url_value.lower()
|
|
if ("youtube.com" in low_url) or ("youtu.be" in low_url):
|
|
return True
|
|
keys = self._collect_metadata_keys(metadata)
|
|
return any("youtube" in key for key in keys)
|
|
|
|
def _is_tidal_track_context(self, metadata: Dict[str, Any], *, table_hint: str) -> bool:
|
|
table_low = str(table_hint or "").strip().lower()
|
|
if ("tidal.track" in table_low) or ("tidal.album" in table_low):
|
|
return True
|
|
keys = self._collect_metadata_keys(metadata)
|
|
return "tidal.track" in keys
|
|
|
|
def _is_tidal_artist_context(self, metadata: Dict[str, Any], *, table_hint: str) -> bool:
|
|
table_low = str(table_hint or "").strip().lower()
|
|
if "tidal.artist" in table_low:
|
|
return True
|
|
keys = self._collect_metadata_keys(metadata)
|
|
has_artist = "tidal.artist" in keys
|
|
has_track = "tidal.track" in keys
|
|
return bool(has_artist and not has_track)
|
|
|
|
def _set_inline_tags(self, tags: List[str]) -> None:
|
|
if not self.inline_tags_output:
|
|
return
|
|
if tags:
|
|
self.inline_tags_output.text = "\n".join(tags)
|
|
else:
|
|
self.inline_tags_output.text = ""
|
|
|
|
    def _set_metadata_tree(self, metadata: Dict[str, Any]) -> None:
        """Rebuild the metadata Tree widget from a (possibly nested) dict."""
        if not self.metadata_tree:
            return
        try:
            root = self.metadata_tree.root
            root.label = "Metadata"
            root.remove_children()

            def _trim(value: Any) -> str:
                # Cap leaf values so a single huge string cannot flood the UI.
                text = str(value)
                if len(text) > 220:
                    return text[:217] + "..."
                return text

            def _render_node(parent: Any, key: str, value: Any, depth: int = 0) -> None:
                # Depth guard: collapse anything deeper than 8 levels.
                if depth > 8:
                    parent.add(f"{key}: ...")
                    return

                if isinstance(value, dict):
                    branch = parent.add(f"{key}")
                    if not value:
                        branch.add("{}")
                        return
                    for child_key, child_value in value.items():
                        _render_node(branch, str(child_key), child_value, depth + 1)
                    return

                if isinstance(value, (list, tuple, set)):
                    items = list(value)
                    branch = parent.add(f"{key} [{len(items)}]")
                    # Only the first 50 entries are rendered; the rest are summarized.
                    max_items = 50
                    for i, child in enumerate(items[:max_items]):
                        _render_node(branch, f"[{i}]", child, depth + 1)
                    if len(items) > max_items:
                        branch.add(f"... {len(items) - max_items} more")
                    return

                # Scalar leaf.
                parent.add(f"{key}: {_trim(value)}")

            if metadata:
                for key, value in metadata.items():
                    _render_node(root, str(key), value)
            else:
                root.add("No metadata")

            root.expand()
        except Exception:
            logger.exception("Failed to render metadata tree")
|
|
|
|
def _clear_inline_detail_panels(self) -> None:
|
|
pending = list(self._pending_pipeline_tags or [])
|
|
self._inline_tags_original = list(pending)
|
|
self._inline_tags_store = str(self._get_selected_store() or "")
|
|
self._inline_tags_hash = ""
|
|
self._inline_tags_subject = None
|
|
self._set_inline_tags(pending)
|
|
self._set_metadata_tree({})
|
|
|
|
    def _refresh_inline_detail_panels(self, index: Optional[int] = None) -> None:
        """Refresh the tag list and metadata tree for the selected row.

        When *index* is None the currently-selected row index is used.
        """
        idx = int(self._selected_row_index if index is None else index)
        item = self._item_for_row_index(idx)
        if item is None:
            # No backing item: fall back to showing only the draft tags.
            pending = list(self._pending_pipeline_tags or [])
            self._inline_tags_original = list(pending)
            self._inline_tags_store = str(self._get_selected_store() or "")
            self._inline_tags_hash = ""
            self._inline_tags_subject = None
            self._set_inline_tags(pending)
            self._set_metadata_tree({})
            return

        metadata = self._normalize_item_metadata(item)
        tags = self._extract_tags_from_nested(metadata)
        _, store_name, file_hash = self._resolve_selected_item()
        resolved_hash = self._normalize_hash_text(file_hash)
        if not resolved_hash:
            # Hash column missing or invalid; scan the metadata payload itself.
            resolved_hash = self._extract_hash_from_nested(metadata)
        # Snapshot the state the inline tag editor needs when saving edits.
        self._inline_tags_original = list(tags)
        self._inline_tags_store = str(store_name or "").strip()
        self._inline_tags_hash = resolved_hash
        self._inline_tags_subject = item
        self._set_inline_tags(tags)
        self._set_metadata_tree(metadata)
|
|
|
|
    @work(thread=True)
    def _save_inline_tags_background(
        self,
        *,
        store_name: str,
        file_hash: str,
        seeds: Any,
        to_add: List[str],
        to_del: List[str],
        desired: List[str],
    ) -> None:
        """Persist inline tag edits on a worker thread via delete-tag/add-tag.

        Runs under ``@work(thread=True)``: all UI interaction goes through
        call_from_thread.  After the pipelines run, the saved tags are re-read
        from the store (when a hash is available) to verify the write.
        """
        failures: List[str] = []
        runner = self.executor
        # json.dumps quotes tokens so spaces survive pipeline parsing.
        store_tok = json.dumps(str(store_name))
        normalized_hash = self._normalize_hash_text(file_hash)
        query_chunk = (
            f" -query {json.dumps(f'hash:{normalized_hash}')}" if normalized_hash else ""
        )

        try:
            # Deletions first, so a tag moved between lists is not lost.
            if to_del:
                del_args = " ".join(json.dumps(t) for t in to_del)
                del_cmd = f"delete-tag -store {store_tok}{query_chunk} {del_args}"
                del_res = runner.run_pipeline(del_cmd, seeds=seeds, isolate=True)
                if not getattr(del_res, "success", False):
                    failures.append(
                        str(
                            getattr(del_res, "error", "")
                            or getattr(del_res, "stderr", "")
                            or "delete-tag failed"
                        ).strip()
                    )

            if to_add:
                add_args = " ".join(json.dumps(t) for t in to_add)
                add_cmd = f"add-tag -store {store_tok}{query_chunk} {add_args}"
                add_res = runner.run_pipeline(add_cmd, seeds=seeds, isolate=True)
                if not getattr(add_res, "success", False):
                    failures.append(
                        str(
                            getattr(add_res, "error", "")
                            or getattr(add_res, "stderr", "")
                            or "add-tag failed"
                        ).strip()
                    )

            if failures:
                # Report only the first failure; the rest are usually duplicates.
                msg = failures[0] if failures else "Tag save failed"
                self.call_from_thread(
                    lambda: self._set_status(f"Tag save failed: {msg}", level="error")
                )
                self.call_from_thread(self.notify, f"Tag save failed: {msg}", severity="error", timeout=6)
                return

            # Verify the write by re-reading the store (requires a valid hash).
            verified_tags = self._fetch_store_tags(store_name, normalized_hash) if normalized_hash else None
            if verified_tags is not None:
                desired_lower = {str(t).strip().lower() for t in desired if str(t).strip()}
                verified_lower = {
                    str(t).strip().lower()
                    for t in verified_tags
                    if str(t).strip()
                }
                missing = [t for t in desired if str(t).strip().lower() not in verified_lower]
                if desired_lower and missing:
                    # Show at most three missing tags in the error message.
                    preview = ", ".join(missing[:3])
                    if len(missing) > 3:
                        preview += ", ..."
                    msg = f"Save verification failed; missing tag(s): {preview}"
                    self.call_from_thread(
                        lambda: self._set_status(msg, level="error")
                    )
                    self.call_from_thread(
                        self.notify,
                        msg,
                        severity="error",
                        timeout=6,
                    )
                    return

            # Prefer the store's view of the tags when verification succeeded.
            final_tags = list(verified_tags) if verified_tags is not None else list(desired)

            def _apply_success() -> None:
                # Runs on the UI thread: sync editor state and panels.
                self._inline_tags_original = list(final_tags)
                self._inline_tags_store = str(store_name or "").strip()
                self._inline_tags_hash = normalized_hash
                self._inline_tags_subject = seeds
                self._set_inline_tags(list(final_tags))
                if normalized_hash:
                    try:
                        self.refresh_tag_overlay(store_name, normalized_hash, list(final_tags), seeds)
                    except Exception:
                        logger.exception("Failed to refresh tag overlay after inline save")
                self._set_status("Tags saved", level="success")
                self.notify(f"Saved tags (+{len(to_add)}, -{len(to_del)})", timeout=3)

            self.call_from_thread(_apply_success)
        except Exception as exc:
            # Never let a worker-thread exception leave the UI in limbo.
            self.call_from_thread(
                lambda: self._set_status(
                    f"Tag save failed: {type(exc).__name__}",
                    level="error",
                )
            )
            self.call_from_thread(
                self.notify,
                f"Tag save failed: {type(exc).__name__}: {exc}",
                severity="error",
                timeout=6,
            )
|
|
|
|
    def _populate_results_table(self) -> None:
        """Rebuild the DataTable from the current result table / result items."""
        if not self.results_table:
            return
        self.results_table.clear(columns=True)

        def _add_columns_with_widths(headers: List[str], widths: List[int]) -> None:
            # Add columns with explicit widths; fall back to auto width when the
            # running Textual version rejects the width keyword.
            table = self.results_table
            if table is None:
                return
            for i, header in enumerate(headers):
                width = max(1, int(widths[i])) if i < len(widths) else max(1, len(str(header)))
                try:
                    table.add_column(str(header), width=width)
                except Exception:
                    table.add_column(str(header))

        def _pad_cell(value: Any, width: int, style: str) -> Text:
            # Right-pad with spaces so the row style spans the full column width.
            text = str(value)
            if width > 0 and len(text) < width:
                text = text + (" " * (width - len(text)))
            return Text(text, style=style)

        if self.current_result_table and self.current_result_table.rows:
            # Primary path: render a numbered title per row of the result table.
            headers = ["#", "Title"]
            normalized_rows: List[List[str]] = []

            for idx in range(len(self.current_result_table.rows)):
                item = self._item_for_row_index(idx)
                title_value = self._extract_title_for_item(item)
                normalized_rows.append([str(idx + 1), title_value])

            # Column widths grow to fit the longest cell in each column.
            widths = [len(h) for h in headers]
            for row in normalized_rows:
                for col_idx, cell in enumerate(row):
                    cell_len = len(str(cell))
                    if cell_len > widths[col_idx]:
                        widths[col_idx] = cell_len

            _add_columns_with_widths(headers, widths)

            for idx, row in enumerate(normalized_rows, 1):
                row_style = get_result_table_row_style(idx - 1)
                styled_cells = [
                    _pad_cell(cell, widths[col_idx], row_style)
                    for col_idx, cell in enumerate(row)
                ]
                # Row key is the 0-based index, used to map back to items.
                self.results_table.add_row(*styled_cells, key=str(idx - 1))
        else:
            # Fallback or empty state
            headers = ["Row", "Title", "Source", "File"]
            if not self.result_items:
                self.results_table.add_columns(*headers)
                self.results_table.add_row("—", "No results", "", "")
                self._clear_inline_detail_panels()
                return

            # Fallback for items without a table
            raw_rows: List[List[str]] = []
            for idx, item in enumerate(self.result_items, start=1):
                raw_rows.append([str(idx), str(item), "—", "—"])

            widths = [len(h) for h in headers]
            for row in raw_rows:
                for col_idx, cell in enumerate(row):
                    cell_len = len(str(cell))
                    if cell_len > widths[col_idx]:
                        widths[col_idx] = cell_len

            _add_columns_with_widths(headers, widths)

            for idx, row in enumerate(raw_rows, start=1):
                row_style = get_result_table_row_style(idx - 1)
                styled_cells = [
                    _pad_cell(cell, widths[col_idx], row_style)
                    for col_idx, cell in enumerate(row)
                ]
                self.results_table.add_row(*styled_cells, key=str(idx - 1))

        # Clamp the selection if the previous index no longer maps to an item.
        if self._item_for_row_index(self._selected_row_index) is None:
            self._selected_row_index = 0
        self._refresh_inline_detail_panels(self._selected_row_index)
|
|
|
|
    def refresh_tag_overlay(self,
                            store_name: str,
                            file_hash: str,
                            tags: List[str],
                            subject: Any) -> None:
        """Update the shared get-tag overlay after manual tag edits.

        Best-effort: silently returns when store/hash are missing or the
        get-tag cmdlet cannot be imported.
        """
        if not store_name or not file_hash:
            return
        # Imported lazily; the cmdlet module may be unavailable in some setups.
        try:
            from cmdlet.get_tag import _emit_tags_as_table
        except Exception:
            return

        try:
            cfg = load_config() or {}
        except Exception:
            cfg = {}

        # Ensure the emitted subject is a dict carrying at least store + hash.
        payload_subject = subject if subject is not None else None
        if not isinstance(payload_subject, dict):
            payload_subject = {
                "store": store_name,
                "hash": file_hash,
            }

        try:
            _emit_tags_as_table(
                list(tags),
                file_hash=file_hash,
                store=store_name,
                config=cfg,
                subject=payload_subject,
            )
        except Exception:
            logger.exception("Failed to emit tags as table")
|
|
|
|
def _load_cmdlet_names(self, force: bool = False) -> None:
|
|
try:
|
|
ensure_registry_loaded(force=force)
|
|
names = list_cmdlet_names(force=force) or []
|
|
self._cmdlet_names = sorted(
|
|
{str(n).replace("_",
|
|
"-")
|
|
for n in names if str(n).strip()}
|
|
)
|
|
except Exception:
|
|
logger.exception("Failed to load cmdlet names")
|
|
self._cmdlet_names = []
|
|
self._configure_inline_autocomplete()
|
|
|
|
def _update_syntax_status(self, text: str) -> None:
|
|
if self._pipeline_running:
|
|
return
|
|
raw = str(text or "").strip()
|
|
if not raw:
|
|
self._set_status("Ready", level="info")
|
|
return
|
|
try:
|
|
err = validate_pipeline_text(raw)
|
|
except Exception:
|
|
err = None
|
|
if err:
|
|
self._set_status(err.message, level="error")
|
|
else:
|
|
self._set_status("Ready", level="info")
|
|
|
|
    def _update_suggestions(self, text: str) -> None:
        """Refresh the dropdown suggestion list for the current input text."""
        if self._inline_autocomplete_enabled:
            # Inline ghost-text completion replaces the dropdown entirely.
            if self.suggestion_list:
                try:
                    self.suggestion_list.display = False
                except Exception:
                    pass
            return
        if not self.suggestion_list:
            return
        raw = str(text or "")
        prefix = self._current_cmd_prefix(raw)
        if not prefix:
            self.suggestion_list.display = False
            return

        pref_low = prefix.lower()
        matches = [n for n in self._cmdlet_names if n.lower().startswith(pref_low)]
        # Cap the dropdown to the first 10 matches.
        matches = matches[:10]

        if not matches:
            self.suggestion_list.display = False
            return

        try:
            self.suggestion_list.clear_options()  # type: ignore[attr-defined]
        except Exception:
            try:
                # Fallback for older/newer Textual APIs.
                self.suggestion_list.options = []  # type: ignore[attr-defined]
            except Exception:
                logger.exception("Failed to clear suggestion list options via fallback")

        try:
            self.suggestion_list.add_options(
                [Option(m) for m in matches]
            )  # type: ignore[attr-defined]
        except Exception:
            try:
                # Same API-version fallback as above for populating options.
                self.suggestion_list.options = [
                    Option(m) for m in matches
                ]  # type: ignore[attr-defined]
            except Exception:
                logger.exception("Failed to set suggestion list options via fallback")

        self.suggestion_list.display = True
|
|
|
|
@staticmethod
|
|
def _current_cmd_prefix(text: str) -> str:
|
|
"""Best-effort prefix for cmdlet name completion.
|
|
|
|
Completes the token immediately after start-of-line or a '|'.
|
|
"""
|
|
raw = str(text or "")
|
|
# Find the segment after the last pipe.
|
|
segment = raw.split("|")[-1]
|
|
# Remove leading whitespace.
|
|
segment = segment.lstrip()
|
|
if not segment:
|
|
return ""
|
|
# Only complete the first token of the segment.
|
|
m = re.match(r"([A-Za-z0-9_\-]*)", segment)
|
|
return m.group(1) if m else ""
|
|
|
|
@staticmethod
|
|
def _apply_suggestion_to_text(text: str, suggestion: str) -> str:
|
|
raw = str(text or "")
|
|
parts = raw.split("|")
|
|
if not parts:
|
|
return suggestion
|
|
last = parts[-1]
|
|
# Preserve leading spaces after the pipe.
|
|
leading = "".join(ch for ch in last if ch.isspace())
|
|
trimmed = last.lstrip()
|
|
# Replace first token in last segment.
|
|
replaced = re.sub(r"^[A-Za-z0-9_\-]*", suggestion, trimmed)
|
|
parts[-1] = leading + replaced
|
|
return "|".join(parts)
|
|
|
|
    def _resolve_selected_item(
        self
    ) -> Tuple[Optional[Any],
               Optional[str],
               Optional[str]]:
        """Return (item, store_name, hash) for the currently selected row."""
        index = int(getattr(self, "_selected_row_index", 0) or 0)
        if index < 0:
            index = 0

        item: Any = None
        row_payload: Any = None
        row = None
        column_store: Optional[str] = None
        column_hash: Optional[str] = None

        # Prefer mapping displayed table row -> source item.
        if self.current_result_table and 0 <= index < len(
                getattr(self.current_result_table,
                        "rows",
                        []) or []):
            row = self.current_result_table.rows[index]
            row_payload = getattr(row, "payload", None)
            src_idx = getattr(row, "source_index", None)
            if isinstance(src_idx, int) and 0 <= src_idx < len(self.result_items):
                item = self.result_items[src_idx]
            # Harvest store/hash from the rendered columns as a fallback source.
            for col in getattr(row, "columns", []) or []:
                name = str(getattr(col, "name", "") or "").strip().lower()
                value = str(getattr(col, "value", "") or "").strip()
                if not column_store and name in {"store", "storage", "source", "table"}:
                    column_store = value
                if not column_hash and name in {"hash", "hash_hex", "file_hash", "sha256"}:
                    column_hash = value

        if item is None and 0 <= index < len(self.result_items):
            item = self.result_items[index]

        def _pick_from_candidates(
            candidates: List[Any], extractor: Callable[[Any], str]
        ) -> str:
            # First non-empty value wins; extractor errors count as "no value".
            for candidate in candidates:
                if candidate is None:
                    continue
                try:
                    value = extractor(candidate)
                except Exception:
                    value = ""
                if value and str(value).strip():
                    return str(value).strip()
            return ""

        # Payload (row-attached data) is preferred over the raw item.
        candidate_sources: List[Any] = []
        if row_payload is not None:
            candidate_sources.append(row_payload)
        if item is not None:
            candidate_sources.append(item)

        store_name = _pick_from_candidates(candidate_sources, extract_store_value)
        file_hash = _pick_from_candidates(candidate_sources, extract_hash_value)

        if not store_name and column_store:
            store_name = column_store
        if not file_hash and column_hash:
            file_hash = column_hash

        store_text = str(store_name).strip() if store_name else ""
        hash_text = str(file_hash).strip() if file_hash else ""

        if not store_text:
            # Fallback to UI store selection when item doesn't carry it.
            store_text = self._get_selected_store() or ""

        final_item = row_payload if row_payload is not None else item
        if final_item is None and (store_text or hash_text):
            # Synthesize a minimal item so callers still get store/hash context.
            fallback: Dict[str, str] = {}
            if store_text:
                fallback["store"] = store_text
            if hash_text:
                fallback["hash"] = hash_text
            final_item = fallback

        return final_item, (store_text or None), (hash_text or None)
|
|
|
|
def _open_tags_popup(self) -> None:
    """Open the tag editor popup for the currently selected result row."""
    if self._pipeline_running:
        self.notify("Pipeline already running", severity="warning", timeout=3)
        return

    item, store_name, file_hash = self._resolve_selected_item()
    if item is None:
        self.notify("No selected item", severity="warning", timeout=3)
        return
    if not store_name:
        self.notify("Selected item missing store", severity="warning", timeout=4)
        return

    # For dict items, work on a shallow copy and backfill store/hash so the
    # editor has full context without mutating the original result entry.
    seed_payload: Any = item
    if isinstance(item, dict):
        seed_payload = dict(item)
        try:
            if store_name and not str(seed_payload.get("store") or "").strip():
                seed_payload["store"] = store_name
        except Exception:
            logger.exception("Failed to set seed store value")
        try:
            if file_hash and not str(seed_payload.get("hash") or "").strip():
                seed_payload["hash"] = file_hash
        except Exception:
            logger.exception("Failed to set seed hash value")

    self.push_screen(
        TagEditorPopup(
            seeds=seed_payload,
            store_name=store_name,
            file_hash=file_hash,
        )
    )
|
|
|
|
def _open_metadata_popup(self) -> None:
    """Show a read-only popup describing the currently selected row.

    Prefers the visible table columns; falls back to JSON-dumping a dict
    item, and finally to ``str(item)``.
    """
    item, _store_name, _file_hash = self._resolve_selected_item()
    if item is None:
        self.notify("No selected item", severity="warning", timeout=3)
        return

    row_index = int(getattr(self, "_selected_row_index", 0) or 0)
    table = self.current_result_table
    visible_rows = (getattr(table, "rows", []) or []) if table else []

    if table and 0 <= row_index < len(visible_rows):
        # Render each visible column as a "name: value" line.
        row = table.rows[row_index]
        pieces: List[str] = []
        for col in getattr(row, "columns", []) or []:
            pieces.append(f"{col.name}: {col.value}")
        text = "\n".join(pieces)
    elif isinstance(item, dict):
        try:
            text = json.dumps(item, indent=2, ensure_ascii=False)
        except Exception:
            # Non-serializable values fall back to repr-style text.
            text = str(item)
    else:
        text = str(item)

    self.push_screen(TextPopup(title="Metadata", text=text))
|
|
|
|
def _open_relationships_popup(self) -> None:
    """Display the selected item's relationships in a text popup."""
    item, _store_name, _file_hash = self._resolve_selected_item()
    if item is None:
        self.notify("No selected item", severity="warning", timeout=3)
        return

    # Dict items carry relationships inline; other objects may expose an
    # attribute or an accessor method.
    if isinstance(item, dict):
        relationships = item.get("relationships") or item.get("relationship")
    else:
        relationships = getattr(item, "relationships", None)
        if not relationships:
            relationships = getattr(item, "get_relationships", lambda: None)()

    if not relationships:
        self.push_screen(TextPopup(title="Relationships", text="No relationships"))
        return

    lines: List[str] = []
    if isinstance(relationships, dict):
        for rel_type, value in relationships.items():
            if not isinstance(value, list):
                lines.append(f"{rel_type}: {value}")
                continue
            if not value:
                lines.append(f"{rel_type}: (empty)")
            for v in value:
                lines.append(f"{rel_type}: {v}")
    else:
        lines.append(str(relationships))

    self.push_screen(TextPopup(title="Relationships", text="\n".join(lines)))
|
|
|
|
def _build_action_context(self) -> Dict[str, Any]:
    """Assemble the context dict consumed by the actions popup.

    Collects the resolved item, store/hash, best-guess URL and local path,
    plus provider flags (YouTube/Tidal) used to decide which context-menu
    actions are offered.
    """
    item, store_name, file_hash = self._resolve_selected_item()
    metadata = self._normalize_item_metadata(item) if item is not None else {}
    normalized_hash = self._normalize_hash_text(file_hash)

    def _candidate_values() -> List[str]:
        # Gather non-empty URL/path-ish fields from the item and its
        # normalized metadata, in priority order of the key tuple below.
        out: List[str] = []
        sources: List[Any] = [item, metadata]
        for src in sources:
            if not isinstance(src, dict):
                continue
            for key in ("url", "source_url", "target", "path", "file_path"):
                raw = src.get(key)
                if raw is None:
                    continue
                text = str(raw).strip()
                if text:
                    out.append(text)
        return out

    # Classify candidates: first http(s) value wins as URL; first value
    # that exists on disk wins as path.
    url_value = ""
    path_value = ""
    for value in _candidate_values():
        low = value.lower()
        if (low.startswith("http://") or low.startswith("https://")) and not url_value:
            url_value = value
            continue
        if not path_value:
            try:
                p = Path(value)
                if p.exists():
                    path_value = str(p)
            except Exception:
                # Malformed path strings are simply skipped.
                pass

    # NOTE(review): the *_context helpers are defined elsewhere in this
    # class; their exact matching rules are not visible here.
    table_hint = self._extract_table_hint(metadata)
    is_youtube_context = self._is_youtube_context(metadata, table_hint=table_hint, url_value=url_value)
    is_tidal_track_context = self._is_tidal_track_context(metadata, table_hint=table_hint)
    is_tidal_artist_context = self._is_tidal_artist_context(metadata, table_hint=table_hint)
    # MPV playback needs either a store+hash pair or a streamable provider.
    can_play_mpv = bool((str(store_name or "").strip() and normalized_hash) or is_youtube_context or is_tidal_track_context)

    return {
        "item": item,
        "store": str(store_name or "").strip(),
        "hash": normalized_hash,
        "url": url_value,
        "path": path_value,
        "selected_store": str(self._get_selected_store() or "").strip(),
        "table_hint": table_hint,
        "is_youtube_context": is_youtube_context,
        "is_tidal_track_context": is_tidal_track_context,
        "is_tidal_artist_context": is_tidal_artist_context,
        "can_play_mpv": can_play_mpv,
    }
|
|
|
|
def _build_context_actions(self, ctx_obj: Dict[str, Any]) -> List[Tuple[str, str]]:
|
|
actions: List[Tuple[str, str]] = []
|
|
url_value = str(ctx_obj.get("url") or "").strip()
|
|
path_value = str(ctx_obj.get("path") or "").strip()
|
|
store_name = str(ctx_obj.get("store") or "").strip()
|
|
hash_value = str(ctx_obj.get("hash") or "").strip()
|
|
selected_store = str(ctx_obj.get("selected_store") or "").strip()
|
|
|
|
if url_value:
|
|
actions.append(("Open URL", "open_url"))
|
|
|
|
if path_value:
|
|
actions.append(("Open File", "open_file"))
|
|
actions.append(("Open File Folder", "open_folder"))
|
|
actions.append(("Copy File Path", "copy_path"))
|
|
|
|
can_play_mpv = bool(ctx_obj.get("can_play_mpv", False))
|
|
|
|
if can_play_mpv:
|
|
actions.append(("Play in MPV", "play_mpv"))
|
|
|
|
if store_name and hash_value:
|
|
actions.append(("Delete from Store", "delete_store_item"))
|
|
|
|
if selected_store and selected_store.lower() != store_name.lower():
|
|
actions.append(
|
|
(f"Copy to Store ({selected_store})", "copy_to_selected_store")
|
|
)
|
|
actions.append(
|
|
(f"Move to Store ({selected_store})", "move_to_selected_store")
|
|
)
|
|
|
|
return actions
|
|
|
|
def _open_actions_popup(self) -> None:
    """Show the context-action menu for the current selection."""
    if self._pipeline_running:
        self.notify("Pipeline already running", severity="warning", timeout=3)
        return

    ctx_obj = self._build_action_context()
    available = self._build_context_actions(ctx_obj)
    if not available:
        self.notify("No actions available for selected item", severity="warning", timeout=3)
        return

    def _on_action_chosen(action_key: Optional[str]) -> None:
        # The popup resolves with None when dismissed without a choice.
        if action_key:
            self._execute_context_action(str(action_key), ctx_obj)

    self.push_screen(ActionMenuPopup(actions=available), callback=_on_action_chosen)
|
|
|
|
def _copy_text_to_clipboard(self, text: str) -> bool:
|
|
content = str(text or "")
|
|
if not content:
|
|
return False
|
|
try:
|
|
self.copy_to_clipboard(content)
|
|
return True
|
|
except Exception:
|
|
pass
|
|
|
|
try:
|
|
import subprocess as _sp
|
|
|
|
_sp.run(
|
|
[
|
|
"powershell",
|
|
"-NoProfile",
|
|
"-Command",
|
|
f"Set-Clipboard -Value {json.dumps(content)}",
|
|
],
|
|
check=False,
|
|
capture_output=True,
|
|
text=True,
|
|
)
|
|
return True
|
|
except Exception:
|
|
return False
|
|
|
|
def _execute_context_action(self, action_key: str, ctx_obj: Dict[str, Any]) -> None:
    """Run the context-menu action *action_key* against *ctx_obj*.

    ``ctx_obj`` is the dict produced by ``_build_action_context``.  Every
    branch re-validates its required inputs before acting, so a stale menu
    selection degrades to a warning notification instead of an error.
    """
    action = str(action_key or "").strip().lower()
    if not action:
        return

    url_value = str(ctx_obj.get("url") or "").strip()
    path_value = str(ctx_obj.get("path") or "").strip()
    store_name = str(ctx_obj.get("store") or "").strip()
    hash_value = str(ctx_obj.get("hash") or "").strip()
    selected_store = str(ctx_obj.get("selected_store") or "").strip()

    if action == "open_url":
        if not url_value:
            self.notify("No URL found on selected item", severity="warning", timeout=3)
            return
        try:
            import webbrowser

            webbrowser.open(url_value)
            self.notify("Opened URL", timeout=2)
        except Exception as exc:
            self.notify(f"Failed to open URL: {exc}", severity="error", timeout=4)
        return

    if action == "open_file":
        if not path_value:
            self.notify("No local file path found", severity="warning", timeout=3)
            return
        try:
            p = Path(path_value)
            if not p.exists():
                self.notify("Local file path does not exist", severity="warning", timeout=3)
                return
            # Per-platform "open with default application" dispatch.
            if sys.platform.startswith("win"):
                os.startfile(str(p))  # type: ignore[attr-defined]
            elif sys.platform == "darwin":
                subprocess.Popen(["open", str(p)])
            else:
                subprocess.Popen(["xdg-open", str(p)])
            self.notify("Opened file", timeout=2)
        except Exception as exc:
            self.notify(f"Failed to open file: {exc}", severity="error", timeout=4)
        return

    if action == "open_folder":
        if not path_value:
            self.notify("No local file path found", severity="warning", timeout=3)
            return
        try:
            p = Path(path_value)
            # Open the containing directory for files; the path itself for dirs.
            folder = p.parent if p.is_file() else p
            if not folder.exists():
                self.notify("Folder does not exist", severity="warning", timeout=3)
                return
            if sys.platform.startswith("win"):
                os.startfile(str(folder))  # type: ignore[attr-defined]
            elif sys.platform == "darwin":
                subprocess.Popen(["open", str(folder)])
            else:
                subprocess.Popen(["xdg-open", str(folder)])
            self.notify("Opened folder", timeout=2)
        except Exception as exc:
            self.notify(f"Failed to open folder: {exc}", severity="error", timeout=4)
        return

    if action == "copy_path":
        if not path_value:
            self.notify("No local file path found", severity="warning", timeout=3)
            return
        if self._copy_text_to_clipboard(path_value):
            self.notify("Copied file path", timeout=2)
        else:
            self.notify("Failed to copy path", severity="error", timeout=3)
        return

    if action == "delete_store_item":
        if not store_name or not hash_value:
            self.notify("Delete action requires store + hash", severity="warning", timeout=3)
            return
        # json.dumps quotes/escapes the values for the pipeline parser.
        query = f"hash:{hash_value}"
        cmd = f"delete-file -store {json.dumps(store_name)} -query {json.dumps(query)}"
        self._start_pipeline_execution(cmd)
        return

    if action == "play_mpv":
        if not bool(ctx_obj.get("can_play_mpv", False)):
            self.notify("MPV is unavailable for this selected item", severity="warning", timeout=3)
            return
        selected = self._item_for_row_index(self._selected_row_index)
        if selected is None:
            self.notify("No selected row for MPV action", severity="warning", timeout=3)
            return
        seed_items = self._current_seed_items()
        if not seed_items:
            self.notify("No current result items available for MPV selection", severity="warning", timeout=3)
            return
        # Pipeline row references are 1-based; clamp below 1 defensively.
        row_num = int(self._selected_row_index or 0) + 1
        if row_num < 1:
            row_num = 1
        cmd = f"@{row_num} | .mpv"
        # Keep the current results/log intact so playback doesn't clobber
        # the table the user selected from.
        self._start_pipeline_execution(
            cmd,
            seeds=seed_items,
            seed_table=self.current_result_table,
            clear_log=False,
            clear_results=False,
            keep_existing_results=True,
        )
        return

    if action in {"copy_to_selected_store", "move_to_selected_store"}:
        if not store_name or not hash_value or not selected_store:
            self.notify("Copy/Move requires source store, hash, and selected target store", severity="warning", timeout=4)
            return
        if selected_store.lower() == store_name.lower():
            self.notify("Target store must be different from source store", severity="warning", timeout=3)
            return

        # Copy = search in source store, pipe into add-file on the target.
        query = f"hash:{hash_value}"
        base_copy = (
            f"search-file -store {json.dumps(store_name)} {json.dumps(query)}"
            f" | add-file -store {json.dumps(selected_store)}"
        )
        if action == "move_to_selected_store":
            # Move = copy, then delete the original from the source store.
            delete_cmd = f"delete-file -store {json.dumps(store_name)} -query {json.dumps(query)}"
            cmd = f"{base_copy} | @ | {delete_cmd}"
        else:
            cmd = base_copy

        self._start_pipeline_execution(cmd)
        return

    self.notify(f"Unknown action: {action}", severity="warning", timeout=3)
|
|
|
|
def _clear_log(self) -> None:
|
|
self.log_lines = []
|
|
if self.log_output:
|
|
self.log_output.text = ""
|
|
|
|
def _append_log_line(self, line: str) -> None:
|
|
self.log_lines.append(line)
|
|
if len(self.log_lines) > 500:
|
|
self.log_lines = self.log_lines[-500:]
|
|
if self.log_output:
|
|
self.log_output.text = "\n".join(self.log_lines)
|
|
|
|
def _append_block(self, text: str) -> None:
|
|
for line in text.strip().splitlines():
|
|
self._append_log_line(f" {line}")
|
|
|
|
def _clear_results(self) -> None:
|
|
self.result_items = []
|
|
if self.results_table:
|
|
self.results_table.clear()
|
|
self._selected_row_index = 0
|
|
self._clear_inline_detail_panels()
|
|
|
|
def _set_status(self, message: str, *, level: str = "info") -> None:
|
|
if not self.status_panel:
|
|
return
|
|
for css in ("status-info", "status-success", "status-error"):
|
|
self.status_panel.remove_class(css)
|
|
css_class = f"status-{level if level in {'success', 'error'} else 'info'}"
|
|
self.status_panel.add_class(css_class)
|
|
self.status_panel.update(message)
|
|
|
|
def refresh_workers(self) -> None:
    """Repopulate the worker table from the executor's worker manager."""
    table = self.worker_table
    if not table:
        return
    manager = self.executor.worker_manager
    table.clear()
    if manager is None:
        table.add_row("—", "—", "—", "Worker manager unavailable")
        return
    active = manager.get_active_workers()
    if not active:
        table.add_row("—", "—", "—", "No active workers")
        return
    for info in active:
        # Worker dicts come in two flavors; accept either key spelling.
        wid = str(info.get("worker_id") or info.get("id") or "?")[:8]
        kind = str(info.get("worker_type") or info.get("type") or "?")
        state = str(info.get("status") or info.get("result") or "running")
        detail = (
            info.get("current_step")
            or info.get("description")
            or info.get("pipe")
            or ""
        )
        table.add_row(wid, kind, state, str(detail)[:80])
|
|
|
|
|
|
# Allow running this module directly as the TUI entry point.
if __name__ == "__main__":
    PipelineHubApp().run()
|