TUI.py
@@ -6,7 +6,7 @@ import json
 import re
 import sys
 from pathlib import Path
-from typing import Any, List, Optional, Sequence, Tuple
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple
 
 from textual import on, work
 from textual.app import App, ComposeResult
@@ -28,6 +28,7 @@ from textual.widgets import (
 )
 from textual.widgets.option_list import Option
 
+
 BASE_DIR = Path(__file__).resolve().parent
 REPO_ROOT = BASE_DIR
 TUI_DIR = REPO_ROOT / "TUI"
@@ -37,7 +38,7 @@ for path in (REPO_ROOT, TUI_DIR):
         sys.path.insert(0, str_path)
 
 from TUI.pipeline_runner import PipelineRunResult  # type: ignore  # noqa: E402
-from SYS.result_table import ResultTable  # type: ignore  # noqa: E402
+from SYS.result_table import ResultTable, extract_hash_value, extract_store_value  # type: ignore  # noqa: E402
 
 from SYS.config import load_config  # type: ignore  # noqa: E402
 from Store.registry import Store as StoreRegistry  # type: ignore  # noqa: E402
@@ -68,14 +69,23 @@ def _extract_tag_names(emitted: Sequence[Any]) -> List[str]:
         try:
             if hasattr(obj, "tag_name"):
                 val = getattr(obj, "tag_name")
-                if val:
-                    tags.append(str(val))
+                if val and isinstance(val, str):
+                    tags.append(val)
                 continue
         except Exception:
             pass
 
         if isinstance(obj, dict):
-            for k in ("tag_name", "tag", "name", "value"):
+            # Prefer explicit tag lists
+            tag_list = obj.get("tag")
+            if isinstance(tag_list, (list, tuple)):
+                for t in tag_list:
+                    if isinstance(t, str) and t.strip():
+                        tags.append(t.strip())
+            if tag_list:
+                continue
+            # Fall back to individual tag_name/value/name strings
+            for k in ("tag_name", "value", "name"):
                 v = obj.get(k)
                 if isinstance(v, str) and v.strip():
                     tags.append(v.strip())
@@ -84,6 +94,30 @@ def _extract_tag_names(emitted: Sequence[Any]) -> List[str]:
     return _dedup_preserve_order(tags)
 
 
+def _extract_tag_names_from_table(table: Any) -> List[str]:
+    if not table:
+        return []
+    sources: List[Any] = []
+    get_payloads = getattr(table, "get_payloads", None)
+    if callable(get_payloads):
+        try:
+            payloads = get_payloads()
+            if payloads:
+                sources.extend(payloads)
+        except Exception:
+            pass
+    rows = getattr(table, "rows", []) or []
+    for row in rows:
+        for col in getattr(row, "columns", []) or []:
+            if str(getattr(col, "name", "") or "").strip().lower() == "tag":
+                val = getattr(col, "value", None)
+                if val:
+                    sources.append({"tag_name": val})
+    if not sources:
+        return []
+    return _extract_tag_names(sources)
+
+
 class TextPopup(ModalScreen[None]):
 
     def __init__(self, *, title: str, text: str) -> None:
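The new _extract_tag_names_from_table helper relies only on duck typing: an optional get_payloads() method, a rows attribute, and per-column name/value fields. A minimal sketch of the shapes it accepts (illustrative only, not part of the commit; the Fake* classes are made-up stand-ins for SYS.result_table objects, and it assumes the helper and its _extract_tag_names dependency from TUI.py are in scope):

    from dataclasses import dataclass, field
    from typing import Any, List

    @dataclass
    class FakeColumn:
        name: str
        value: Any

    @dataclass
    class FakeRow:
        columns: List[FakeColumn] = field(default_factory=list)

    @dataclass
    class FakeTable:
        rows: List[FakeRow] = field(default_factory=list)

        def get_payloads(self) -> List[Any]:
            # Payload dicts are preferred; a "tag" list wins over tag_name/value/name strings.
            return [{"tag": ["music", "flac"]}]

    table = FakeTable(rows=[FakeRow(columns=[FakeColumn(name="Tag", value="lossless")])])
    print(_extract_tag_names_from_table(table))  # expected: ['music', 'flac', 'lossless']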
@@ -139,20 +173,24 @@ class TagEditorPopup(ModalScreen[None]):
     @work(thread=True)
     def _load_tags_background(self) -> None:
         app = self.app  # PipelineHubApp
         try:
-            runner: PipelineRunner = getattr(app, "executor")
-            cmd = f"@1 | get-tag -emit"
-            res = runner.run_pipeline(cmd, seeds=self._seeds, isolate=True)
-            tags = _extract_tag_names(res.emitted)
-        except Exception as exc:
-            tags = []
-            try:
-                app.call_from_thread(
-                    self._set_status,
-                    f"Error: {type(exc).__name__}: {exc}"
-                )
-            except Exception:
-                self._set_status(f"Error: {type(exc).__name__}: {exc}")
+            tags = self._fetch_tags_from_store()
+            if not tags:
+                runner: PipelineRunner = getattr(app, "executor")
+                cmd = "@1 | get-tag"
+                res = runner.run_pipeline(cmd, seeds=self._seeds, isolate=True)
+                tags = _extract_tag_names_from_table(getattr(res, "result_table", None))
+                if not tags:
+                    tags = _extract_tag_names(getattr(res, "emitted", []))
+        except Exception as exc:
+            tags = []
+            try:
+                app.call_from_thread(
+                    self._set_status,
+                    f"Error: {type(exc).__name__}: {exc}"
+                )
+            except Exception:
+                self._set_status(f"Error: {type(exc).__name__}: {exc}")
         self._original_tags = tags
         try:
             app.call_from_thread(self._apply_loaded_tags, tags)
@@ -164,6 +202,42 @@ class TagEditorPopup(ModalScreen[None]):
         self._editor.text = "\n".join(tags)
         self._set_status(f"Loaded {len(tags)} tag(s)")
 
+    def _fetch_tags_from_store(self) -> Optional[List[str]]:
+        if not self._store or not self._hash:
+            return None
+        try:
+            cfg = load_config() or {}
+        except Exception:
+            cfg = {}
+        store_key = str(self._store or "").strip()
+        hash_value = str(self._hash or "").strip().lower()
+        if not store_key or not hash_value:
+            return None
+        try:
+            registry = StoreRegistry(config=cfg, suppress_debug=True)
+        except Exception:
+            return []
+        match = None
+        normalized = store_key.lower()
+        for name in registry.list_backends():
+            if str(name or "").strip().lower() == normalized:
+                match = name
+                break
+        if match is None:
+            return None
+        try:
+            backend = registry[match]
+        except KeyError:
+            return None
+        try:
+            tags, _src = backend.get_tag(hash_value, config=cfg)
+            if not tags:
+                return []
+            filtered = [str(t).strip() for t in tags if str(t).strip()]
+            return _dedup_preserve_order(filtered)
+        except Exception:
+            return None
+
     def _parse_editor_tags(self) -> List[str]:
         raw = ""
         try:
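_fetch_tags_from_store boils down to a case-insensitive backend lookup followed by backend.get_tag. A condensed, standalone sketch of that same lookup (imports and call signatures are the ones this diff already uses; the store key and hash below are made up, and a configured backend is assumed to exist):

    from typing import List, Optional

    from SYS.config import load_config  # type: ignore
    from Store.registry import Store as StoreRegistry  # type: ignore

    def tags_for(store_key: str, hash_value: str) -> Optional[List[str]]:
        try:
            cfg = load_config() or {}
        except Exception:
            cfg = {}
        try:
            registry = StoreRegistry(config=cfg, suppress_debug=True)
        except Exception:
            return None
        wanted = store_key.strip().lower()
        for name in registry.list_backends():
            if str(name or "").strip().lower() == wanted:
                tags, _src = registry[name].get_tag(hash_value.strip().lower(), config=cfg)
                return [str(t).strip() for t in (tags or []) if str(t).strip()]
        return None

    # e.g. tags_for("local", "ab12...") -> ["music", "flac"], or None when the store name is unknown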
@@ -207,6 +281,33 @@ class TagEditorPopup(ModalScreen[None]):
         desired: List[str]
     ) -> None:
         app = self.app  # PipelineHubApp
+        def _log_message(msg: str) -> None:
+            if not msg:
+                return
+            try:
+                app.call_from_thread(app._append_log_line, msg)
+            except Exception:
+                pass
+
+        def _log_pipeline_command(stage: str, cmd: str) -> None:
+            if not cmd:
+                return
+            _log_message(f"tags-save: {stage}: {cmd}")
+
+        def _log_pipeline_result(stage: str, result: PipelineRunResult | None) -> None:
+            if result is None:
+                return
+            status = "success" if getattr(result, "success", False) else "failed"
+            _log_message(f"tags-save: {stage} result: {status}")
+            error = str(getattr(result, "error", "") or "").strip()
+            if error:
+                _log_message(f"tags-save: {stage} error: {error}")
+            for attr in ("stdout", "stderr"):
+                raw = str(getattr(result, attr, "") or "").strip()
+                if not raw:
+                    continue
+                for line in raw.splitlines():
+                    _log_message(f"tags-save: {stage} {attr}: {line}")
         try:
             runner: PipelineRunner = getattr(app, "executor")
             store_tok = json.dumps(self._store)
@@ -216,8 +317,10 @@ class TagEditorPopup(ModalScreen[None]):
 
             if to_del:
                 del_args = " ".join(json.dumps(t) for t in to_del)
-                del_cmd = f"@1 | delete-tag -store {store_tok}{query_chunk} {del_args}"
+                del_cmd = f"delete-tag -store {store_tok}{query_chunk} {del_args}"
+                _log_pipeline_command("delete-tag", del_cmd)
                 del_res = runner.run_pipeline(del_cmd, seeds=self._seeds, isolate=True)
+                _log_pipeline_result("delete-tag", del_res)
                 if not getattr(del_res, "success", False):
                     failures.append(
                         str(
@@ -231,8 +334,10 @@ class TagEditorPopup(ModalScreen[None]):
 
             if to_add:
                 add_args = " ".join(json.dumps(t) for t in to_add)
-                add_cmd = f"@1 | add-tag -store {store_tok}{query_chunk} {add_args}"
+                add_cmd = f"add-tag -store {store_tok}{query_chunk} {add_args}"
+                _log_pipeline_command("add-tag", add_cmd)
                 add_res = runner.run_pipeline(add_cmd, seeds=self._seeds, isolate=True)
+                _log_pipeline_result("add-tag", add_res)
                 if not getattr(add_res, "success", False):
                     failures.append(
                         str(
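The add-tag/delete-tag stages no longer start from an "@1 |" reference (the seeds are still passed through run_pipeline(..., seeds=self._seeds, ...)), and json.dumps keeps the store name and multi-word tags quoted for the pipeline parser. A standalone sketch of the command string the save path builds (the store and tag values are made up; query_chunk is built elsewhere in TUI.py and is left empty here):

    import json

    store_tok = json.dumps("local")   # -> "local", quoted for the pipeline parser
    query_chunk = ""                  # built elsewhere in TUI.py; empty for this sketch
    to_add = ["live album", "flac"]

    add_args = " ".join(json.dumps(t) for t in to_add)
    add_cmd = f"add-tag -store {store_tok}{query_chunk} {add_args}"
    print(add_cmd)  # add-tag -store "local" "live album" "flac"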
@@ -252,14 +357,38 @@ class TagEditorPopup(ModalScreen[None]):
                     self._set_status(f"Error: {msg}")
                 return
 
-            self._original_tags = list(desired)
+            reloaded = self._fetch_tags_from_store()
+            refreshed = reloaded is not None
+            tags_to_show = list(reloaded or []) if refreshed else list(desired)
+            self._original_tags = list(tags_to_show)
             try:
-                app.call_from_thread(
-                    self._set_status,
-                    f"Saved (+{len(to_add)}, -{len(to_del)})"
-                )
+                app.call_from_thread(self._apply_loaded_tags, tags_to_show)
             except Exception:
-                self._set_status(f"Saved (+{len(to_add)}, -{len(to_del)})")
+                self._apply_loaded_tags(tags_to_show)
+
+            def _refresh_overlay() -> None:
+                try:
+                    app.refresh_tag_overlay(
+                        self._store,
+                        self._hash,
+                        tags_to_show,
+                        self._seeds,
+                    )
+                except Exception:
+                    pass
+
+            try:
+                app.call_from_thread(_refresh_overlay)
+            except Exception:
+                _refresh_overlay()
+
+            status_msg = f"Saved (+{len(to_add)}, -{len(to_del)})"
+            if refreshed:
+                status_msg += f"; loaded {len(tags_to_show)} tag(s)"
+            try:
+                app.call_from_thread(self._set_status, status_msg)
+            except Exception:
+                self._set_status(status_msg)
         except Exception as exc:
             try:
                 app.call_from_thread(
@@ -547,7 +676,7 @@ class PipelineHubApp(App):
         """Apply store/path/tags UI fields to the pipeline text.
 
         Rules (simple + non-destructive):
-        - If output path is set and the first stage is download-media and has no -path/--path, append -path.
+        - If output path is set and the first stage is download-file and has no -path/--path, append -path.
        - If a store is selected and pipeline has no add-file stage, append add-file -store <store>.
         """
         base = str(pipeline_text or "").strip()
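A standalone rendering of the first rule, using the same startswith/-path checks the method applies a few hunks below (the pipeline text and output path are made up):

    import json

    stages = ["download-file https://example.com/a.flac", "add-tag music"]
    output_path = "C:/music"

    first = stages[0]
    low = first.lower()
    if low.startswith("download-file") and " -path" not in low and " --path" not in low:
        stages[0] = f"{first} -path {json.dumps(output_path)}"

    print(stages[0])  # download-file https://example.com/a.flac -path "C:/music"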
@@ -578,11 +707,11 @@ class PipelineHubApp(App):
         except Exception:
             first_stage_cmd = ""
 
-        # Apply -path to download-media first stage (only if missing)
+        # Apply -path to download-file first stage (only if missing)
         if output_path:
             first = stages[0]
             low = first.lower()
-            if low.startswith("download-media"
+            if low.startswith("download-file"
                               ) and " -path" not in low and " --path" not in low:
                 stages[0] = f"{first} -path {json.dumps(output_path)}"
 
@@ -594,9 +723,7 @@ class PipelineHubApp(App):
         should_auto_add_file = bool(
             selected_store and ("add-file" not in low_joined) and (
                 first_stage_cmd
-                in {"download-media",
-                    "download-file",
-                    "download-torrent"}
+                in {"download-file"}
             )
         )
 
@@ -714,6 +841,42 @@ class PipelineHubApp(App):
                 key=str(idx - 1)
             )
 
+    def refresh_tag_overlay(self,
+                            store_name: str,
+                            file_hash: str,
+                            tags: List[str],
+                            subject: Any) -> None:
+        """Update the shared get-tag overlay after manual tag edits."""
+        if not store_name or not file_hash:
+            return
+        try:
+            from cmdlet.get_tag import _emit_tags_as_table
+        except Exception:
+            return
+
+        try:
+            cfg = load_config() or {}
+        except Exception:
+            cfg = {}
+
+        payload_subject = subject if subject is not None else None
+        if not isinstance(payload_subject, dict):
+            payload_subject = {
+                "store": store_name,
+                "hash": file_hash,
+            }
+
+        try:
+            _emit_tags_as_table(
+                list(tags),
+                file_hash=file_hash,
+                store=store_name,
+                config=cfg,
+                subject=payload_subject,
+            )
+        except Exception:
+            pass
+
     def _load_cmdlet_names(self) -> None:
         try:
             ensure_registry_loaded()
@@ -825,6 +988,10 @@ class PipelineHubApp(App):
             index = 0
 
         item: Any = None
+        row_payload: Any = None
+        row = None
+        column_store: Optional[str] = None
+        column_hash: Optional[str] = None
 
         # Prefer mapping displayed table row -> source item.
         if self.current_result_table and 0 <= index < len(
@@ -832,30 +999,66 @@
                 "rows",
                 []) or []):
             row = self.current_result_table.rows[index]
+            row_payload = getattr(row, "payload", None)
             src_idx = getattr(row, "source_index", None)
             if isinstance(src_idx, int) and 0 <= src_idx < len(self.result_items):
                 item = self.result_items[src_idx]
+            for col in getattr(row, "columns", []) or []:
+                name = str(getattr(col, "name", "") or "").strip().lower()
+                value = str(getattr(col, "value", "") or "").strip()
+                if not column_store and name in {"store", "storage", "source", "table"}:
+                    column_store = value
+                if not column_hash and name in {"hash", "hash_hex", "file_hash", "sha256"}:
+                    column_hash = value
 
         if item is None and 0 <= index < len(self.result_items):
             item = self.result_items[index]
 
-        store_name = None
-        file_hash = None
-        if isinstance(item, dict):
-            store_name = item.get("store")
-            file_hash = item.get("hash")
-        else:
-            store_name = getattr(item, "store", None)
-            file_hash = getattr(item, "hash", None)
+        def _pick_from_candidates(
+            candidates: List[Any], extractor: Callable[[Any], str]
+        ) -> str:
+            for candidate in candidates:
+                if candidate is None:
+                    continue
+                try:
+                    value = extractor(candidate)
+                except Exception:
+                    value = ""
+                if value and str(value).strip():
+                    return str(value).strip()
+            return ""
 
-        store_text = str(store_name).strip() if store_name is not None else ""
-        hash_text = str(file_hash).strip() if file_hash is not None else ""
+        candidate_sources: List[Any] = []
+        if row_payload is not None:
+            candidate_sources.append(row_payload)
+        if item is not None:
+            candidate_sources.append(item)
+
+        store_name = _pick_from_candidates(candidate_sources, extract_store_value)
+        file_hash = _pick_from_candidates(candidate_sources, extract_hash_value)
+
+        if not store_name and column_store:
+            store_name = column_store
+        if not file_hash and column_hash:
+            file_hash = column_hash
+
+        store_text = str(store_name).strip() if store_name else ""
+        hash_text = str(file_hash).strip() if file_hash else ""
 
         if not store_text:
             # Fallback to UI store selection when item doesn't carry it.
             store_text = self._get_selected_store() or ""
 
-        return item, (store_text or None), (hash_text or None)
+        final_item = row_payload if row_payload is not None else item
+        if final_item is None and (store_text or hash_text):
+            fallback: Dict[str, str] = {}
+            if store_text:
+                fallback["store"] = store_text
+            if hash_text:
+                fallback["hash"] = hash_text
+            final_item = fallback
+
+        return final_item, (store_text or None), (hash_text or None)
 
     def _open_tags_popup(self) -> None:
         if self._pipeline_running: