dfdfdf
This commit is contained in:
47
cmdnat/__init__.py
Normal file
47
cmdnat/__init__.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from importlib import import_module
|
||||
from typing import Any, Callable, Dict, Sequence
|
||||
|
||||
CmdletFn = Callable[[Any, Sequence[str], Dict[str, Any]], int]
|
||||
|
||||
|
||||
def _register_cmdlet_object(cmdlet_obj, registry: Dict[str, CmdletFn]) -> None:
|
||||
run_fn = getattr(cmdlet_obj, "exec", None) if hasattr(cmdlet_obj, "exec") else None
|
||||
if not callable(run_fn):
|
||||
return
|
||||
|
||||
if hasattr(cmdlet_obj, "name") and cmdlet_obj.name:
|
||||
registry[cmdlet_obj.name.replace("_", "-").lower()] = run_fn
|
||||
|
||||
# Cmdlet uses 'alias' (List[str]). Some older objects may use 'aliases'.
|
||||
aliases = []
|
||||
if hasattr(cmdlet_obj, "alias") and getattr(cmdlet_obj, "alias"):
|
||||
aliases.extend(getattr(cmdlet_obj, "alias") or [])
|
||||
if hasattr(cmdlet_obj, "aliases") and getattr(cmdlet_obj, "aliases"):
|
||||
aliases.extend(getattr(cmdlet_obj, "aliases") or [])
|
||||
|
||||
for alias in aliases:
|
||||
if not alias:
|
||||
continue
|
||||
registry[alias.replace("_", "-").lower()] = run_fn
|
||||
|
||||
|
||||
def register_native_commands(registry: Dict[str, CmdletFn]) -> None:
    """Import native command modules and register their CMDLET exec functions.

    Scans this package directory for importable ``*.py`` modules (skipping
    private ``_*`` files and ``__init__.py``), imports each one relative to
    this package, and registers any module-level ``CMDLET`` object found.
    Import failures are reported to stderr and do not abort the scan.
    """
    package_dir = os.path.dirname(__file__)
    for entry in os.listdir(package_dir):
        is_candidate = (
            entry.endswith(".py")
            and not entry.startswith("_")
            and entry != "__init__.py"
        )
        if not is_candidate:
            continue

        mod_name = entry[:-3]
        try:
            module = import_module(f".{mod_name}", __name__)
            cmdlet_obj = getattr(module, "CMDLET", None)
            if cmdlet_obj:
                _register_cmdlet_object(cmdlet_obj, registry)
        except Exception as exc:
            import sys

            print(f"Error importing native command '{mod_name}': {exc}", file=sys.stderr)
            continue
|
||||
130
cmdnat/adjective.json
Normal file
130
cmdnat/adjective.json
Normal file
@@ -0,0 +1,130 @@
|
||||
{
|
||||
"Occult": [
|
||||
"esoterica",
|
||||
"ritual",
|
||||
"alchemy",
|
||||
"magic",
|
||||
"hermetic",
|
||||
"divination",
|
||||
"grimoires",
|
||||
"symbolism",
|
||||
"ceremony"
|
||||
],
|
||||
"Philosophy": [
|
||||
"ethics",
|
||||
"metaphysics",
|
||||
"epistemology",
|
||||
"logic",
|
||||
"existentialism",
|
||||
"stoicism",
|
||||
"phenomenology",
|
||||
"dialectic",
|
||||
"aesthetics"
|
||||
],
|
||||
"Mystery": [
|
||||
"investigation",
|
||||
"crime",
|
||||
"detective",
|
||||
"noir",
|
||||
"thriller",
|
||||
"suspense",
|
||||
"conspiracy",
|
||||
"whodunit",
|
||||
"clues"
|
||||
],
|
||||
"Religion": [
|
||||
"scripture",
|
||||
"theology",
|
||||
"worship",
|
||||
"ritual",
|
||||
"doctrine",
|
||||
"faith",
|
||||
"tradition",
|
||||
"liturgy",
|
||||
"sacred"
|
||||
],
|
||||
"Mythology": [
|
||||
"gods",
|
||||
"creation",
|
||||
"heroes",
|
||||
"legends",
|
||||
"folklore",
|
||||
"pantheon",
|
||||
"epic",
|
||||
"mythic",
|
||||
"archetype"
|
||||
],
|
||||
"Science": [
|
||||
"research",
|
||||
"experiment",
|
||||
"theory",
|
||||
"biology",
|
||||
"physics",
|
||||
"chemistry",
|
||||
"data",
|
||||
"method",
|
||||
"innovation"
|
||||
],
|
||||
"Art": [
|
||||
"visual",
|
||||
"painting",
|
||||
"sculpture",
|
||||
"modernism",
|
||||
"technique",
|
||||
"studio",
|
||||
"curation",
|
||||
"expression",
|
||||
"composition"
|
||||
],
|
||||
"Literature": [
|
||||
"fiction",
|
||||
"poetry",
|
||||
"novel",
|
||||
"criticism",
|
||||
"narrative",
|
||||
"prose",
|
||||
"drama",
|
||||
"canonical",
|
||||
"translation"
|
||||
],
|
||||
"History": [
|
||||
"archaeology",
|
||||
"chronicle",
|
||||
"period",
|
||||
"empire",
|
||||
"revolution",
|
||||
"archive",
|
||||
"heritage",
|
||||
"historiography",
|
||||
"timeline"
|
||||
],
|
||||
"Psychology": [
|
||||
"cognition",
|
||||
"behavior",
|
||||
"therapy",
|
||||
"development",
|
||||
"neuroscience",
|
||||
"personality",
|
||||
"perception",
|
||||
"emotion",
|
||||
"motivation"
|
||||
],
|
||||
"gnostic": [
|
||||
"religion",
|
||||
"scripture",
|
||||
"gnostic",
|
||||
"gospel",
|
||||
"wisdom",
|
||||
"spirituality",
|
||||
"ancient",
|
||||
"philosophy",
|
||||
"esoteric",
|
||||
"mysticism",
|
||||
"mythology",
|
||||
"theology",
|
||||
"sacred",
|
||||
"divine",
|
||||
"apocrapha",
|
||||
"gnosticism"
|
||||
]
|
||||
}
|
||||
148
cmdnat/adjective.py
Normal file
148
cmdnat/adjective.py
Normal file
@@ -0,0 +1,148 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import List, Dict, Any, Optional, Sequence
|
||||
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
from SYS.logger import log
|
||||
from result_table import ResultTable
|
||||
import pipeline as ctx
|
||||
|
||||
# Path to the adjective data file. Walking up one directory and back into
# "cmdnat" resolves to the adjective.json that sits next to this module.
ADJECTIVE_FILE = os.path.join(os.path.dirname(os.path.dirname(__file__)), "cmdnat", "adjective.json")
|
||||
|
||||
def _load_adjectives() -> Dict[str, List[str]]:
    """Load the category -> tags mapping from disk; {} when missing or unreadable."""
    if not os.path.exists(ADJECTIVE_FILE):
        return {}
    try:
        with open(ADJECTIVE_FILE, 'r', encoding='utf-8') as handle:
            return json.load(handle)
    except Exception as e:
        log(f"Error loading adjectives: {e}", file=sys.stderr)
        return {}
|
||||
|
||||
def _save_adjectives(data: Dict[str, List[str]]) -> bool:
    """Persist the category -> tags mapping to ADJECTIVE_FILE.

    Returns True on success; logs to stderr and returns False on any error.
    """
    try:
        with open(ADJECTIVE_FILE, 'w', encoding='utf-8') as handle:
            json.dump(data, handle, indent=2)
    except Exception as e:
        log(f"Error saving adjectives: {e}", file=sys.stderr)
        return False
    return True
|
||||
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """List adjective categories/tags and handle -add/-delete mutations.

    With no args, renders a table of all categories. With a category
    argument, creates the category if missing, applies -add/-delete on a
    tag when requested, then renders the category's tags.

    Returns 0 on success, 1 when -add/-delete is given without a tag.
    """
    data = _load_adjectives()

    # Parse arguments manually first to handle positional args
    # We expect: .adjective [category] [tag] [-add] [-delete]

    # If no args, list categories
    if not args:
        table = ResultTable("Adjective Categories")
        for i, (category, tags) in enumerate(data.items()):
            row = table.add_row()
            row.add_column("#", str(i + 1))
            row.add_column("Category", category)
            row.add_column("Tag Amount", str(len(tags)))

            # Selection expands to: .adjective "Category Name"
            table.set_row_selection_args(i, [category])

        table.set_source_command(".adjective")
        ctx.set_last_result_table_overlay(table, list(data.keys()))
        ctx.set_current_stage_table(table)
        print(table)
        return 0

    # We have args. First arg is likely category.
    category = args[0]

    # Check if we are adding a new category (implicit if it doesn't exist)
    if category not in data:
        # If only category provided, create it
        if len(args) == 1:
            data[category] = []
            _save_adjectives(data)
            log(f"Created new category: {category}")
        # If more args, we might be trying to add to a non-existent category
        elif "-add" in args:
            data[category] = []
            # Continue to add logic
        # NOTE(review): a -delete on a category absent from data falls through
        # here without creating it, so data[category] in the -delete branch
        # below would raise KeyError — confirm intended behavior.

    # Handle operations within category
    remaining_args = list(args[1:])

    # Check for -add flag
    if "-add" in remaining_args:
        # .adjective category -add tag
        # or .adjective category tag -add
        add_idx = remaining_args.index("-add")
        # Tag could be before or after
        tag = None
        if add_idx + 1 < len(remaining_args):
            tag = remaining_args[add_idx + 1]
        elif add_idx > 0:
            tag = remaining_args[add_idx - 1]

        if tag:
            if tag not in data[category]:
                data[category].append(tag)
                _save_adjectives(data)
                log(f"Added '{tag}' to '{category}'")
            else:
                log(f"Tag '{tag}' already exists in '{category}'")
        else:
            log("Error: No tag specified to add")
            return 1

    # Check for -delete flag
    elif "-delete" in remaining_args:
        # .adjective category -delete tag
        # or .adjective category tag -delete
        del_idx = remaining_args.index("-delete")
        tag = None
        if del_idx + 1 < len(remaining_args):
            tag = remaining_args[del_idx + 1]
        elif del_idx > 0:
            tag = remaining_args[del_idx - 1]

        if tag:
            if tag in data[category]:
                data[category].remove(tag)
                _save_adjectives(data)
                log(f"Deleted '{tag}' from '{category}'")
            else:
                log(f"Tag '{tag}' not found in '{category}'")
        else:
            log("Error: No tag specified to delete")
            return 1

    # List tags in category (Default action if no flags or after modification)
    tags = data.get(category, [])
    table = ResultTable(f"Tags in '{category}'")
    for i, tag in enumerate(tags):
        row = table.add_row()
        row.add_column("#", str(i + 1))
        row.add_column("Tag", tag)

        # Selection expands to: .adjective "Category" "Tag"
        # This allows typing @N -delete to delete it
        table.set_row_selection_args(i, [category, tag])

    table.set_source_command(".adjective")
    ctx.set_last_result_table_overlay(table, tags)
    ctx.set_current_stage_table(table)
    print(table)

    return 0
|
||||
|
||||
# Registration object discovered by cmdnat.register_native_commands();
# 'add'/'delete' are flags, the two positionals are optional.
CMDLET = Cmdlet(
    name=".adjective",
    alias=["adj"],
    summary="Manage adjective categories and tags",
    usage=".adjective [category] [-add tag] [-delete tag]",
    arg=[
        CmdletArg(name="category", type="string", description="Category name", required=False),
        CmdletArg(name="tag", type="string", description="Tag name", required=False),
        CmdletArg(name="add", type="flag", description="Add tag"),
        CmdletArg(name="delete", type="flag", description="Delete tag"),
    ],
    exec=_run
)
|
||||
139
cmdnat/config.py
Normal file
139
cmdnat/config.py
Normal file
@@ -0,0 +1,139 @@
|
||||
from typing import List, Dict, Any
|
||||
|
||||
from cmdlet._shared import Cmdlet, CmdletArg
|
||||
from config import load_config, save_config
|
||||
|
||||
# Registration object for the .config cmdlet. Note: exec is attached at the
# bottom of this file (CMDLET.exec = _run) after the runner is defined.
CMDLET = Cmdlet(
    name=".config",
    summary="Manage configuration settings",
    usage=".config [key] [value]",
    arg=[
        CmdletArg(
            name="key",
            description="Configuration key to update (dot-separated)",
            required=False
        ),
        CmdletArg(
            name="value",
            description="New value for the configuration key",
            required=False
        )
    ]
)
|
||||
|
||||
def flatten_config(config: Dict[str, Any], parent_key: str = '', sep: str = '.') -> List[Dict[str, Any]]:
    """Flatten a nested config dict into display-row dicts with dotted keys.

    Keys starting with '_' are treated as internal and skipped. Nested dicts
    recurse; each leaf value becomes one row carrying the dotted key, the
    stringified value, the value's type name, and selection args.
    """
    rows: List[Dict[str, Any]] = []
    for key, value in config.items():
        if key.startswith('_'):
            continue  # internal bookkeeping keys are not user-facing

        dotted = f"{parent_key}{sep}{key}" if parent_key else key
        if isinstance(value, dict):
            rows.extend(flatten_config(value, dotted, sep=sep))
            continue

        rows.append({
            "Key": dotted,
            "Value": str(value),
            "Type": type(value).__name__,
            "_selection_args": [dotted],
        })
    return rows
|
||||
|
||||
def set_nested_config(config: Dict[str, Any], key: str, value: str) -> bool:
    """Set a dot-separated *key* in *config* to *value*, coercing types.

    Existing keys keep their current type where the text converts cleanly
    (bool/int/float); otherwise the raw string is stored with a warning.
    New keys infer bool/int/float from the text, falling back to str.
    Intermediate non-dict nodes are replaced by empty dicts.

    Returns:
        True (the function always succeeds; conversion problems degrade to
        storing the raw string).
    """
    keys = key.split('.')
    d = config

    # Navigate to (and create, if needed) the parent dict chain.
    for k in keys[:-1]:
        if k not in d or not isinstance(d[k], dict):
            d[k] = {}
        d = d[k]

    last_key = keys[-1]

    if last_key in d:
        # Try to preserve the existing value's type.
        current_val = d[last_key]
        if isinstance(current_val, bool):
            if value.lower() in ('true', 'yes', '1', 'on'):
                d[last_key] = True
            elif value.lower() in ('false', 'no', '0', 'off'):
                d[last_key] = False
            else:
                # Explicit is better than a surprising truthiness cast.
                print(f"Warning: Could not convert '{value}' to boolean. Using string.")
                d[last_key] = value
        elif isinstance(current_val, int):
            try:
                d[last_key] = int(value)
            except ValueError:
                print(f"Warning: Could not convert '{value}' to int. Using string.")
                d[last_key] = value
        elif isinstance(current_val, float):
            try:
                d[last_key] = float(value)
            except ValueError:
                print(f"Warning: Could not convert '{value}' to float. Using string.")
                d[last_key] = value
        else:
            d[last_key] = value
    else:
        # New key: infer a sensible type from the text.
        d[last_key] = _infer_config_value(value)

    return True


def _infer_config_value(value: str) -> Any:
    """Best-effort bool/int/float inference for a brand-new config value.

    Unlike the old str.isdigit() check, int() also accepts negative numbers,
    and a float fallback covers values like "2.5".
    """
    if value.lower() in ('true', 'false'):
        return value.lower() == 'true'
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return float(value)
    except ValueError:
        return value
|
||||
|
||||
def _run(piped_result: Any, args: List[str], config: Dict[str, Any]) -> int:
    """List config entries (no args) or set one key to a new value.

    Returns 0 on success, 1 when a value is missing or the update fails.
    """
    # Reload config to ensure we have the latest on disk.
    # We don't use the passed 'config' because we want to edit the file
    # and 'config' might contain runtime objects (like worker manager);
    # load_config() returns a fresh dict from disk (or cache).
    current_config = load_config()

    # Parse args manually because of the potential for spaces in values
    # and the @ expansion logic in CLI.py passing args.

    if not args:
        # List mode: emit one flattened row per leaf config value.
        items = flatten_config(current_config)
        # Sort by key for a stable, scannable listing.
        items.sort(key=lambda x: x['Key'])

        # Emit items for ResultTable
        import pipeline as ctx
        for item in items:
            ctx.emit(item)
        return 0

    # Update mode
    key = args[0]

    if len(args) < 2:
        print(f"Error: Value required for key '{key}'")
        return 1

    # Rejoin the remaining tokens so values containing spaces survive.
    value = " ".join(args[1:])

    # Remove surrounding quotes if present.
    # NOTE(review): a single-character value of just '"' or "'" satisfies both
    # startswith and endswith and becomes "" — confirm acceptable.
    if (value.startswith('"') and value.endswith('"')) or (value.startswith("'") and value.endswith("'")):
        value = value[1:-1]

    try:
        set_nested_config(current_config, key, value)
        save_config(current_config)
        print(f"Updated '{key}' to '{value}'")
        return 0
    except Exception as e:
        print(f"Error updating config: {e}")
        return 1
|
||||
|
||||
# Attach the runner after its definition; cmdnat registration reads CMDLET.exec.
CMDLET.exec = _run
|
||||
185
cmdnat/help.py
Normal file
185
cmdnat/help.py
Normal file
@@ -0,0 +1,185 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence, List, Optional
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
from SYS.logger import log
|
||||
from result_table import ResultTable
|
||||
import pipeline as ctx
|
||||
|
||||
|
||||
def _normalize_choice_list(arg_names: Optional[List[str]]) -> List[str]:
|
||||
return sorted(set(arg_names or []))
|
||||
|
||||
|
||||
def _examples_for_cmd(name: str) -> List[str]:
|
||||
"""Return example invocations for a given command (best-effort)."""
|
||||
lookup = {
|
||||
".adjective": [
|
||||
'.adjective -add "example"',
|
||||
'.adjective -delete "example"',
|
||||
],
|
||||
}
|
||||
|
||||
key = name.replace("_", "-").lower()
|
||||
return lookup.get(key, [])
|
||||
|
||||
|
||||
def _find_cmd_metadata(name: str, metadata: Dict[str, Dict[str, Any]]) -> Optional[Dict[str, Any]]:
|
||||
target = name.replace("_", "-").lower()
|
||||
for cmd_name, meta in metadata.items():
|
||||
if target == cmd_name:
|
||||
return meta
|
||||
aliases = meta.get("aliases", []) or []
|
||||
if target in aliases:
|
||||
return meta
|
||||
return None
|
||||
|
||||
|
||||
def _render_list(metadata: Dict[str, Dict[str, Any]], filter_text: Optional[str], args: Sequence[str]) -> None:
    """Render the summary table of all cmdlets, optionally filtered.

    A row is included when *filter_text* (case-insensitive) appears in the
    command name or its summary; row selection re-invokes .help -cmd <name>.
    """
    table = ResultTable("Help")
    table.set_source_command(".help", list(args))

    items: List[Dict[str, Any]] = []
    needle = (filter_text or "").lower().strip()

    for name in sorted(metadata.keys()):
        meta = metadata[name]
        summary = meta.get("summary", "") or ""
        # Skip rows that match neither name nor summary when filtering.
        if needle and needle not in name.lower() and needle not in summary.lower():
            continue

        row = table.add_row()
        row.add_column("Cmd", name)
        aliases = ", ".join(meta.get("aliases", []) or [])
        row.add_column("Aliases", aliases)
        arg_names = [a.get("name") for a in meta.get("args", []) if a.get("name")]
        row.add_column("Args", ", ".join(f"-{a}" for a in arg_names))
        # Selecting this row expands to: .help -cmd <name>
        table.set_row_selection_args(len(table.rows) - 1, ["-cmd", name])
        items.append(meta)

    ctx.set_last_result_table(table, items)
    ctx.set_current_stage_table(table)
    print(table)
|
||||
|
||||
|
||||
def _render_detail(meta: Dict[str, Any], args: Sequence[str]) -> None:
    """Render a detailed help table (header + per-argument rows) for one cmdlet.

    When an example invocation exists, the table's source command is switched
    to it so that selecting any row (@N) runs that example.
    """
    title = f"Help: {meta.get('name', '') or 'cmd'}"
    table = ResultTable(title)
    table.set_source_command(".help", list(args))

    header_lines: List[str] = []
    summary = meta.get("summary", "")
    usage = meta.get("usage", "")
    aliases = meta.get("aliases", []) or []
    examples = _examples_for_cmd(meta.get("name", ""))
    first_example_tokens: List[str] = []
    first_example_cmd: Optional[str] = None
    if examples:
        # Split the first example shell-style into command + argument tokens.
        try:
            split_tokens = shlex.split(examples[0])
            if split_tokens:
                first_example_cmd = split_tokens[0]
                first_example_tokens = split_tokens[1:]
        except Exception:
            pass

    if summary:
        header_lines.append(summary)
    if usage:
        header_lines.append(f"Usage: {usage}")
    if aliases:
        header_lines.append("Aliases: " + ", ".join(aliases))
    if examples:
        header_lines.append("Examples: " + " | ".join(examples))
    if header_lines:
        table.set_header_lines(header_lines)

    args_meta = meta.get("args", []) or []
    example_text = " | ".join(examples)
    # If we have an example, use it as the source command so @N runs that example
    if first_example_cmd:
        table.set_source_command(first_example_cmd, [])
    if not args_meta:
        # No declared arguments: emit a single placeholder row.
        row = table.add_row()
        row.add_column("Arg", "(none)")
        row.add_column("Type", "")
        row.add_column("Req", "")
        row.add_column("Description", "")
        row.add_column("Example", example_text)
        if first_example_tokens:
            table.set_row_selection_args(len(table.rows) - 1, first_example_tokens)
    else:
        # One row per declared argument, with choices folded into the description.
        for arg in args_meta:
            row = table.add_row()
            name = arg.get("name") or ""
            row.add_column("Arg", f"-{name}" if name else "")
            row.add_column("Type", arg.get("type", ""))
            row.add_column("Req", "yes" if arg.get("required") else "")
            desc = arg.get("description", "") or ""
            choices = arg.get("choices", []) or []
            if choices:
                choice_text = f"choices: {', '.join(choices)}"
                desc = f"{desc} ({choice_text})" if desc else choice_text
            row.add_column("Description", desc)
            row.add_column("Example", example_text)
            if first_example_tokens:
                table.set_row_selection_args(len(table.rows) - 1, first_example_tokens)

    ctx.set_last_result_table_overlay(table, [meta])
    ctx.set_current_stage_table(table)
    print(table)
|
||||
|
||||
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Entry point for .help: show the cmdlet list or one cmdlet's detail.

    Refreshes the 'cmd' argument's choices from the live catalog so tab
    completion stays current. Returns 0 on success, 1 for unknown commands.
    """
    try:
        from cmdlet import catalog as _catalog

        CMDLET.arg[0].choices = _normalize_choice_list(_catalog.list_cmdlet_names())
        metadata = _catalog.list_cmdlet_metadata()
    except Exception:
        # Catalog unavailable: degrade to an empty listing rather than crash.
        CMDLET.arg[0].choices = []
        metadata = {}

    parsed = parse_cmdlet_args(args, CMDLET)

    filter_text = parsed.get("filter")
    cmd_arg = parsed.get("cmd")

    if cmd_arg:
        target_meta = _find_cmd_metadata(str(cmd_arg), metadata)
        if not target_meta:
            log(f"Unknown command: {cmd_arg}", file=sys.stderr)
            return 1
        _render_detail(target_meta, args)
        return 0

    _render_list(metadata, filter_text, args)
    return 0
|
||||
|
||||
|
||||
# Registration object for .help; the 'cmd' choices list starts empty and is
# refreshed from the catalog each time _run() executes.
CMDLET = Cmdlet(
    name=".help",
    alias=["help", "?"],
    summary="Show cmdlet or detailed help",
    usage=".help [cmd] [-filter text]",
    arg=[
        CmdletArg(
            name="cmd",
            type="string",
            description="Cmdlet name to show detailed help",
            required=False,
            choices=[],
        ),
        CmdletArg(
            name="-filter",
            type="string",
            description="Filter cmdlet by substring",
            required=False,
        ),
    ],
)

# Attach the runner after its definition; registration reads CMDLET.exec.
CMDLET.exec = _run
|
||||
29
cmdnat/matrix.py
Normal file
29
cmdnat/matrix.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from typing import Any, Dict, Sequence, List
|
||||
import sys
|
||||
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
|
||||
from SYS.logger import log, debug
|
||||
from result_table import ResultTable
|
||||
# REFACTOR: Commenting out Matrix import until provider refactor is complete
|
||||
from config import save_config, load_config
|
||||
import pipeline as ctx
|
||||
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Stub entry point: log a disabled notice and fail with exit code 1."""
    # REFACTOR: Matrix cmdlet temporarily disabled during storage provider refactor
    log("⚠️ Matrix cmdlet is temporarily disabled during refactor", file=sys.stderr)
    return 1
|
||||
|
||||
# Registration object for .matrix; exec points at the disabled stub above,
# so every invocation currently logs a notice and returns 1.
CMDLET = Cmdlet(
    name=".matrix",
    alias=["matrix", "rooms"],
    summary="List and select default Matrix room",
    usage=".matrix [selection]",
    arg=[
        CmdletArg(
            name="selection",
            type="string",
            description="Index or ID of the room to set as default",
            required=False
        )
    ],
    exec=_run
)
|
||||
1486
cmdnat/pipe.py
Normal file
1486
cmdnat/pipe.py
Normal file
File diff suppressed because it is too large
Load Diff
327
cmdnat/worker.py
Normal file
327
cmdnat/worker.py
Normal file
@@ -0,0 +1,327 @@
|
||||
"""Worker cmdlet: Display workers table in ResultTable format."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, Sequence, List
|
||||
|
||||
from cmdlet import register
|
||||
from cmdlet._shared import Cmdlet, CmdletArg
|
||||
import pipeline as ctx
|
||||
from SYS.logger import log
|
||||
from config import get_local_storage_path
|
||||
|
||||
# Default cap on how many workers the list view fetches/shows.
DEFAULT_LIMIT = 100
# Bare-word statuses accepted as positional filters by _parse_worker_args.
WORKER_STATUS_FILTERS = {"running", "completed", "error", "cancelled"}
# Tokens that trigger printing the cmdlet's own metadata instead of running.
HELP_FLAGS = {"-?", "/?", "--help", "-h", "help", "--cmdlet"}
|
||||
|
||||
# Registration metadata for .worker. The runner is attached via the
# @register decorator on _run below rather than an exec= argument here.
CMDLET = Cmdlet(
    name=".worker",
    summary="Display workers table in result table format.",
    usage=".worker [status] [-limit N] [@N]",
    arg=[
        CmdletArg("status", description="Filter by status: running, completed, error (default: all)"),
        CmdletArg("limit", type="integer", description="Limit results (default: 100)"),
        CmdletArg("@N", description="Select worker by index (1-based) and display full logs"),
        CmdletArg("-id", description="Show full logs for a specific worker"),
        CmdletArg("-clear", type="flag", description="Remove completed workers from the database"),
    ],
    detail=[
        "- Shows all background worker tasks and their output",
        "- Can filter by status: running, completed, error",
        "- Search result stdout is captured from each worker",
        "- Use @N to select a specific worker by index and display its full logs",
        "Examples:",
        ".worker                      # Show all workers",
        ".worker running              # Show running workers only",
        ".worker completed -limit 50  # Show 50 most recent completed workers",
        ".worker @3                   # Show full logs for the 3rd worker",
        ".worker running @2           # Show full logs for the 2nd running worker",
    ],
)
|
||||
|
||||
|
||||
def _has_help_flag(args_list: Sequence[str]) -> bool:
    """True when any argument (case-insensitive) is a recognized help flag."""
    for token in args_list:
        if str(token).lower() in HELP_FLAGS:
            return True
    return False
|
||||
|
||||
|
||||
@dataclass
class WorkerCommandOptions:
    """Parsed command-line options for the .worker cmdlet."""

    # Status filter (running/completed/error/cancelled) or None for all.
    status: str | None = None
    # Maximum number of workers to fetch for the list view.
    limit: int = DEFAULT_LIMIT
    # When set, show full logs for this one worker instead of the list.
    worker_id: str | None = None
    # When True, purge finished workers from the database and exit.
    clear: bool = False
|
||||
|
||||
|
||||
@register([".worker", "worker", "workers"])
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Display workers table or show detailed logs for a specific worker.

    Dispatch order: help flags -> -clear -> -id detail -> @N selection ->
    filtered list. Returns 0 on success, 1 on configuration or query errors.
    """
    args_list = [str(arg) for arg in (args or [])]
    # A prior @N selection combined with a piped list means "show detail
    # for the selected rows" rather than re-rendering the list.
    selection_indices = ctx.get_last_selection()
    selection_requested = bool(selection_indices) and isinstance(result, list) and len(result) > 0

    if _has_help_flag(args_list):
        # NOTE(review): json.dumps on the Cmdlet object relies on Cmdlet being
        # JSON-serializable — confirm against the Cmdlet class definition.
        log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
        return 0

    options = _parse_worker_args(args_list)

    library_root = get_local_storage_path(config or {})
    if not library_root:
        log("No library root configured", file=sys.stderr)
        return 1

    try:
        # Imported lazily so this module loads even if the API package
        # is unavailable at import time.
        from API.folder import API_folder_store

        with API_folder_store(library_root) as db:
            if options.clear:
                count = db.clear_finished_workers()
                log(f"Cleared {count} finished workers.")
                return 0

            if options.worker_id:
                worker = db.get_worker(options.worker_id)
                if worker:
                    events: List[Dict[str, Any]] = []
                    # Event fetching is best-effort; detail still renders
                    # from stdout if it fails.
                    try:
                        wid = worker.get("worker_id")
                        if wid and hasattr(db, "get_worker_events"):
                            events = db.get_worker_events(wid)
                    except Exception:
                        pass
                    _emit_worker_detail(worker, events)
                    return 0
                log(f"Worker not found: {options.worker_id}", file=sys.stderr)
                return 1

            if selection_requested:
                return _render_worker_selection(db, result)

            return _render_worker_list(db, options.status, options.limit)
    except Exception as exc:
        log(f"Workers query failed: {exc}", file=sys.stderr)
        import traceback

        traceback.print_exc(file=sys.stderr)
        return 1
|
||||
|
||||
|
||||
def _parse_worker_args(args_list: Sequence[str]) -> WorkerCommandOptions:
    """Translate raw CLI tokens into a WorkerCommandOptions record.

    Recognizes -limit/-id/-status (each consuming the next token), the
    -clear flag, bare status words, and silently skips unknown flags.
    """
    opts = WorkerCommandOptions()
    idx = 0
    total = len(args_list)
    while idx < total:
        token = args_list[idx]
        lowered = token.lower()
        has_value = idx + 1 < total

        if lowered in {"-limit", "--limit"} and has_value:
            opts.limit = _normalize_limit(args_list[idx + 1])
            idx += 2
        elif lowered in {"-id", "--id"} and has_value:
            opts.worker_id = args_list[idx + 1]
            idx += 2
        elif lowered in {"-clear", "--clear"}:
            opts.clear = True
            idx += 1
        elif lowered in {"-status", "--status"} and has_value:
            opts.status = args_list[idx + 1].lower()
            idx += 2
        elif lowered in WORKER_STATUS_FILTERS:
            opts.status = lowered
            idx += 1
        elif not token.startswith("-"):
            # Any bare word is treated as a status filter, even unrecognized.
            opts.status = lowered
            idx += 1
        else:
            idx += 1  # unknown flag: skip it
    return opts
|
||||
|
||||
|
||||
def _normalize_limit(value: Any) -> int:
|
||||
try:
|
||||
return max(1, int(value))
|
||||
except (TypeError, ValueError):
|
||||
return DEFAULT_LIMIT
|
||||
|
||||
|
||||
def _render_worker_list(db, status_filter: str | None, limit: int) -> int:
    """Emit one table row per worker, optionally filtered by status.

    Each emitted item carries the raw worker record under __worker_metadata
    and selection args so @N re-invokes the cmdlet with -id <worker_id>.
    Returns 0 (also when there is nothing to show).
    """
    workers = db.get_all_workers(limit=limit)
    if status_filter:
        # Filter client-side after the limited fetch.
        workers = [w for w in workers if str(w.get("status", "")).lower() == status_filter]

    if not workers:
        log("No workers found", file=sys.stderr)
        return 0

    for worker in workers:
        started = worker.get("started_at", "")
        # Prefer completed_at; fall back to last_updated for live workers.
        ended = worker.get("completed_at", worker.get("last_updated", ""))

        date_str = _extract_date(started)
        start_time = _format_event_timestamp(started)
        end_time = _format_event_timestamp(ended)

        item = {
            "columns": [
                ("Status", worker.get("status", "")),
                ("Pipe", _summarize_pipe(worker.get("pipe"))),
                ("Date", date_str),
                ("Start Time", start_time),
                ("End Time", end_time),
            ],
            "__worker_metadata": worker,
            "_selection_args": ["-id", worker.get("worker_id")],
        }
        ctx.emit(item)
    return 0
|
||||
|
||||
|
||||
def _render_worker_selection(db, selected_items: Any) -> int:
    """Emit full log detail for each selected worker row.

    Returns 0 when at least one row resolves to a worker record,
    1 when the payload is malformed or every row is stale.
    """
    if not isinstance(selected_items, list):
        log("Selection payload missing", file=sys.stderr)
        return 1

    resolved_any = False
    for payload in selected_items:
        worker = _resolve_worker_record(db, payload)
        if not worker:
            continue
        # Event fetching is best-effort; detail renders from stdout regardless.
        try:
            has_events = hasattr(db, "get_worker_events")
            events = db.get_worker_events(worker.get("worker_id")) if has_events else []
        except Exception:
            events = []
        _emit_worker_detail(worker, events)
        resolved_any = True

    if not resolved_any:
        log("Selected rows no longer exist", file=sys.stderr)
        return 1
    return 0
|
||||
|
||||
|
||||
def _resolve_worker_record(db, payload: Any) -> Dict[str, Any] | None:
|
||||
if not isinstance(payload, dict):
|
||||
return None
|
||||
worker_data = payload.get("__worker_metadata")
|
||||
worker_id = None
|
||||
if isinstance(worker_data, dict):
|
||||
worker_id = worker_data.get("worker_id")
|
||||
else:
|
||||
worker_id = payload.get("worker_id")
|
||||
worker_data = None
|
||||
if worker_id:
|
||||
fresh = db.get_worker(worker_id)
|
||||
if fresh:
|
||||
return fresh
|
||||
return worker_data if isinstance(worker_data, dict) else None
|
||||
|
||||
|
||||
def _emit_worker_detail(worker: Dict[str, Any], events: List[Dict[str, Any]]) -> None:
    """Emit one table row per non-empty line of the worker's captured stdout.

    Lines shaped like "ts - ... - LEVEL - msg" or "ts - LEVEL - msg" are
    split into Time/Level/Message columns; anything else is emitted verbatim
    as an INFO message. The *events* argument is currently unused.
    """
    stdout_content = worker.get("stdout", "") or ""

    lines = stdout_content.splitlines()

    for line in lines:
        line = line.strip()
        if not line:
            continue

        # Defaults when the line doesn't match the logger format.
        timestamp = ""
        level = "INFO"
        message = line

        try:
            parts = line.split(" - ", 3)
            if len(parts) >= 4:
                # "timestamp - logger - LEVEL - message" (logger discarded)
                ts_str, _, lvl, msg = parts
                timestamp = _format_event_timestamp(ts_str)
                level = lvl
                message = msg
            elif len(parts) == 3:
                # "timestamp - LEVEL - message"
                ts_str, lvl, msg = parts
                timestamp = _format_event_timestamp(ts_str)
                level = lvl
                message = msg
        except Exception:
            pass

        item = {
            "columns": [
                ("Time", timestamp),
                ("Level", level),
                ("Message", message),
            ]
        }
        ctx.emit(item)

    # Events are already always derived from stdout for now.
|
||||
|
||||
|
||||
def _summarize_pipe(pipe_value: Any, limit: int = 60) -> str:
|
||||
text = str(pipe_value or "").strip()
|
||||
if not text:
|
||||
return "(none)"
|
||||
return text if len(text) <= limit else text[: limit - 3] + "..."
|
||||
|
||||
|
||||
def _format_event_timestamp(raw_timestamp: Any) -> str:
    """Format a timestamp as HH:MM:SS, with a string-slicing fallback.

    Parses to local time via _parse_to_local when possible; otherwise slices
    the time portion out of the raw text. Empty input yields "--:--:--".
    """
    parsed = _parse_to_local(raw_timestamp)
    if parsed:
        return parsed.strftime("%H:%M:%S")

    if not raw_timestamp:
        return "--:--:--"

    text = str(raw_timestamp)
    # Fall back to slicing out whatever follows the date separator.
    if "T" in text:
        time_part = text.split("T", 1)[1]
    elif " " in text:
        time_part = text.split(" ", 1)[1]
    else:
        time_part = text
    return time_part if len(time_part) < 8 else time_part[:8]
|
||||
|
||||
|
||||
def _parse_to_local(timestamp_str: Any) -> datetime | None:
|
||||
if not timestamp_str:
|
||||
return None
|
||||
text = str(timestamp_str).strip()
|
||||
if not text:
|
||||
return None
|
||||
|
||||
try:
|
||||
if "T" in text:
|
||||
return datetime.fromisoformat(text)
|
||||
if " " in text:
|
||||
dt = datetime.strptime(text, "%Y-%m-%d %H:%M:%S")
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt.astimezone()
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def _extract_date(raw_timestamp: Any) -> str:
    """Return an MM-DD-YY date string for display; '' when input is empty.

    Uses _parse_to_local when the timestamp parses; otherwise slices the
    date portion out of the raw text and reformats YYYY-MM-DD best-effort.
    """
    parsed = _parse_to_local(raw_timestamp)
    if parsed:
        return parsed.strftime("%m-%d-%y")

    if not raw_timestamp:
        return ""

    text = str(raw_timestamp)
    if "T" in text:
        date_part = text.split("T", 1)[0]
    elif " " in text:
        date_part = text.split(" ", 1)[0]
    else:
        date_part = text

    # Reorder YYYY-MM-DD into MM-DD-YY when the shape matches.
    try:
        pieces = date_part.split("-")
        if len(pieces) == 3:
            year, month, day = pieces
            return f"{month}-{day}-{year[2:]}"
    except Exception:
        pass
    return date_part
|
||||
Reference in New Issue
Block a user