@@ -1,4 +1,5 @@
 """Search-store cmdlet: Search for files in storage backends (Folder, Hydrus)."""

 from __future__ import annotations

 from typing import Any, Dict, Sequence, List, Optional
@@ -12,7 +13,16 @@ from SYS.logger import log, debug

 from . import _shared as sh

-Cmdlet, CmdletArg, SharedArgs, get_field, should_show_help, normalize_hash, first_title_tag, parse_hash_query = (
+(
+    Cmdlet,
+    CmdletArg,
+    SharedArgs,
+    get_field,
+    should_show_help,
+    normalize_hash,
+    first_title_tag,
+    parse_hash_query,
+) = (
     sh.Cmdlet,
     sh.CmdletArg,
     sh.SharedArgs,
@@ -37,9 +47,9 @@ class Search_Store(Cmdlet):
             summary="Search storage backends (Folder, Hydrus) for files.",
             usage="search-store [-query <query>] [-store BACKEND] [-limit N]",
             arg=[
                 CmdletArg("query", description="Search query string"),
                 CmdletArg("limit", type="integer", description="Limit results (default: 100)"),
                 SharedArgs.STORE,
                 SharedArgs.QUERY,
             ],
             detail=[
                 "Search across storage backends: Folder stores and Hydrus instances",
@@ -84,20 +94,22 @@ class Search_Store(Cmdlet):

         # Ensure we have title field
         if "title" not in payload:
-            payload["title"] = payload.get("name") or payload.get("target") or payload.get("path") or "Result"
+            payload["title"] = (
+                payload.get("name") or payload.get("target") or payload.get("path") or "Result"
+            )

         # Ensure we have ext field
         if "ext" not in payload:
             title = str(payload.get("title", ""))
             path_obj = Path(title)
             if path_obj.suffix:
-                payload["ext"] = self._normalize_extension(path_obj.suffix.lstrip('.'))
+                payload["ext"] = self._normalize_extension(path_obj.suffix.lstrip("."))
             else:
                 payload["ext"] = payload.get("ext", "")

         # Ensure size_bytes is present for display (already set by search_file())
         # result_table will handle formatting it

         # Don't create manual columns - let result_table handle display
         # This allows the table to respect max_columns and apply consistent formatting
         return payload
@@ -134,11 +146,15 @@ class Search_Store(Cmdlet):

         raw_title = None
         try:
-            raw_title = ctx.get_current_stage_text("") if hasattr(ctx, "get_current_stage_text") else None
+            raw_title = (
+                ctx.get_current_stage_text("") if hasattr(ctx, "get_current_stage_text") else None
+            )
         except Exception:
             raw_title = None

-        command_title = (str(raw_title).strip() if raw_title else "") or _format_command_title("search-store", list(args_list))
+        command_title = (str(raw_title).strip() if raw_title else "") or _format_command_title(
+            "search-store", list(args_list)
+        )

         # Build dynamic flag variants from cmdlet arg definitions.
         # This avoids hardcoding flag spellings in parsing loops.
@@ -184,7 +200,7 @@ class Search_Store(Cmdlet):
             store_filter = match.group(1).strip() or None
             query = re.sub(r"\s*[,]?\s*store:[^\s,]+", " ", query, flags=re.IGNORECASE)
             query = re.sub(r"\s{2,}", " ", query)
-            query = query.strip().strip(',')
+            query = query.strip().strip(",")

         if store_filter and not storage_backend:
             storage_backend = store_filter
@@ -198,6 +214,7 @@ class Search_Store(Cmdlet):
         from API.folder import API_folder_store
         from config import get_local_storage_path
         import uuid

         worker_id = str(uuid.uuid4())
         library_root = get_local_storage_path(config or {})
         if not library_root:
@@ -212,12 +229,13 @@ class Search_Store(Cmdlet):
             "search-store",
             title=f"Search: {query}",
             description=f"Query: {query}",
-            pipe=ctx.get_current_command_text()
+            pipe=ctx.get_current_command_text(),
         )

         results_list = []
         import result_table
         import importlib

         importlib.reload(result_table)
         from result_table import ResultTable

@@ -233,6 +251,7 @@ class Search_Store(Cmdlet):
                 pass

         from Store import Store

         storage = Store(config=config or {})
         from Store._base import Store as BaseStore

@@ -301,7 +320,11 @@ class Search_Store(Cmdlet):
                     else:
                         maybe_tags = tag_result
                     if isinstance(maybe_tags, list):
-                        tags_list = [str(t).strip() for t in maybe_tags if isinstance(t, str) and str(t).strip()]
+                        tags_list = [
+                            str(t).strip()
+                            for t in maybe_tags
+                            if isinstance(t, str) and str(t).strip()
+                        ]
                 except Exception:
                     tags_list = []

@@ -336,7 +359,9 @@ class Search_Store(Cmdlet):
                 if size_bytes is None:
                     size_bytes = meta_obj.get("size_bytes")
                 try:
-                    size_bytes_int: Optional[int] = int(size_bytes) if size_bytes is not None else None
+                    size_bytes_int: Optional[int] = (
+                        int(size_bytes) if size_bytes is not None else None
+                    )
                 except Exception:
                     size_bytes_int = None

@@ -362,7 +387,7 @@ class Search_Store(Cmdlet):
             else:
                 ctx.set_last_result_table(table, results_list)
             db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
-            db.update_worker_status(worker_id, 'completed')
+            db.update_worker_status(worker_id, "completed")
             return 0

         log("No results found", file=sys.stderr)
@@ -373,15 +398,18 @@ class Search_Store(Cmdlet):
             except Exception:
                 pass
             db.append_worker_stdout(worker_id, json.dumps([], indent=2))
-            db.update_worker_status(worker_id, 'completed')
+            db.update_worker_status(worker_id, "completed")
             return 0

         if backend_to_search:
             searched_backends.append(backend_to_search)
             target_backend = storage[backend_to_search]
             if type(target_backend).search is BaseStore.search:
-                log(f"Backend '{backend_to_search}' does not support searching", file=sys.stderr)
-                db.update_worker_status(worker_id, 'error')
+                log(
+                    f"Backend '{backend_to_search}' does not support searching",
+                    file=sys.stderr,
+                )
+                db.update_worker_status(worker_id, "error")
                 return 1
             debug(f"[search-store] Searching '{backend_to_search}'")
             results = target_backend.search(query, limit=limit)
@@ -395,7 +423,9 @@ class Search_Store(Cmdlet):

             debug(f"[search-store] Searching '{backend_name}'")
             backend_results = backend.search(query, limit=limit - len(all_results))
-            debug(f"[search-store] '{backend_name}' -> {len(backend_results or [])} result(s)")
+            debug(
+                f"[search-store] '{backend_name}' -> {len(backend_results or [])} result(s)"
+            )
             if backend_results:
                 all_results.extend(backend_results)
                 if len(all_results) >= limit:
@@ -406,6 +436,7 @@ class Search_Store(Cmdlet):

         if results:
             for item in results:

                 def _as_dict(obj: Any) -> Dict[str, Any]:
                     if isinstance(obj, dict):
                         return dict(obj)
@@ -450,15 +481,16 @@ class Search_Store(Cmdlet):
                 pass
             db.append_worker_stdout(worker_id, json.dumps([], indent=2))

-            db.update_worker_status(worker_id, 'completed')
+            db.update_worker_status(worker_id, "completed")
             return 0

         except Exception as exc:
             log(f"Search failed: {exc}", file=sys.stderr)
             import traceback

             traceback.print_exc(file=sys.stderr)
             try:
-                db.update_worker_status(worker_id, 'error')
+                db.update_worker_status(worker_id, "error")
             except Exception:
                 pass
             return 1