Add YAPF style + ignore, and format tracked Python files
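The committed .style.yapf and .yapfignore are not visible in this excerpt, so the snippet below is only a hedged sketch of what such a pair of files might contain. The option names are real YAPF knobs, but the values are an assumption, not a copy of the committed files; a style along these lines, with aggressive comma splitting and a wide column limit, is the kind of configuration that yields the wrapped one-liners in the diff that follows.

    # .style.yapf (hypothetical contents, not taken from the commit)
    [style]
    based_on_style = pep8
    column_limit = 100
    # Splitting comma-separated values onto their own lines is what turns
    # compact forms such as Dict[str, Any] into the wrapped variants below.
    split_all_comma_separated_values = true

    # .yapfignore (hypothetical): glob patterns, one per line, that YAPF skips
    *_pb2.py
    temp/**/*.py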
@@ -34,8 +34,9 @@ from . import _shared as sh
 )
 import pipeline as ctx


-STORAGE_ORIGINS = {"local", "hydrus", "folder"}
+STORAGE_ORIGINS = {"local",
+                   "hydrus",
+                   "folder"}


 class Search_Store(Cmdlet):
@@ -47,7 +48,11 @@ class Search_Store(Cmdlet):
             summary="Search storage backends (Folder, Hydrus) for files.",
             usage="search-store [-query <query>] [-store BACKEND] [-limit N]",
             arg=[
-                CmdletArg("limit", type="integer", description="Limit results (default: 100)"),
+                CmdletArg(
+                    "limit",
+                    type="integer",
+                    description="Limit results (default: 100)"
+                ),
                 SharedArgs.STORE,
                 SharedArgs.QUERY,
             ],
@@ -95,7 +100,8 @@ class Search_Store(Cmdlet):
         # Ensure we have title field
         if "title" not in payload:
             payload["title"] = (
-                payload.get("name") or payload.get("target") or payload.get("path") or "Result"
+                payload.get("name") or payload.get("target") or payload.get("path")
+                or "Result"
             )

         # Ensure we have ext field
@@ -123,9 +129,13 @@ class Search_Store(Cmdlet):

         args_list = [str(arg) for arg in (args or [])]

-        refresh_mode = any(str(a).strip().lower() in {"--refresh", "-refresh"} for a in args_list)
+        refresh_mode = any(
+            str(a).strip().lower() in {"--refresh",
+                                       "-refresh"} for a in args_list
+        )

         def _format_command_title(command: str, raw_args: List[str]) -> str:

             def _quote(value: str) -> str:
                 text = str(value)
                 if not text:
@@ -136,8 +146,7 @@ class Search_Store(Cmdlet):
                 return '"' + text.replace('"', '\\"') + '"'

             cleaned = [
-                str(a)
-                for a in (raw_args or [])
+                str(a) for a in (raw_args or [])
                 if str(a).strip().lower() not in {"--refresh", "-refresh"}
             ]
             if not cleaned:
@@ -147,21 +156,32 @@ class Search_Store(Cmdlet):
         raw_title = None
         try:
             raw_title = (
-                ctx.get_current_stage_text("") if hasattr(ctx, "get_current_stage_text") else None
+                ctx.get_current_stage_text("")
+                if hasattr(ctx,
+                           "get_current_stage_text") else None
             )
         except Exception:
             raw_title = None

-        command_title = (str(raw_title).strip() if raw_title else "") or _format_command_title(
-            "search-store", list(args_list)
-        )
+        command_title = (str(raw_title).strip() if raw_title else
+                         "") or _format_command_title("search-store",
+                                                      list(args_list))

         # Build dynamic flag variants from cmdlet arg definitions.
         # This avoids hardcoding flag spellings in parsing loops.
         flag_registry = self.build_flag_registry()
-        query_flags = {f.lower() for f in (flag_registry.get("query") or {"-query", "--query"})}
-        store_flags = {f.lower() for f in (flag_registry.get("store") or {"-store", "--store"})}
-        limit_flags = {f.lower() for f in (flag_registry.get("limit") or {"-limit", "--limit"})}
+        query_flags = {
+            f.lower()
+            for f in (flag_registry.get("query") or {"-query", "--query"})
+        }
+        store_flags = {
+            f.lower()
+            for f in (flag_registry.get("store") or {"-store", "--store"})
+        }
+        limit_flags = {
+            f.lower()
+            for f in (flag_registry.get("limit") or {"-limit", "--limit"})
+        }

         # Parse arguments
         query = ""
@@ -212,7 +232,7 @@ class Search_Store(Cmdlet):
             return 1

         from API.folder import API_folder_store
-        from config import get_local_storage_path
+        from SYS.config import get_local_storage_path
         import uuid

         worker_id = str(uuid.uuid4())
@@ -306,7 +326,8 @@ class Search_Store(Cmdlet):
                 except Exception:
                     path_str = None

-                meta_obj: Dict[str, Any] = {}
+                meta_obj: Dict[str,
+                               Any] = {}
                 try:
                     meta_obj = resolved_backend.get_metadata(h) or {}
                 except Exception:
@@ -321,8 +342,7 @@ class Search_Store(Cmdlet):
                         maybe_tags = tag_result
                     if isinstance(maybe_tags, list):
                         tags_list = [
-                            str(t).strip()
-                            for t in maybe_tags
+                            str(t).strip() for t in maybe_tags
                             if isinstance(t, str) and str(t).strip()
                         ]
                 except Exception:
@@ -336,7 +356,9 @@ class Search_Store(Cmdlet):
                 except Exception:
                     title_from_tag = None

-                title = title_from_tag or meta_obj.get("title") or meta_obj.get("name")
+                title = title_from_tag or meta_obj.get("title") or meta_obj.get(
+                    "name"
+                )
                 if not title and path_str:
                     try:
                         title = Path(path_str).stem
@@ -365,15 +387,16 @@ class Search_Store(Cmdlet):
                 except Exception:
                     size_bytes_int = None

-                payload: Dict[str, Any] = {
-                    "title": str(title or h),
-                    "hash": h,
-                    "store": resolved_backend_name,
-                    "path": path_str,
-                    "ext": self._normalize_extension(ext_val),
-                    "size_bytes": size_bytes_int,
-                    "tag": tags_list,
-                }
+                payload: Dict[str,
+                              Any] = {
+                                  "title": str(title or h),
+                                  "hash": h,
+                                  "store": resolved_backend_name,
+                                  "path": path_str,
+                                  "ext": self._normalize_extension(ext_val),
+                                  "size_bytes": size_bytes_int,
+                                  "tag": tags_list,
+                              }

                 table.add_result(payload)
                 results_list.append(payload)
@@ -383,10 +406,17 @@ class Search_Store(Cmdlet):
             table.title = command_title

             if refresh_mode:
-                ctx.set_last_result_table_preserve_history(table, results_list)
+                ctx.set_last_result_table_preserve_history(
+                    table,
+                    results_list
+                )
             else:
                 ctx.set_last_result_table(table, results_list)
-            db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
+            db.append_worker_stdout(
+                worker_id,
+                json.dumps(results_list,
+                           indent=2)
+            )
             db.update_worker_status(worker_id, "completed")
             return 0

@@ -413,7 +443,9 @@ class Search_Store(Cmdlet):
                 return 1
             debug(f"[search-store] Searching '{backend_to_search}'")
             results = target_backend.search(query, limit=limit)
-            debug(f"[search-store] '{backend_to_search}' -> {len(results or [])} result(s)")
+            debug(
+                f"[search-store] '{backend_to_search}' -> {len(results or [])} result(s)"
+            )
         else:
             all_results = []
             for backend_name in storage.list_searchable_backends():
@@ -422,7 +454,10 @@ class Search_Store(Cmdlet):
                     searched_backends.append(backend_name)

                     debug(f"[search-store] Searching '{backend_name}'")
-                    backend_results = backend.search(query, limit=limit - len(all_results))
+                    backend_results = backend.search(
+                        query,
+                        limit=limit - len(all_results)
+                    )
                     debug(
                         f"[search-store] '{backend_name}' -> {len(backend_results or [])} result(s)"
                     )
@@ -431,7 +466,10 @@ class Search_Store(Cmdlet):
                     if len(all_results) >= limit:
                         break
                 except Exception as exc:
-                    log(f"Backend {backend_name} search failed: {exc}", file=sys.stderr)
+                    log(
+                        f"Backend {backend_name} search failed: {exc}",
+                        file=sys.stderr
+                    )
             results = all_results[:limit]

             if results:
@@ -440,9 +478,13 @@ class Search_Store(Cmdlet):
                 def _as_dict(obj: Any) -> Dict[str, Any]:
                     if isinstance(obj, dict):
                         return dict(obj)
-                    if hasattr(obj, "to_dict") and callable(getattr(obj, "to_dict")):
+                    if hasattr(obj,
+                               "to_dict") and callable(getattr(obj,
+                                                               "to_dict")):
                         return obj.to_dict() # type: ignore[arg-type]
-                    return {"title": str(obj)}
+                    return {
+                        "title": str(obj)
+                    }

                 item_dict = _as_dict(item)
                 if store_filter:
@@ -470,7 +512,11 @@ class Search_Store(Cmdlet):
                 ctx.set_last_result_table_preserve_history(table, results_list)
             else:
                 ctx.set_last_result_table(table, results_list)
-            db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
+            db.append_worker_stdout(
+                worker_id,
+                json.dumps(results_list,
+                           indent=2)
+            )
         else:
             log("No results found", file=sys.stderr)
             if refresh_mode:
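The second half of the commit message, "format tracked Python files", is simply the result of running YAPF over the repository. Below is a minimal sketch of one way that step could be reproduced locally; it is an assumption about the workflow rather than a script shipped with the commit, and it relies only on git ls-files and yapf --in-place, with YAPF reading the project-level .style.yapf on its own.

    # Hypothetical helper, not part of this commit: reformat every tracked
    # *.py file in place, matching the end state this formatting pass produces.
    import subprocess

    # Ask Git for the tracked Python files (newline-separated paths).
    tracked = subprocess.run(
        ["git", "ls-files", "*.py"],
        capture_output=True, text=True, check=True,
    ).stdout.splitlines()

    if tracked:
        # YAPF picks up .style.yapf from the repository root for its style rules.
        subprocess.run(["yapf", "--in-place", *tracked], check=True)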