Commit: df

CLI.py (85 changes)
@@ -822,7 +822,7 @@ def _create_cmdlet_cli():
 shell_complete=_complete_search_provider,
 ),
 query: str = typer.Argument(..., help="Search query (quote for spaces)"),
-limit: int = typer.Option(50, "--limit", "-l", help="Maximum results to return"),
+limit: int = typer.Option(36, "--limit", "-l", help="Maximum results to return"),
 ):
 """Search external providers (Typer wrapper around the cmdlet)."""
 # Delegate to the existing cmdlet so behavior stays consistent.
@@ -917,17 +917,27 @@ def _create_cmdlet_cli():
 prompt_text = "🜂🜄🜁🜃|"

 # Prepare startup table (always attempt; fall back gracefully if import fails)
-startup_table = ResultTable("Startup checks") if RESULT_TABLE_AVAILABLE else None
+startup_table = ResultTable("*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********") if RESULT_TABLE_AVAILABLE else None
 if startup_table:
 startup_table.set_no_choice(True).set_preserve_order(True)

-def _add_startup_check(status: str, name: str, store_or_provider: str, detail: str = "") -> None:
+def _add_startup_check(
+status: str,
+name: str,
+*,
+provider: str = "",
+store: str = "",
+files: int | str | None = None,
+detail: str = "",
+) -> None:
 if startup_table is None:
 return
 row = startup_table.add_row()
 row.add_column("Status", status)
 row.add_column("Name", name)
-row.add_column("Store/Provi", store_or_provider)
+row.add_column("Provider", provider or "")
+row.add_column("Store", store or "")
+row.add_column("Files", "" if files is None else str(files))
 row.add_column("Detail", detail or "")

 def _has_store_subtype(cfg: dict, subtype: str) -> bool:
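The reworked helper takes everything after `name` as keyword-only arguments and splits the old combined "Store/Provi" column into separate Provider, Store, and Files columns. A minimal sketch of the resulting call style (the row values here are illustrative, not taken from this commit):

    # Store-backed row: each field is named explicitly at the call site.
    _add_startup_check("ENABLED", "home", store="hydrusnetwork", files=1234, detail="http://127.0.0.1:45869")

    # Provider row: store/files are simply omitted and default to empty.
    _add_startup_check("DISABLED", "0x0", provider="0x0", detail="Unavailable")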
@@ -997,9 +1007,9 @@ def _create_cmdlet_cli():
 except Exception:
 mpv_path = None

-_add_startup_check("ENABLED", "MPV", "N/A", mpv_path or "Available")
+_add_startup_check("ENABLED", "MPV", detail=mpv_path or "Available")
 except Exception as exc:
-_add_startup_check("DISABLED", "MPV", "N/A", str(exc))
+_add_startup_check("DISABLED", "MPV", detail=str(exc))

 store_registry = None
@@ -1033,18 +1043,22 @@ def _create_cmdlet_cli():
 if store_registry:
 backend = store_registry[name_key]
 total = getattr(backend, "total_count", None)
+if total is None:
+getter = getattr(backend, "get_total_count", None)
+if callable(getter):
+total = getter()
 except Exception:
 total = None

-detail = (url_val + (" - " if url_val else "")) + "Connected"
-if isinstance(total, int) and total >= 0:
-detail += f" (Total: {total})"
+detail = url_val
+files = total if isinstance(total, int) and total >= 0 else None
 else:
 err = None
 if store_registry:
 err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
 detail = (url_val + (" - " if url_val else "")) + (err or "Unavailable")
-_add_startup_check(status, name_key, "hydrusnetwork", detail)
+files = None
+_add_startup_check(status, name_key, store="hydrusnetwork", files=files, detail=detail)

 # Configured providers (dynamic): show any [provider=...] blocks.
 # This complements store checks and avoids hardcoding per-provider rows.
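The hydrusnetwork branch now resolves the file count by duck typing: read a cached `total_count` attribute first, and only fall back to a `get_total_count()` method when the cache is empty. The same lookup as a standalone sketch, assuming only that a backend may expose either the attribute or the getter:

    def _backend_total(backend) -> int | None:
        # Prefer the cached attribute; fall back to a callable getter.
        total = getattr(backend, "total_count", None)
        if total is None:
            getter = getattr(backend, "get_total_count", None)
            if callable(getter):
                try:
                    total = getter()
                except Exception:
                    total = None
        return total if isinstance(total, int) and total >= 0 else None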
@@ -1148,9 +1162,9 @@ def _create_cmdlet_cli():

 client = AllDebridClient(api_key)
 base_url = str(getattr(client, "base_url", "") or "").strip()
-_add_startup_check("ENABLED", display, prov, base_url or "Connected")
+_add_startup_check("ENABLED", display, provider=prov, detail=base_url or "Connected")
 except Exception as exc:
-_add_startup_check("DISABLED", display, prov, str(exc))
+_add_startup_check("DISABLED", display, provider=prov, detail=str(exc))
 continue

 is_known = False
@@ -1167,7 +1181,7 @@ def _create_cmdlet_cli():
 ok = bool(meta_availability.get(prov))

 if not is_known:
-_add_startup_check("UNKNOWN", display, prov, "Not registered")
+_add_startup_check("UNKNOWN", display, provider=prov, detail="Not registered")
 else:
 # For non-login providers, include a lightweight URL reachability check.
 detail = "Configured" if ok else "Not configured"
@@ -1178,7 +1192,7 @@ def _create_cmdlet_cli():
 detail = ping_detail
 else:
 detail = (detail + " | " + ping_detail) if ping_detail else detail
-_add_startup_check("ENABLED" if ok else "DISABLED", display, prov, detail)
+_add_startup_check("ENABLED" if ok else "DISABLED", display, provider=prov, detail=detail)

 already_checked.add(prov)

@@ -1196,7 +1210,7 @@ def _create_cmdlet_cli():
 # If the provider isn't even import/dep available, show that first.
 if not ok:
 detail = ("Unavailable" + (f" | {ping_detail}" if ping_detail else ""))
-_add_startup_check("ENABLED" if (ok and ping_ok) else "DISABLED", display, prov, detail)
+_add_startup_check("ENABLED" if (ok and ping_ok) else "DISABLED", display, provider=prov, detail=detail)
 already_checked.add(prov)

 # Default file providers (no login): 0x0
@@ -1206,7 +1220,7 @@ def _create_cmdlet_cli():
 detail = ping_detail
 if not ok:
 detail = ("Unavailable" + (f" | {ping_detail}" if ping_detail else ""))
-_add_startup_check("ENABLED" if (ok and ping_ok) else "DISABLED", "0x0", "0x0", detail)
+_add_startup_check("ENABLED" if (ok and ping_ok) else "DISABLED", "0x0", provider="0x0", detail=detail)
 already_checked.add("0x0")

 if _has_provider(config, "matrix"):
@@ -1225,7 +1239,7 @@ def _create_cmdlet_cli():
 target = (target + (" " if target else "")) + f"room:{room_id}"

 if provider.validate():
-_add_startup_check("ENABLED", "Matrix", "matrix", target or "Connected")
+_add_startup_check("ENABLED", "Matrix", provider="matrix", detail=target or "Connected")
 else:
 missing: list[str] = []
 if not homeserver:
@@ -1235,9 +1249,9 @@ def _create_cmdlet_cli():
 if not (matrix_conf.get("access_token") or matrix_conf.get("password")):
 missing.append("access_token/password")
 detail = "Not configured" + (f" ({', '.join(missing)})" if missing else "")
-_add_startup_check("DISABLED", "Matrix", "matrix", detail)
+_add_startup_check("DISABLED", "Matrix", provider="matrix", detail=detail)
 except Exception as exc:
-_add_startup_check("DISABLED", "Matrix", "matrix", str(exc))
+_add_startup_check("DISABLED", "Matrix", provider="matrix", detail=str(exc))

 if _has_store_subtype(config, "folder"):
 # Folder local scan/index is performed by Store.Folder.__init__.
@@ -1255,17 +1269,26 @@ def _create_cmdlet_cli():
 backend = store_registry[name_key]
 scan_ok = bool(getattr(backend, "scan_ok", True))
 scan_detail = str(getattr(backend, "scan_detail", "") or "")
+stats = getattr(backend, "scan_stats", None)
+files = None
+if isinstance(stats, dict):
+try:
+total_db = stats.get("files_total_db")
+if isinstance(total_db, (int, float)):
+files = int(total_db)
+except Exception:
+files = None
 status = "SCANNED" if scan_ok else "ERROR"
 detail = (path_val + (" - " if path_val else "")) + (scan_detail or "Up to date")
-_add_startup_check(status, name_key, "folder", detail)
+_add_startup_check(status, name_key, store="folder", files=files, detail=detail)
 else:
 err = None
 if store_registry:
 err = store_registry.get_backend_error(instance_name) or store_registry.get_backend_error(name_key)
 detail = (path_val + (" - " if path_val else "")) + (err or "Unavailable")
-_add_startup_check("ERROR", name_key, "folder", detail)
+_add_startup_check("ERROR", name_key, store="folder", detail=detail)
 else:
-_add_startup_check("SKIPPED", "Folder", "folder", "No folder stores configured")
+_add_startup_check("SKIPPED", "Folder", store="folder", detail="No folder stores configured")

 if _has_store_subtype(config, "debrid"):
 # Debrid availability is validated by API.alldebrid.AllDebridClient.__init__.
@@ -1274,15 +1297,15 @@ def _create_cmdlet_cli():

 api_key = get_debrid_api_key(config)
 if not api_key:
-_add_startup_check("DISABLED", "Debrid", "debrid", "Not configured")
+_add_startup_check("DISABLED", "Debrid", store="debrid", detail="Not configured")
 else:
 from API.alldebrid import AllDebridClient

 client = AllDebridClient(api_key)
 base_url = str(getattr(client, "base_url", "") or "").strip()
-_add_startup_check("ENABLED", "Debrid", "debrid", base_url or "Connected")
+_add_startup_check("ENABLED", "Debrid", store="debrid", detail=base_url or "Connected")
 except Exception as exc:
-_add_startup_check("DISABLED", "Debrid", "debrid", str(exc))
+_add_startup_check("DISABLED", "Debrid", store="debrid", detail=str(exc))

 # Cookies are used by yt-dlp; keep this centralized utility.
 try:
@@ -1290,11 +1313,11 @@ def _create_cmdlet_cli():

 cookiefile = YtDlpTool(config).resolve_cookiefile()
 if cookiefile is not None:
-_add_startup_check("FOUND", "Cookies", "N/A", str(cookiefile))
+_add_startup_check("FOUND", "Cookies", detail=str(cookiefile))
 else:
-_add_startup_check("MISSING", "Cookies", "N/A", "Not found")
+_add_startup_check("MISSING", "Cookies", detail="Not found")
 except Exception as exc:
-_add_startup_check("ERROR", "Cookies", "N/A", str(exc))
+_add_startup_check("ERROR", "Cookies", detail=str(exc))

 if startup_table is not None and startup_table.rows:
 print()
@@ -1579,11 +1602,11 @@ def _execute_pipeline(tokens: list):
 if hash_val != 'N/A':
 hash_display = str(hash_val)
 title_display = str(title_val)
-print(f" -> hash:{hash_display}, title:{title_display}")
+debug(f" -> hash:{hash_display}, title:{title_display}")
 else:
-print(f" -> title:{title_val}")
+debug(f" -> title:{title_val}")
 else:
-print(" -> [source_index out of range]")
+debug(" -> [source_index out of range]")
 if resolved_list is not None:
 debug(f"[debug] resolved_len={len(resolved_list)}")
 except Exception as e:
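Switching these tracing lines from `print` to `debug` keeps them out of normal REPL output. The commit does not show `SYS.logger.debug` itself; a minimal sketch of the kind of gate it presumably provides (the environment flag here is an assumption for illustration):

    import os
    import sys

    DEBUG = bool(os.environ.get("MEDIOS_DEBUG"))  # assumed switch; the real one lives in SYS.logger

    def debug(msg: str, file=sys.stderr) -> None:
        # Unlike print(), this is a no-op unless debugging is enabled.
        if DEBUG:
            print(msg, file=file)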
@@ -19,7 +19,7 @@ from API.HTTP import HTTPClient
 from ProviderCore.base import SearchProvider, SearchResult
 from ProviderCore.download import download_file, sanitize_filename
 from cli_syntax import get_field, get_free_text, parse_query
-from SYS.logger import log
+from SYS.logger import debug, log
 from SYS.utils import unique_path

 try:
@@ -564,7 +564,7 @@ class OpenLibrary(SearchProvider):
 if val:
 new_tags.append(f"{ns}:{val}")

-log(f"Found {len(new_tags)} tag(s) from ISBN lookup")
+debug(f"Found {len(new_tags)} tag(s) from ISBN lookup")
 return new_tags

 @staticmethod
@@ -703,7 +703,7 @@ class OpenLibrary(SearchProvider):
 if isinstance(ocaid, str) and ocaid.strip():
 new_tags.append(f"internet_archive:{ocaid.strip()}")

-log(f"Found {len(new_tags)} tag(s) from OpenLibrary lookup")
+debug(f"Found {len(new_tags)} tag(s) from OpenLibrary lookup")
 return new_tags

 def search(
@@ -788,7 +788,6 @@ class OpenLibrary(SearchProvider):

 availability_rows: List[Tuple[str, str, str, str]] = [("unknown", "", "", "") for _ in range(len(docs))]
 if docs:
-log(f"[openlibrary] Enriching availability for {len(docs)} result(s)...")
 max_workers = min(8, max(1, len(docs)))
 done = 0
 with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
@@ -804,9 +803,7 @@ class OpenLibrary(SearchProvider):
 except Exception:
 availability_rows[i] = ("unknown", "", "", "")
 done += 1
-if done in {1, len(future_to_index)} or (done % 10 == 0):
-log(f"[openlibrary] Availability: {done}/{len(future_to_index)}")
-log("[openlibrary] Availability enrichment complete")

 for idx, doc in enumerate(docs):
 if not isinstance(doc, dict):
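With the progress logging removed, the enrichment loop reads as a plain pattern: fan out over at most eight workers and write each result back by index, so ordering is preserved and a failed lookup degrades to the "unknown" placeholder. The same pattern in isolation (a sketch; `fetch_availability` stands in for the provider's per-document lookup):

    from concurrent import futures

    def enrich(docs, fetch_availability):
        # Pre-fill so every doc keeps a row even if its lookup fails.
        rows = [("unknown", "", "", "") for _ in docs]
        max_workers = min(8, max(1, len(docs)))
        with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_index = {executor.submit(fetch_availability, d): i for i, d in enumerate(docs)}
            for fut in futures.as_completed(future_to_index):
                i = future_to_index[fut]
                try:
                    rows[i] = fut.result()
                except Exception:
                    rows[i] = ("unknown", "", "", "")
        return rows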
@@ -393,7 +393,7 @@ class Folder(Store):
 if url:
 self.add_url(file_hash, url)

-log(f"✓ Added to local storage: {save_file.name}", file=sys.stderr)
+##log(f"✓ Added to local storage: {save_file.name}", file=sys.stderr)
 return file_hash

 except Exception as exc:
@@ -115,7 +115,57 @@ class HydrusNetwork(Store):
 # Create a persistent client for this instance (auth via access key by default).
 self._client = HydrusClient(url=self.URL, access_key=self.API, instance_name=self.NAME)

-# Best-effort total count (fast on Hydrus side; does not fetch IDs/hashes).
+# Best-effort total count (used for startup diagnostics). Avoid heavy payloads.
+# Some Hydrus setups appear to return no count via the CBOR client for this endpoint,
+# so prefer a direct JSON request with a short timeout.
+try:
+self.get_total_count(refresh=True)
+except Exception:
+pass
+
+def get_total_count(self, *, refresh: bool = False) -> Optional[int]:
+"""Best-effort total file count for this Hydrus instance.
+
+Intended for diagnostics (e.g., REPL startup checks). This should be fast,
+and it MUST NOT raise.
+"""
+if self.total_count is not None and not refresh:
+return self.total_count
+
+# 1) Prefer a direct JSON request (fast + avoids CBOR edge cases).
+try:
+import json as _json
+
+url = f"{self.URL}/get_files/search_files"
+params = {
+"tags": _json.dumps(["system:everything"]),
+"return_hashes": "false",
+"return_file_ids": "false",
+"return_file_count": "true",
+}
+headers = {
+"Hydrus-Client-API-Access-Key": self.API,
+"Accept": "application/json",
+}
+with httpx.Client(timeout=5.0, verify=False, follow_redirects=True) as client:
+resp = client.get(url, params=params, headers=headers)
+resp.raise_for_status()
+payload = resp.json()
+
+count_val = None
+if isinstance(payload, dict):
+count_val = payload.get("file_count")
+if count_val is None:
+count_val = payload.get("file_count_inclusive")
+if count_val is None:
+count_val = payload.get("num_files")
+if isinstance(count_val, int):
+self.total_count = count_val
+return self.total_count
+except Exception as exc:
+debug(f"{self._log_prefix()} total count (json) unavailable: {exc}", file=sys.stderr)
+
+# 2) Fallback to the API client (CBOR).
 try:
 payload = self._client.search_files(
 tags=["system:everything"],
@@ -132,8 +182,11 @@ class HydrusNetwork(Store):
 count_val = payload.get("num_files")
 if isinstance(count_val, int):
 self.total_count = count_val
+return self.total_count
 except Exception as exc:
-debug(f"{self._log_prefix()} total count unavailable: {exc}", file=sys.stderr)
+debug(f"{self._log_prefix()} total count (client) unavailable: {exc}", file=sys.stderr)

+return self.total_count
+
 def name(self) -> str:
 return self.NAME
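The getter caches on `self.total_count`, so repeat calls are cheap, and `refresh=True` forces a re-query of `/get_files/search_files` with `return_file_count=true`. Intended call sites, sketched (instance construction elided):

    count = store.get_total_count()              # usually served from the cache filled at __init__
    fresh = store.get_total_count(refresh=True)  # re-queries Hydrus; still returns None on failure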
@@ -174,6 +174,18 @@ class Add_File(Cmdlet):
 failures += 1
 continue

+# Lean, non-debug status output (the ResultTable search follows after ingest).
+try:
+title_text = str(getattr(pipe_obj, "title", "") or "").strip()
+if not title_text and isinstance(full_metadata, dict):
+title_text = str(full_metadata.get("title") or "").strip()
+tags_list = list(getattr(pipe_obj, "tag", None) or [])
+tags_text = ", ".join(str(t).strip() for t in tags_list if str(t).strip())
+log(f"Title: {title_text or 'Unknown'}")
+log(f"Tags: {tags_text}")
+except Exception:
+pass
+
 temp_dir_to_cleanup = Path(tempfile.mkdtemp(prefix="medios_openlibrary_"))
 sr = SearchResult(
 table="openlibrary",
@@ -1169,7 +1181,6 @@ class Add_File(Cmdlet):
 from cmdlet.search_store import CMDLET as search_store_cmdlet

 args = ["-store", str(store), f"hash:{str(hash_value)}"]
-log(f"[add-file] Refresh: search-store -store {store} \"hash:{hash_value}\"", file=sys.stderr)

 # Run search-store under a temporary stage context so its ctx.emit() calls
 # don't interfere with the outer add-file pipeline stage.
@@ -1569,7 +1580,7 @@ class Add_File(Cmdlet):
 auto_search_store: bool = True,
 ) -> int:
 """Handle uploading to a registered storage backend (e.g., 'test' folder store, 'hydrus', etc.)."""
-log(f"Adding file to storage backend '{backend_name}': {media_path.name}", file=sys.stderr)
+##log(f"Adding file to storage backend '{backend_name}': {media_path.name}", file=sys.stderr)

 delete_after_effective = bool(delete_after)
 if not delete_after_effective:
@@ -1623,7 +1634,7 @@ class Add_File(Cmdlet):
 tag=tags,
 url=url
 )
-log(f"✓ File added to '{backend_name}': {file_identifier}", file=sys.stderr)
+##log(f"✓ File added to '{backend_name}': {file_identifier}", file=sys.stderr)

 stored_path: Optional[str] = None
 # IMPORTANT: avoid calling get_file() for remote backends.
@@ -1892,7 +1903,7 @@ class Add_File(Cmdlet):
 is_temp_merge = "(merged)" in media_path.name or ".dlhx_" in media_path.name

 if delete_source or is_temp_merge:
-log(f"Deleting source file...", file=sys.stderr)
+##log(f"Deleting source file...", file=sys.stderr)
 try:
 media_path.unlink()
 Add_File._cleanup_sidecar_files(media_path)
||||||
@@ -24,9 +24,9 @@ class Add_Note(Cmdlet):
 def __init__(self) -> None:
 super().__init__(
 name="add-note",
-summary="Add or set a named note on a file in a store.",
+summary="Add file store note",
 usage="add-note -store <store> [-hash <sha256>] <name> <text...>",
-alias=["set-note", "add_note"],
+alias=[""],
 arg=[
 SharedArgs.STORE,
 SharedArgs.HASH,
@@ -34,8 +34,9 @@ class Add_Note(Cmdlet):
 CmdletArg("text", type="string", required=True, description="Note text/content to store.", variadic=True),
 ],
 detail=[
-"- Notes are stored via the selected store backend.",
-"- For lyrics: store LRC text in a note named 'lyric'.",
+"""
+dde
+"""
 ],
 exec=self.run,
 )
@@ -1,563 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Dict, List, Sequence, Optional
-from pathlib import Path
-import sys
-
-from SYS.logger import log
-
-import models
-import pipeline as ctx
-from . import _shared as sh
-
-normalize_result_input = sh.normalize_result_input
-filter_results_by_temp = sh.filter_results_by_temp
-Cmdlet = sh.Cmdlet
-CmdletArg = sh.CmdletArg
-SharedArgs = sh.SharedArgs
-normalize_hash = sh.normalize_hash
-parse_tag_arguments = sh.parse_tag_arguments
-expand_tag_groups = sh.expand_tag_groups
-parse_cmdlet_args = sh.parse_cmdlet_args
-collapse_namespace_tags = sh.collapse_namespace_tags
-should_show_help = sh.should_show_help
-get_field = sh.get_field
-from Store import Store
-from SYS.utils import sha256_file
-
-
-def _extract_title_tag(tags: List[str]) -> Optional[str]:
-"""Return the value of the first title: tag if present."""
-for tag in tags:
-if isinstance(tag, str) and tag.lower().startswith("title:"):
-value = tag.split(":", 1)[1].strip()
-if value:
-return value
-return None
-
-
-def _extract_item_tags(res: Any) -> List[str]:
-if isinstance(res, models.PipeObject):
-raw = getattr(res, "tag", None)
-elif isinstance(res, dict):
-raw = res.get("tag")
-else:
-raw = None
-
-if isinstance(raw, list):
-return [str(t) for t in raw if t is not None]
-if isinstance(raw, str) and raw.strip():
-return [raw]
-return []
-
-
-def _set_item_tags(res: Any, tags: List[str]) -> None:
-if isinstance(res, models.PipeObject):
-res.tag = tags
-elif isinstance(res, dict):
-res["tag"] = tags
-
-
-def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
-"""Update result object/dict title fields and columns in-place."""
-if not title_value:
-return
-if isinstance(res, models.PipeObject):
-res.title = title_value
-# Update columns if present (Title column assumed index 0)
-if hasattr(res, "columns") and isinstance(res.columns, list) and res.columns:
-label, *_ = res.columns[0]
-if str(label).lower() == "title":
-res.columns[0] = (res.columns[0][0], title_value)
-elif isinstance(res, dict):
-res["title"] = title_value
-cols = res.get("columns")
-if isinstance(cols, list):
-updated = []
-changed = False
-for col in cols:
-if isinstance(col, tuple) and len(col) == 2:
-label, val = col
-if str(label).lower() == "title":
-updated.append((label, title_value))
-changed = True
-else:
-updated.append(col)
-else:
-updated.append(col)
-if changed:
-res["columns"] = updated
-
-
-def _matches_target(item: Any, target_hash: Optional[str], target_path: Optional[str]) -> bool:
-"""Determine whether a result item refers to the given hash/path target (canonical fields only)."""
-
-def norm(val: Any) -> Optional[str]:
-return str(val).lower() if val is not None else None
-
-target_hash_l = target_hash.lower() if target_hash else None
-target_path_l = target_path.lower() if target_path else None
-
-if isinstance(item, dict):
-hashes = [norm(item.get("hash"))]
-paths = [norm(item.get("path"))]
-else:
-hashes = [norm(get_field(item, "hash"))]
-paths = [norm(get_field(item, "path"))]
-
-if target_hash_l and target_hash_l in hashes:
-return True
-if target_path_l and target_path_l in paths:
-return True
-return False
-
-
-def _update_item_title_fields(item: Any, new_title: str) -> None:
-"""Mutate an item to reflect a new title in plain fields and columns."""
-if isinstance(item, models.PipeObject):
-item.title = new_title
-if hasattr(item, "columns") and isinstance(item.columns, list) and item.columns:
-label, *_ = item.columns[0]
-if str(label).lower() == "title":
-item.columns[0] = (label, new_title)
-elif isinstance(item, dict):
-item["title"] = new_title
-cols = item.get("columns")
-if isinstance(cols, list):
-updated_cols = []
-changed = False
-for col in cols:
-if isinstance(col, tuple) and len(col) == 2:
-label, val = col
-if str(label).lower() == "title":
-updated_cols.append((label, new_title))
-changed = True
-else:
-updated_cols.append(col)
-else:
-updated_cols.append(col)
-if changed:
-item["columns"] = updated_cols
-
-
-def _refresh_result_table_title(new_title: str, target_hash: Optional[str], target_path: Optional[str]) -> None:
-"""Refresh the cached result table with an updated title and redisplay it."""
-try:
-last_table = ctx.get_last_result_table()
-items = ctx.get_last_result_items()
-if not last_table or not items:
-return
-
-updated_items = []
-match_found = False
-for item in items:
-try:
-if _matches_target(item, target_hash, target_path):
-_update_item_title_fields(item, new_title)
-match_found = True
-except Exception:
-pass
-updated_items.append(item)
-if not match_found:
-return
-
-from result_table import ResultTable  # Local import to avoid circular dependency
-
-new_table = last_table.copy_with_title(getattr(last_table, "title", ""))
-
-for item in updated_items:
-new_table.add_result(item)
-
-# Keep the underlying history intact; update only the overlay so @.. can
-# clear the overlay then continue back to prior tables (e.g., the search list).
-ctx.set_last_result_table_overlay(new_table, updated_items)
-except Exception:
-pass
-
-
-def _refresh_tags_view(res: Any, target_hash: Optional[str], store_name: Optional[str], target_path: Optional[str], config: Dict[str, Any]) -> None:
-"""Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
-try:
-from cmdlet import get_tag as get_tag_cmd  # type: ignore
-except Exception:
-return
-
-if not target_hash or not store_name:
-return
-
-refresh_args: List[str] = ["-hash", target_hash, "-store", store_name]
-
-try:
-subject = ctx.get_last_result_subject()
-if subject and _matches_target(subject, target_hash, target_path):
-get_tag_cmd._run(subject, refresh_args, config)
-return
-except Exception:
-pass
-
-try:
-get_tag_cmd._run(res, refresh_args, config)
-except Exception:
-pass
-
-
-
-class Add_Tag(Cmdlet):
-"""Class-based add-tags cmdlet with Cmdlet metadata inheritance."""
-
-def __init__(self) -> None:
-super().__init__(
-name="add-tags",
-summary="Add tags to a file in a store.",
-usage="add-tags -store <store> [-hash <sha256>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
-arg=[
-SharedArgs.HASH,
-SharedArgs.STORE,
-CmdletArg("-duplicate", type="string", description="Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)"),
-CmdletArg("-list", type="string", description="Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult)."),
-CmdletArg("--all", type="flag", description="Include temporary files in tagging (by default, only tags non-temporary files)."),
-CmdletArg("tags", type="string", required=False, description="One or more tags to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tags from pipeline payload.", variadic=True),
-],
-detail=[
-"- By default, only tags non-temporary files (from pipelines). Use --all to tag everything.",
-"- Requires a store backend: use -store or pipe items that include store.",
-"- If -hash is not provided, uses the piped item's hash (or derives from its path when possible).",
-"- Multiple tags can be comma-separated or space-separated.",
-"- Use -list to include predefined tag lists from adjective.json: -list philosophy,occult",
-"- Tags can also reference lists with curly braces: add-tags {philosophy} \"other:tag\"",
-"- Use -duplicate to copy EXISTING tag values to new namespaces:",
-" Explicit format: -duplicate title:album,artist (copies title: to album: and artist:)",
-" Inferred format: -duplicate title,album,artist (first is source, rest are targets)",
-"- The source namespace must already exist in the file being tagged.",
-"- Target namespaces that already have a value are skipped (not overwritten).",
-"- You can also pass the target hash as a tag token: hash:<sha256>. This overrides -hash and is removed from the tag list.",
-],
-exec=self.run,
-)
-self.register()
-
-def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
-"""Add tags to a file with smart filtering for pipeline results."""
-if should_show_help(args):
-log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
-return 0
-
-# Parse arguments
-parsed = parse_cmdlet_args(args, self)
-
-# Check for --all flag
-include_temp = parsed.get("all", False)
-
-# Normalize input to list
-results = normalize_result_input(result)
-
-# Filter by temp status (unless --all is set)
-if not include_temp:
-results = filter_results_by_temp(results, include_temp=False)
-
-if not results:
-log("No valid files to tag (all results were temporary; use --all to include temporary files)", file=sys.stderr)
-return 1
-
-# Get tags from arguments (or fallback to pipeline payload)
-raw_tags = parsed.get("tags", [])
-if isinstance(raw_tags, str):
-raw_tags = [raw_tags]
-
-# Fallback: if no tags provided explicitly, try to pull from first result payload
-if not raw_tags and results:
-first = results[0]
-payload_tags = None
-
-# Try multiple tag lookup strategies in order
-tag_lookups = [
-lambda x: getattr(x, "tags", None),
-lambda x: x.get("tags") if isinstance(x, dict) else None,
-]
-
-for lookup in tag_lookups:
-try:
-payload_tags = lookup(first)
-if payload_tags:
-break
-except (AttributeError, TypeError, KeyError):
-continue
-
-if payload_tags:
-if isinstance(payload_tags, str):
-raw_tags = [payload_tags]
-elif isinstance(payload_tags, list):
-raw_tags = payload_tags
-
-# Handle -list argument (convert to {list} syntax)
-list_arg = parsed.get("list")
-if list_arg:
-for l in list_arg.split(','):
-l = l.strip()
-if l:
-raw_tags.append(f"{{{l}}}")
-
-# Parse and expand tags
-tags_to_add = parse_tag_arguments(raw_tags)
-tags_to_add = expand_tag_groups(tags_to_add)
-
-# Allow hash override via namespaced token (e.g., "hash:abcdef...")
-extracted_hash = None
-filtered_tags: List[str] = []
-for tag in tags_to_add:
-if isinstance(tag, str) and tag.lower().startswith("hash:"):
-_, _, hash_val = tag.partition(":")
-if hash_val:
-extracted_hash = normalize_hash(hash_val.strip())
-continue
-filtered_tags.append(tag)
-tags_to_add = filtered_tags
-
-if not tags_to_add:
-log("No tags provided to add", file=sys.stderr)
-return 1
-
-# Get other flags (hash override can come from -hash or hash: token)
-hash_override = normalize_hash(parsed.get("hash")) or extracted_hash
-duplicate_arg = parsed.get("duplicate")
-
-# Tags ARE provided - apply them to each store-backed result
-total_added = 0
-total_modified = 0
-
-store_override = parsed.get("store")
-
-store_registry = Store(config)
-
-for res in results:
-store_name: Optional[str]
-raw_hash: Optional[str]
-raw_path: Optional[str]
-
-if isinstance(res, models.PipeObject):
-store_name = store_override or res.store
-raw_hash = res.hash
-raw_path = res.path
-elif isinstance(res, dict):
-store_name = store_override or res.get("store")
-raw_hash = res.get("hash")
-raw_path = res.get("path")
-else:
-ctx.emit(res)
-continue
-
-if not store_name:
-store_name = None
-
-# If the item isn't in a configured store backend yet (e.g., store=PATH) but has a local file,
-# treat add-tags as a pipeline mutation (carry tags forward for add-file) instead of a store write.
-if not store_override:
-store_name_str = str(store_name) if store_name is not None else ""
-local_mode_requested = (not store_name_str) or (store_name_str.upper() == "PATH") or (store_name_str.lower() == "local")
-is_known_backend = bool(store_name_str) and store_registry.is_available(store_name_str)
-
-if local_mode_requested and raw_path:
-try:
-if Path(str(raw_path)).expanduser().exists():
-existing_tags_list = _extract_item_tags(res)
-existing_lower = {t.lower() for t in existing_tags_list if isinstance(t, str)}
-
-item_tags_to_add = list(tags_to_add)
-item_tags_to_add = collapse_namespace_tags(item_tags_to_add, "title", prefer="last")
-
-if duplicate_arg:
-parts = str(duplicate_arg).split(':')
-source_ns = ""
-targets: list[str] = []
-
-if len(parts) > 1:
-source_ns = parts[0]
-targets = [t.strip() for t in parts[1].split(',') if t.strip()]
-else:
-parts2 = str(duplicate_arg).split(',')
-if len(parts2) > 1:
-source_ns = parts2[0]
-targets = [t.strip() for t in parts2[1:] if t.strip()]
-
-if source_ns and targets:
-source_prefix = source_ns.lower() + ":"
-for t in existing_tags_list:
-if not t.lower().startswith(source_prefix):
-continue
-value = t.split(":", 1)[1]
-for target_ns in targets:
-new_tag = f"{target_ns}:{value}"
-if new_tag.lower() not in existing_lower:
-item_tags_to_add.append(new_tag)
-
-removed_namespace_tags: list[str] = []
-for new_tag in item_tags_to_add:
-if not isinstance(new_tag, str) or ":" not in new_tag:
-continue
-ns = new_tag.split(":", 1)[0].strip()
-if not ns:
-continue
-ns_prefix = ns.lower() + ":"
-for t in existing_tags_list:
-if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
-removed_namespace_tags.append(t)
-removed_namespace_tags = sorted({t for t in removed_namespace_tags})
-
-actual_tags_to_add = [
-t
-for t in item_tags_to_add
-if isinstance(t, str) and t.lower() not in existing_lower
-]
-
-updated_tags_list = [t for t in existing_tags_list if t not in removed_namespace_tags]
-updated_tags_list.extend(actual_tags_to_add)
-
-_set_item_tags(res, updated_tags_list)
-final_title = _extract_title_tag(updated_tags_list)
-_apply_title_to_result(res, final_title)
-
-total_added += len(actual_tags_to_add)
-total_modified += 1 if (removed_namespace_tags or actual_tags_to_add) else 0
-
-ctx.emit(res)
-continue
-except Exception:
-pass
-
-if local_mode_requested:
-log("[add_tags] Error: Missing usable local path for tagging (or provide -store)", file=sys.stderr)
-return 1
-
-if store_name_str and not is_known_backend:
-log(f"[add_tags] Error: Unknown store '{store_name_str}'. Available: {store_registry.list_backends()}", file=sys.stderr)
-return 1
-
-resolved_hash = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
-if not resolved_hash and raw_path:
-try:
-p = Path(str(raw_path))
-stem = p.stem
-if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
-resolved_hash = stem.lower()
-elif p.exists() and p.is_file():
-resolved_hash = sha256_file(p)
-except Exception:
-resolved_hash = None
-
-if not resolved_hash:
-log("[add_tags] Warning: Item missing usable hash (and could not derive from path); skipping", file=sys.stderr)
-ctx.emit(res)
-continue
-
-try:
-backend = store_registry[str(store_name)]
-except Exception as exc:
-log(f"[add_tags] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
-return 1
-
-try:
-existing_tags, _src = backend.get_tag(resolved_hash, config=config)
-except Exception:
-existing_tags = []
-
-existing_tags_list = [t for t in (existing_tags or []) if isinstance(t, str)]
-existing_lower = {t.lower() for t in existing_tags_list}
-original_title = _extract_title_tag(existing_tags_list)
-
-# Per-item tag list (do not mutate shared list)
-item_tags_to_add = list(tags_to_add)
-item_tags_to_add = collapse_namespace_tags(item_tags_to_add, "title", prefer="last")
-
-# Handle -duplicate logic (copy existing tags to new namespaces)
-if duplicate_arg:
-parts = str(duplicate_arg).split(':')
-source_ns = ""
-targets: list[str] = []
-
-if len(parts) > 1:
-source_ns = parts[0]
-targets = [t.strip() for t in parts[1].split(',') if t.strip()]
-else:
-parts2 = str(duplicate_arg).split(',')
-if len(parts2) > 1:
-source_ns = parts2[0]
-targets = [t.strip() for t in parts2[1:] if t.strip()]
-
-if source_ns and targets:
-source_prefix = source_ns.lower() + ":"
-for t in existing_tags_list:
-if not t.lower().startswith(source_prefix):
-continue
-value = t.split(":", 1)[1]
-for target_ns in targets:
-new_tag = f"{target_ns}:{value}"
-if new_tag.lower() not in existing_lower:
-item_tags_to_add.append(new_tag)
-
-# Namespace replacement: delete old namespace:* when adding namespace:value
-removed_namespace_tags: list[str] = []
-for new_tag in item_tags_to_add:
-if not isinstance(new_tag, str) or ":" not in new_tag:
-continue
-ns = new_tag.split(":", 1)[0].strip()
-if not ns:
-continue
-ns_prefix = ns.lower() + ":"
-for t in existing_tags_list:
-if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
-removed_namespace_tags.append(t)
-
-removed_namespace_tags = sorted({t for t in removed_namespace_tags})
-
-actual_tags_to_add = [t for t in item_tags_to_add if isinstance(t, str) and t.lower() not in existing_lower]
-
-changed = False
-if removed_namespace_tags:
-try:
-backend.delete_tag(resolved_hash, removed_namespace_tags, config=config)
-changed = True
-except Exception as exc:
-log(f"[add_tags] Warning: Failed deleting namespace tags: {exc}", file=sys.stderr)
-
-if actual_tags_to_add:
-try:
-backend.add_tag(resolved_hash, actual_tags_to_add, config=config)
-changed = True
-except Exception as exc:
-log(f"[add_tags] Warning: Failed adding tags: {exc}", file=sys.stderr)
-
-if changed:
-total_added += len(actual_tags_to_add)
-total_modified += 1
-
-try:
-refreshed_tags, _src2 = backend.get_tag(resolved_hash, config=config)
-refreshed_list = [t for t in (refreshed_tags or []) if isinstance(t, str)]
-except Exception:
-refreshed_list = existing_tags_list
-
-# Update the result's tags using canonical field
-if isinstance(res, models.PipeObject):
-res.tags = refreshed_list
-elif isinstance(res, dict):
-res["tags"] = refreshed_list
-
-final_title = _extract_title_tag(refreshed_list)
-_apply_title_to_result(res, final_title)
-
-if final_title and (not original_title or final_title.lower() != original_title.lower()):
-_refresh_result_table_title(final_title, resolved_hash, raw_path)
-
-if changed:
-_refresh_tags_view(res, resolved_hash, str(store_name), raw_path, config)
-
-ctx.emit(res)
-
-log(
-f"[add_tags] Added {total_added} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)",
-file=sys.stderr,
-)
-return 0
-
-
-CMDLET = Add_Tag()
@@ -695,6 +695,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

 pipe_obj = create_pipe_object_result(
 source='screenshot',
+store='PATH',
 identifier=Path(screenshot_result.path).stem,
 file_path=str(screenshot_result.path),
 cmdlet_name='screen-shot',
@@ -238,7 +238,7 @@ class Search_Provider(Cmdlet):
 db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
 db.update_worker_status(worker_id, 'completed')

-log(f"Found {len(results)} result(s) from {provider_name}", file=sys.stderr)
+##log(f"Found {len(results)} result(s) from {provider_name}", file=sys.stderr)
 return 0

 except Exception as e:
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import contextlib
 from dataclasses import dataclass
 from typing import Any, Dict, Iterator, Optional

@@ -70,6 +71,7 @@ class PlaywrightTool:

 def _load_defaults(self) -> PlaywrightDefaults:
 cfg = self._config
+defaults = PlaywrightDefaults()
 tool_block = _get_nested(cfg, "tool", "playwright")
 if not isinstance(tool_block, dict):
 tool_block = {}
@@ -87,14 +89,14 @@ class PlaywrightTool:
 val = _get_nested(cfg, "playwright", name)
 return fallback if val is None else val

-browser = str(_get("browser", PlaywrightDefaults.browser)).strip().lower() or "chromium"
+browser = str(_get("browser", defaults.browser)).strip().lower() or "chromium"
 if browser not in {"chromium", "firefox", "webkit"}:
 browser = "chromium"

-headless_raw = _get("headless", PlaywrightDefaults.headless)
+headless_raw = _get("headless", defaults.headless)
 headless = bool(headless_raw)

-ua = str(_get("user_agent", PlaywrightDefaults.user_agent))
+ua = str(_get("user_agent", defaults.user_agent))

 def _int(name: str, fallback: int) -> int:
 raw = _get(name, fallback)
@@ -103,11 +105,11 @@ class PlaywrightTool:
 except Exception:
 return fallback

-vw = _int("viewport_width", PlaywrightDefaults.viewport_width)
-vh = _int("viewport_height", PlaywrightDefaults.viewport_height)
-nav_timeout = _int("navigation_timeout_ms", PlaywrightDefaults.navigation_timeout_ms)
+vw = _int("viewport_width", defaults.viewport_width)
+vh = _int("viewport_height", defaults.viewport_height)
+nav_timeout = _int("navigation_timeout_ms", defaults.navigation_timeout_ms)

-ignore_https = bool(_get("ignore_https_errors", PlaywrightDefaults.ignore_https_errors))
+ignore_https = bool(_get("ignore_https_errors", defaults.ignore_https_errors))

 return PlaywrightDefaults(
 browser=browser,
@@ -128,6 +130,7 @@ class PlaywrightTool:
 f"detail: {detail}"
 )

+@contextlib.contextmanager
 def open_page(
 self,
 *,
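Two small but related fixes here: defaults are now read from a `PlaywrightDefaults()` instance instead of the class (class-level attribute access breaks for dataclass fields declared with `default_factory`, which is presumably why the instance is built first), and `open_page` gains `@contextlib.contextmanager` so its `yield`-based body actually works in a `with` statement. A sketch of the decorator's effect, using a hypothetical stand-in for the method:

    import contextlib

    @contextlib.contextmanager
    def open_page(launch, url: str):
        # Acquire, yield to the caller, and always release, even on error.
        browser = launch()
        try:
            page = browser.new_page()
            page.goto(url)
            yield page
        finally:
            browser.close()

    # with open_page(launch_chromium, "https://example.com") as page:
    #     html = page.content()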