@@ -2751,58 +2751,8 @@ def register_url_with_local_library(
    Returns:
        True if the URL was registered, False otherwise
    """

    try:
        from SYS.config import get_local_storage_path
        from API.folder import API_folder_store

        file_path = get_field(pipe_obj, "path")
        url_field = get_field(pipe_obj, "url", [])
        urls: List[str] = []
        if isinstance(url_field, str):
            urls = [u.strip() for u in url_field.split(",") if u.strip()]
        elif isinstance(url_field, (list, tuple)):
            urls = [u for u in url_field if isinstance(u, str) and u.strip()]

        if not file_path or not urls:
            return False

        path_obj = Path(file_path)
        if not path_obj.exists():
            return False

        storage_path = get_local_storage_path(config)
        if not storage_path:
            return False

        # Optimization: don't open the DB if the file isn't under the library root
        try:
            path_obj.resolve().relative_to(Path(storage_path).resolve())
        except ValueError:
            return False

        with API_folder_store(storage_path) as db:
            file_hash = db.get_file_hash(path_obj)
            if not file_hash:
                return False
            metadata = db.get_metadata(file_hash) or {}
            existing_url = metadata.get("url") or []

            # Add any new URLs
            changed = False
            for u in urls:
                if u not in existing_url:
                    existing_url.append(u)
                    changed = True

            if changed:
                metadata["url"] = existing_url
                db.save_metadata(path_obj, metadata)
                return True

            return True  # URLs already existed
    except Exception:
        return False
    # Folder store removed; local library URL registration is disabled.
    return False


def resolve_tidal_manifest_path(item: Any) -> Optional[str]:

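The removed body gates DB access on a library-root containment check built on Path.relative_to, which raises ValueError for paths outside the root. A minimal standalone sketch of that idiom (the helper name and paths are hypothetical, not code from this repository):

    from pathlib import Path

    def is_inside(root: Path, candidate: Path) -> bool:
        # relative_to() raises ValueError when candidate is not under root
        try:
            candidate.resolve().relative_to(root.resolve())
            return True
        except ValueError:
            return False

    # is_inside(Path.home() / "library", Path.home() / "library" / "a.mp3")  -> True
    # is_inside(Path.home() / "library", Path("/tmp/other.mp3"))             -> False
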
@@ -7,7 +7,6 @@ import sys
from pathlib import Path

from SYS.logger import debug, log
from Store.Folder import Folder
from Store import Store
from . import _shared as sh
from API import HydrusNetwork as hydrus_wrapper
@@ -280,61 +279,23 @@ class Delete_File(sh.Cmdlet):
        except Exception:
            size_bytes = None

        # If lib_root is provided and this is from a folder store, use the Folder class
        if lib_root:
            try:
                folder = Folder(Path(lib_root), name=store or "local")
                if folder.delete_file(str(path)):
                    local_deleted = True
                    deleted_rows.append(
                        {
                            "title": str(title_val).strip() if title_val else path.name,
                            "store": store_label,
                            "hash": hash_hex or sh.normalize_hash(path.stem) or "",
                            "size_bytes": size_bytes,
                            "ext": _get_ext_from_item() or path.suffix.lstrip("."),
                        }
                    )
            except Exception as exc:
                debug(f"Folder.delete_file failed: {exc}", file=sys.stderr)
                # Fallback to manual deletion
                try:
                    if path.exists() and path.is_file():
                        path.unlink()
                        local_deleted = True
                        deleted_rows.append(
                            {
                                "title": str(title_val).strip() if title_val else path.name,
                                "store": store_label,
                                "hash": hash_hex or sh.normalize_hash(path.stem) or "",
                                "size_bytes": size_bytes,
                                "ext": _get_ext_from_item() or path.suffix.lstrip("."),
                            }
                        )
                except Exception as exc:
                    log(f"Local delete failed: {exc}", file=sys.stderr)
        else:
            # No lib_root, just delete the file
            try:
                if path.exists() and path.is_file():
                    path.unlink()
                    local_deleted = True
                    deleted_rows.append(
                        {
                            "title": str(title_val).strip() if title_val else path.name,
                            "store": store_label,
                            "hash": hash_hex or sh.normalize_hash(path.stem) or "",
                            "size_bytes": size_bytes,
                            "ext": _get_ext_from_item() or path.suffix.lstrip("."),
                        }
                    )
            except Exception as exc:
                log(f"Local delete failed: {exc}", file=sys.stderr)
        # Delete the local file directly
        try:
            if path.exists() and path.is_file():
                path.unlink()
                local_deleted = True
                deleted_rows.append(
                    {
                        "title": str(title_val).strip() if title_val else path.name,
                        "store": store_label,
                        "hash": hash_hex or sh.normalize_hash(path.stem) or "",
                        "size_bytes": size_bytes,
                        "ext": _get_ext_from_item() or path.suffix.lstrip("."),
                    }
                )
        except Exception as exc:
            log(f"Local delete failed: {exc}", file=sys.stderr)

        # Remove common sidecars regardless of file removal success
        for sidecar in (
@@ -533,24 +494,6 @@ class Delete_File(sh.Cmdlet):
            log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
            return 1

        # If no lib_root provided, try to get the first folder store from config
        if not lib_root:
            try:
                storage_config = config.get("storage", {})
                folder_config = storage_config.get("folder", {})
                if folder_config:
                    # Get first folder store path
                    for store_name, store_config in folder_config.items():
                        if isinstance(store_config, dict):
                            path = store_config.get("path")
                            if path:
                                lib_root = path
                                break
            except Exception:
                pass

        reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()

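For reference, the removed fallback walks a configuration shape like the sketch below; the nesting follows the lookups in the deleted code, while the store name and path are illustrative assumptions rather than a documented schema:

    config = {
        "storage": {
            "folder": {
                "main": {"path": "~/Library"},  # first entry with a "path" becomes lib_root
            },
        },
    }
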
@@ -175,27 +175,6 @@ class Get_File(sh.Cmdlet):
            log(f"Error: Backend could not retrieve file for hash {file_hash}")
            return 1

        # Folder store UX: without -path, just open the file in the default app.
        # Only export/copy when -path is explicitly provided.
        backend_name = type(backend).__name__
        is_folder_backend = backend_name.lower() == "folder"
        if is_folder_backend and not output_path:
            display_title = resolve_display_title() or source_path.stem or "Opened"
            ext_for_emit = metadata.get("ext") or source_path.suffix.lstrip(".")
            self._open_file_default(source_path)
            log(f"Opened: {source_path}", file=sys.stderr)
            ctx.emit(
                {
                    "hash": file_hash,
                    "store": store_name,
                    "path": str(source_path),
                    "title": str(display_title),
                    "ext": str(ext_for_emit or ""),
                }
            )
            debug("[get-file] Completed successfully")
            return 0

        # Otherwise: export/copy to output_dir.
        if output_path:
            output_dir = Path(output_path).expanduser()

@@ -2,7 +2,6 @@ from __future__ import annotations

from typing import Any, Dict, Sequence, Optional
import sys
from pathlib import Path

from SYS.logger import log

@@ -19,12 +18,11 @@ get_hash_for_operation = sh.get_hash_for_operation
fetch_hydrus_metadata = sh.fetch_hydrus_metadata
should_show_help = sh.should_show_help
get_field = sh.get_field
from API.folder import API_folder_store
from Store import Store

CMDLET = Cmdlet(
    name="get-relationship",
    summary="Print relationships for the selected file (Hydrus or Local).",
    summary="Print relationships for the selected file (Hydrus).",
    usage='get-relationship [-query "hash:<sha256>"]',
    alias=[],
    arg=[
@@ -32,155 +30,12 @@ CMDLET = Cmdlet(
        SharedArgs.STORE,
    ],
    detail=[
        "- Lists relationship data as returned by Hydrus or Local DB.",
        "- Lists relationship data as returned by Hydrus.",
    ],
)


def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
    # Help
    if should_show_help(_args):
        log(f"Cmdlet: {CMDLET.name}\nSummary: {CMDLET.summary}\nUsage: {CMDLET.usage}")
        return 0

    # Parse -query and -store override
    override_query: str | None = None
    override_store: str | None = None
    args_list = list(_args)
    i = 0
    while i < len(args_list):
        a = args_list[i]
        low = str(a).lower()
        if low in {"-query", "--query", "query"} and i + 1 < len(args_list):
            override_query = str(args_list[i + 1]).strip()
            i += 2
            continue
        if low in {"-store", "--store", "store"} and i + 1 < len(args_list):
            override_store = str(args_list[i + 1]).strip()
            i += 2
            continue
        i += 1

    override_hash: str | None = (
        sh.parse_single_hash_query(override_query) if override_query else None
    )
    if override_query and not override_hash:
        log('get-relationship requires -query "hash:<sha256>"', file=sys.stderr)
        return 1

    # Handle @N selection which creates a list
    # This cmdlet is single-subject; require disambiguation when multiple items are provided.
    if isinstance(result, list):
        if len(result) == 0:
            result = None
        elif len(result) > 1 and not override_hash:
            log(
                'get-relationship expects a single item; select one row (e.g. @1) or pass -query "hash:<sha256>"',
                file=sys.stderr,
            )
            return 1
        else:
            result = result[0]

    # Initialize results collection
    found_relationships = []  # List of dicts: {hash, type, title, path, store}
    source_title = "Unknown"

    def _add_relationship(entry: Dict[str, Any]) -> None:
        """Add relationship if not already present by hash or path."""
        for existing in found_relationships:
            if (entry.get("hash")
                    and str(existing.get("hash", "")).lower() == str(entry["hash"]).lower()):
                return
            if (entry.get("path")
                    and str(existing.get("path", "")).lower() == str(entry["path"]).lower()):
                return
        found_relationships.append(entry)

    # Store/hash-first subject resolution
    store_name: Optional[str] = override_store
    if not store_name:
        store_name = get_field(result, "store")

    hash_hex = (
        normalize_hash(override_hash)
        if override_hash else normalize_hash(get_hash_for_operation(None, result))
    )

    if not source_title or source_title == "Unknown":
        source_title = (
            get_field(result, "title") or get_field(result, "name")
            or (hash_hex[:16] + "..." if hash_hex else "Unknown")
        )

    local_db_checked = False

    if store_name and hash_hex:
        try:
            store = Store(config)
            backend = store[str(store_name)]

            # Folder store relationships
            # IMPORTANT: only treat the Folder backend as a local DB store.
            # Other backends may expose a location() method but are not SQLite folder stores.
            if (type(backend).__name__ == "Folder" and hasattr(backend, "location")
                    and callable(getattr(backend, "location"))):
                storage_path = Path(str(backend.location()))
                with API_folder_store(storage_path) as db:
                    local_db_checked = True

                    # Update source title from tags if possible
                    try:
                        tags = db.get_tags(hash_hex)
                        for t in tags:
                            if isinstance(t, str) and t.lower().startswith("title:"):
                                source_title = t[6:].strip()
                                break
                    except Exception:
                        pass

                    metadata = db.get_metadata(hash_hex)
                    rels = (metadata or {}).get("relationships")
                    king_hashes: list[str] = []

                    # Forward relationships
                    if isinstance(rels, dict):
                        for rel_type, hashes in rels.items():
                            if not isinstance(hashes, list):
                                continue
                            for related_hash in hashes:
                                related_hash = normalize_hash(str(related_hash))
                                if not related_hash or related_hash == hash_hex:
                                    continue

                                entry_type = (
                                    "king" if str(rel_type).lower() == "alt" else str(rel_type)
                                )
                                if entry_type == "king":
                                    king_hashes.append(related_hash)

                                related_title = related_hash[:16] + "..."
                                try:
                                    rel_tags = db.get_tags(related_hash)
                                    for t in rel_tags:
                                        if isinstance(t, str) and t.lower().startswith("title:"):
                                            related_title = t[6:].strip()
                                            break
                                except Exception:
                                    pass

                                _add_relationship(
@@ -270,10 +125,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
        except Exception as e:
            log(f"Error checking store relationships: {e}", file=sys.stderr)

    # If we found local relationships, we can stop or merge with Hydrus?
    # For now, if we found local ones, let's show them.
    # But if the file is also in Hydrus, we might want those too.
    # Let's try Hydrus if we have a hash.
    # Fetch Hydrus relationships if we have a hash.

    hash_hex = (
        normalize_hash(override_hash)
@@ -281,7 +133,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
        result))
    )

    if hash_hex and not local_db_checked:
    if hash_hex:
        try:
            client = None
            store_label = "hydrus"

@@ -1,4 +1,4 @@
"""search-file cmdlet: Search for files in storage backends (Folder, Hydrus)."""
"""search-file cmdlet: Search for files in storage backends (Hydrus)."""

from __future__ import annotations

@@ -11,12 +11,12 @@ import sys

from SYS.logger import log, debug
from ProviderCore.registry import get_search_provider, list_search_providers
from SYS.config import get_local_storage_path
from SYS.rich_display import (
    show_provider_config_panel,
    show_store_config_panel,
    show_available_providers_panel,
)
from SYS.database import insert_worker, update_worker, append_worker_stdout

from ._shared import (
    Cmdlet,
@@ -32,17 +32,52 @@ from SYS import pipeline as ctx

STORAGE_ORIGINS = {"local", "hydrus", "folder", "zerotier"}


class _WorkerLogger:
    def __init__(self, worker_id: str) -> None:
        self.worker_id = worker_id

    def __enter__(self) -> "_WorkerLogger":
        return self

    def __exit__(self, exc_type, exc, tb) -> None:  # type: ignore[override]
        return None

    def insert_worker(
        self,
        worker_id: str,
        worker_type: str,
        title: str = "",
        description: str = "",
        **kwargs: Any,
    ) -> None:
        try:
            insert_worker(worker_id, worker_type, title=title, description=description)
        except Exception:
            pass

    def update_worker_status(self, worker_id: str, status: str) -> None:
        try:
            update_worker(worker_id, status=status)
        except Exception:
            pass

    def append_worker_stdout(self, worker_id: str, content: str) -> None:
        try:
            append_worker_stdout(worker_id, content)
        except Exception:
            pass


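A short usage sketch of the shim above, mirroring how the later hunks drive it (the worker id and logged strings are illustrative):

    worker_id = str(uuid.uuid4())
    with _WorkerLogger(worker_id) as db:
        db.insert_worker(worker_id, "search-file", title="Search: foo")
        db.append_worker_stdout(worker_id, "[]")
        db.update_worker_status(worker_id, "completed")
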
class search_file(Cmdlet):
    """Class-based search-file cmdlet for searching storage backends."""

    def __init__(self) -> None:
        super().__init__(
            name="search-file",
            summary="Search storage backends (Folder, Hydrus) or external providers (via -provider).",
            summary="Search storage backends (Hydrus) or external providers (via -provider).",
            usage="search-file [-query <query>] [-store BACKEND] [-limit N] [-provider NAME]",
            arg=[
                CmdletArg(
@@ -65,7 +100,7 @@ class search_file(Cmdlet):
                ),
            ],
            detail=[
                "Search across storage backends: Folder stores and Hydrus instances",
                "Search across storage backends: Hydrus instances",
                "Use -store to search a specific backend by name",
                "URL search: url:* (any URL) or url:<value> (URL substring)",
                "Extension search: ext:<value> (e.g., ext:png)",
@@ -74,12 +109,12 @@ class search_file(Cmdlet):
                "Examples:",
                "search-file -query foo # Search all storage backends",
                "search-file -store home -query '*' # Search 'home' Hydrus instance",
                "search-file -store test -query 'video' # Search 'test' folder store",
                "search-file -store home -query 'video' # Search 'home' Hydrus instance",
                "search-file -query 'hash:deadbeef...' # Search by SHA256 hash",
                "search-file -query 'url:*' # Files that have any URL",
                "search-file -query 'url:youtube.com' # Files whose URL contains substring",
                "search-file -query 'ext:png' # Files whose metadata ext is png",
                "search-file -query 'system:filetype = png' # Hydrus: native; Folder: maps to metadata.ext",
                "search-file -query 'system:filetype = png' # Hydrus: native",
                "",
                "Provider search (-provider):",
                "search-file -provider youtube 'tutorial' # Search YouTube provider",
@@ -210,49 +245,15 @@ class search_file(Cmdlet):
            return 1

        worker_id = str(uuid.uuid4())
        library_root = get_local_storage_path(config or {}) if get_local_storage_path else None

        if not library_root:
            try:
                from Store.registry import get_backend_instance
                # Try the first configured folder backend without instantiating all backends
                store_cfg = (config or {}).get("store") or {}
                folder_cfg = None
                for raw_store_type, instances in store_cfg.items():
                    if _normalize_store_type(str(raw_store_type)) == "folder":
                        folder_cfg = instances
                        break
                if isinstance(folder_cfg, dict):
                    for instance_name, instance_config in folder_cfg.items():
                        try:
                            backend = get_backend_instance(config, instance_name, suppress_debug=True)
                            if backend and type(backend).__name__ == "Folder":
                                library_root = expand_path(getattr(backend, "_location", None))
                                if library_root:
                                    break
                        except Exception:
                            pass
            except Exception:
                pass

        db = None
        # Disable Folder DB usage for "external" searches when not using a folder store
        # db = None
        if library_root and False:  # Disabled to prevent 'database is locked' errors during external searches
            try:
                from API.folder import API_folder_store

                db = API_folder_store(library_root)
                db.__enter__()
                db.insert_worker(
                    worker_id,
                    "search-file",
                    title=f"Search: {query}",
                    description=f"Provider: {provider_name}, Query: {query}",
                    pipe=ctx.get_current_command_text(),
                )
            except Exception:
                db = None
        try:
            insert_worker(
                worker_id,
                "search-file",
                title=f"Search: {query}",
                description=f"Provider: {provider_name}, Query: {query}",
            )
        except Exception:
            pass

        try:
            results_list: List[Dict[str, Any]] = []
@@ -381,9 +382,11 @@ class search_file(Cmdlet):

            if not results:
                log(f"No results found for query: {query}", file=sys.stderr)
                if db is not None:
                    db.append_worker_stdout(worker_id, json.dumps([], indent=2))
                    db.update_worker_status(worker_id, "completed")
                try:
                    append_worker_stdout(worker_id, json.dumps([], indent=2))
                    update_worker(worker_id, status="completed")
                except Exception:
                    pass
                return 0

            for search_result in results:
@@ -415,9 +418,11 @@ class search_file(Cmdlet):

            ctx.set_current_stage_table(table)

            if db is not None:
                db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
                db.update_worker_status(worker_id, "completed")
            try:
                append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
                update_worker(worker_id, status="completed")
            except Exception:
                pass

            return 0

@@ -426,18 +431,11 @@ class search_file(Cmdlet):
            import traceback

            debug(traceback.format_exc())
            if db is not None:
                try:
                    db.update_worker_status(worker_id, "error")
                except Exception:
                    pass
            try:
                update_worker(worker_id, status="error")
            except Exception:
                pass
            return 1
        finally:
            if db is not None:
                try:
                    db.__exit__(None, None, None)
                except Exception:
                    pass

    # --- Execution ------------------------------------------------------
    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
@@ -591,37 +589,12 @@ class search_file(Cmdlet):
            log("Provide a search query", file=sys.stderr)
            return 1

        from API.folder import API_folder_store

        worker_id = str(uuid.uuid4())

        from Store import Store
        storage_registry = Store(config=config or {})

        library_root = get_local_storage_path(config or {})
        if not library_root:
            # Fallback for search-file: if no global folder path is found,
            # try to use the specific backend mentioned in -store or the first available folder backend.
            if storage_backend:
                try:
                    backend = storage_registry[storage_backend]
                    if backend and type(backend).__name__ == "Folder":
                        library_root = expand_path(getattr(backend, "_location", None))
                except Exception:
                    pass
            else:
                # Try all backends until we find a Folder one
                for name in storage_registry.list_backends():
                    try:
                        backend = storage_registry[name]
                        if type(backend).__name__ == "Folder":
                            library_root = expand_path(getattr(backend, "_location", None))
                            if library_root:
                                break
                    except Exception:
                        continue

        if not library_root:
            if not storage_registry.list_backends():
                # Internal refreshes should not trigger config panels or stop progress.
                if "-internal-refresh" in args_list:
                    return 1
@@ -635,11 +608,11 @@ class search_file(Cmdlet):
                    progress.stop()
                except Exception:
                    pass
                show_store_config_panel(["Folder Store"])
                show_store_config_panel(["Hydrus Network"])
                return 1

        # Use context manager to ensure database is always closed
        with API_folder_store(library_root) as db:
        # Use a lightweight worker logger to track search results in the central DB
        with _WorkerLogger(worker_id) as db:
            try:
                if "-internal-refresh" not in args_list:
                    db.insert_worker(
@@ -713,18 +686,7 @@ class search_file(Cmdlet):

                # Resolve a path/URL string if possible
                path_str: Optional[str] = None
                # IMPORTANT: avoid calling get_file() for remote backends.
                # For Hydrus, get_file() returns a browser URL (and may include access keys),
                # which should not be pulled during search/refresh.
                try:
                    if type(resolved_backend).__name__ == "Folder":
                        maybe_path = resolved_backend.get_file(h)
                        if isinstance(maybe_path, Path):
                            path_str = str(maybe_path)
                        elif isinstance(maybe_path, str) and maybe_path:
                            path_str = maybe_path
                except Exception:
                    path_str = None
                # Avoid calling get_file() for remote backends during search/refresh.

                meta_obj: Dict[str, Any] = {}
