This commit is contained in:
nose
2025-11-27 18:35:06 -08:00
parent 9eff65d1af
commit ed417c8200
6 changed files with 143 additions and 93 deletions

20
CLI.py
View File

@@ -603,7 +603,17 @@ def _create_cmdlet_cli():
# Initialize debug logging if enabled
if config:
from helper.logger import set_debug
set_debug(config.get("debug", False))
debug_enabled = config.get("debug", False)
set_debug(debug_enabled)
# Also configure standard logging for libraries that use it (like local_library.py)
if debug_enabled:
import logging
logging.basicConfig(
level=logging.DEBUG,
format='[%(name)s] %(levelname)s: %(message)s',
stream=sys.stderr
)
# Handle seeds if provided
if seeds_json:
@@ -677,10 +687,16 @@ def _create_cmdlet_cli():
# Check MPV availability at startup
try:
from hydrus_health_check import check_mpv_availability, initialize_matrix_health_check, initialize_hydrus_health_check
from hydrus_health_check import (
check_mpv_availability,
initialize_matrix_health_check,
initialize_hydrus_health_check,
initialize_local_library_scan
)
check_mpv_availability()
initialize_hydrus_health_check(config)
initialize_matrix_health_check(config)
initialize_local_library_scan(config)
except Exception as e:
debug(f"⚠ Could not check service availability: {e}")
except Exception:

View File

@@ -194,30 +194,45 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
if index_arg:
try:
pl_id = int(index_arg)
result = db.get_playlist_by_id(pl_id)
if result is None:
debug(f"Playlist ID {pl_id} not found.")
return 1
name, items = result
current_playlist_name = name
# Queue items (replacing current playlist)
if items:
_queue_items(items, clear_first=True)
# Handle Delete Playlist (if -clear is also passed)
if clear_mode:
if db.delete_playlist(pl_id):
debug(f"Playlist ID {pl_id} deleted.")
# Clear index_arg so we fall through to list mode and show updated list
index_arg = None
# Don't return, let it list the remaining playlists
else:
debug(f"Failed to delete playlist ID {pl_id}.")
return 1
else:
# Empty playlist, just clear
_send_ipc_command({"command": ["playlist-clear"]}, silent=True)
# Switch to list mode to show the result
list_mode = True
index_arg = None
# Fall through to list logic
# Handle Load Playlist
result = db.get_playlist_by_id(pl_id)
if result is None:
debug(f"Playlist ID {pl_id} not found.")
return 1
name, items = result
current_playlist_name = name
# Queue items (replacing current playlist)
if items:
_queue_items(items, clear_first=True)
else:
# Empty playlist, just clear
_send_ipc_command({"command": ["playlist-clear"]}, silent=True)
# Switch to list mode to show the result
list_mode = True
index_arg = None
# Fall through to list logic
except ValueError:
debug(f"Invalid playlist ID: {index_arg}")
return 1
else:
# If we deleted or didn't have an index, list playlists
if not index_arg:
playlists = db.get_playlists()
if not playlists:
@@ -280,7 +295,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return 1
# Handle piped input (add to playlist)
if result:
# Skip adding if -list is specified (user just wants to see current playlist)
if result and not list_mode:
# If result is a list of items, add them to playlist
items_to_add = []
if isinstance(result, list):

View File

@@ -238,16 +238,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
if not library_root:
log("No library root configured", file=sys.stderr)
return 1
db = LocalLibraryDB(library_root)
db.insert_worker(
worker_id,
"search",
title=f"Search: {query}",
description=f"Query: {query}",
pipe=ctx.get_current_command_text()
)
db = None
try:
db = LocalLibraryDB(library_root)
db.insert_worker(
worker_id,
"search",
title=f"Search: {query}",
description=f"Query: {query}",
pipe=ctx.get_current_command_text()
)
results_list = []
import result_table
import importlib
@@ -369,13 +371,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log(f"Search failed: {exc}", file=sys.stderr)
import traceback
traceback.print_exc(file=sys.stderr)
db.update_worker_status(worker_id, 'error')
if db:
try:
db.update_worker_status(worker_id, 'error')
except Exception:
pass
return 1
finally:
# Always close the database connection
try:
db.close()
except Exception:
pass
if db:
try:
db.close()
except Exception:
pass

View File

@@ -330,9 +330,12 @@ class LocalStorageBackend(StorageBackend):
except Exception:
word_regex = None
else:
# Use word boundary for exact terms (backwards compatibility)
# Use custom boundary that treats underscores as separators
# \b treats _ as a word character, so "foo_bar" wouldn't match "bar" with \b
try:
word_regex = re.compile(r'\b' + re.escape(term) + r'\b', re.IGNORECASE)
# Match if not preceded or followed by alphanumeric chars
pattern = r'(?<![a-zA-Z0-9])' + re.escape(term) + r'(?![a-zA-Z0-9])'
word_regex = re.compile(pattern, re.IGNORECASE)
except Exception:
word_regex = None
@@ -459,71 +462,19 @@ class LocalStorageBackend(StorageBackend):
if results:
debug(f"Returning {len(results)} results from DB")
return results
else:
debug("No results found in DB, falling back to filesystem scan")
debug("No results found in DB")
return results
except Exception as e:
log(f"⚠️ Database search failed: {e}", file=sys.stderr)
debug(f"DB search exception details: {e}")
# Fallback to filesystem search if database search fails or returns nothing
debug("Starting filesystem scan...")
recursive = kwargs.get("recursive", True)
pattern = "**/*" if recursive else "*"
# Split query into terms for AND logic
terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
if not terms:
terms = [query_lower]
count = 0
for file_path in search_dir.glob(pattern):
if not file_path.is_file():
continue
lower_name = file_path.name.lower()
if lower_name.endswith('.tags') or lower_name.endswith('.metadata') \
or lower_name.endswith('.notes') or lower_name.endswith('.tags.txt'):
continue
if not match_all:
# Check if ALL terms are present in the filename
# For single terms with wildcards, use fnmatch; otherwise use substring matching
if len(terms) == 1 and ('*' in terms[0] or '?' in terms[0]):
# Wildcard pattern matching for single term
from fnmatch import fnmatch
if not fnmatch(lower_name, terms[0]):
continue
else:
# Substring matching for all terms (AND logic)
if not all(term in lower_name for term in terms):
continue
size_bytes = file_path.stat().st_size
path_str = str(file_path)
results.append({
"name": file_path.stem,
"title": file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": path_str,
"target": path_str,
"origin": "local",
"size": size_bytes,
"size_bytes": size_bytes,
})
count += 1
if limit is not None and len(results) >= limit:
break
debug(f"Filesystem scan found {count} matches")
return []
except Exception as exc:
log(f"❌ Local search failed: {exc}", file=sys.stderr)
raise
return results
class HydrusStorageBackend(StorageBackend):
"""File storage backend for Hydrus client."""

View File

@@ -163,13 +163,25 @@ class LocalLibraryDB:
# Use check_same_thread=False to allow multi-threaded access
# This is safe because we're not sharing connections across threads;
# each thread will get its own cursor
self.connection = sqlite3.connect(str(self.db_path), check_same_thread=False)
# Set a generous timeout to avoid "database is locked" errors during heavy concurrency
self.connection = sqlite3.connect(str(self.db_path), check_same_thread=False, timeout=60.0)
self.connection.row_factory = sqlite3.Row
# Enable Write-Ahead Logging (WAL) for better concurrency
self.connection.execute("PRAGMA journal_mode=WAL")
# Enable foreign keys
self.connection.execute("PRAGMA foreign_keys = ON")
self._create_tables()
logger.info(f"Database initialized at {self.db_path}")
except Exception as e:
logger.error(f"Failed to initialize database: {e}", exc_info=True)
if self.connection:
try:
self.connection.close()
except Exception:
pass
self.connection = None
raise
def _create_tables(self) -> None:
@@ -1492,6 +1504,19 @@ class LocalLibrarySearchOptimizer:
except Exception as e:
logger.error(f"Failed to get playlist ID {playlist_id}: {e}")
return None
def delete_playlist(self, playlist_id: int) -> bool:
    """Delete a playlist by ID.

    Returns True when a row was actually removed, False when the DB is
    unavailable, the ID did not exist, or the delete raised.
    """
    if not self.db:
        # No backing database configured; nothing to delete.
        return False
    try:
        conn = self.db.connection
        cur = conn.cursor()
        cur.execute("DELETE FROM playlists WHERE id = ?", (playlist_id,))
        conn.commit()
        # rowcount > 0 distinguishes "deleted" from "ID not found".
        deleted = cur.rowcount > 0
        return deleted
    except Exception as e:
        logger.error(f"Failed to delete playlist ID {playlist_id}: {e}")
        return False
if not self.db:
return []
return self.db.search_by_tag(tag, limit)

View File

@@ -540,3 +540,38 @@ def enable_matrix_features() -> None:
_MATRIX_AVAILABLE = True
_MATRIX_UNAVAILABLE_REASON = None
logger.info("[Matrix] Features manually enabled")
def initialize_local_library_scan(config: Dict[str, Any]) -> None:
    """Initialize and scan local library at startup.

    Indexes any new files found under the configured local-library folder
    and imports/cleans up their sidecar files. Errors are logged and
    reported to stderr but never propagated to the caller.
    """
    from config import get_local_storage_path
    from helper.local_library import LocalLibraryInitializer

    logger.info("[Startup] Starting Local Library scan...")
    try:
        root = get_local_storage_path(config)
        if not root:
            # Nothing configured — skip silently apart from the stderr note.
            debug("⚠️ Local Library: SKIPPED - No storage path configured", file=sys.stderr)
            return

        debug(f"Scanning local library at: {root}", file=sys.stderr)
        scan_stats = LocalLibraryInitializer(root).scan_and_index()

        # Summarize what the scan changed.
        added = scan_stats.get('files_new', 0)
        imported = scan_stats.get('sidecars_imported', 0)
        if added > 0 or imported > 0:
            debug(f"✅ Local Library: Scanned - New files: {added}, Sidecars imported: {imported}", file=sys.stderr)
        else:
            debug("✅ Local Library: Up to date", file=sys.stderr)
    except Exception as e:
        logger.error(f"[Startup] Failed to scan local library: {e}", exc_info=True)
        debug(f"⚠️ Local Library: ERROR - Scan failed: {e}", file=sys.stderr)