This commit is contained in:
nose
2025-12-06 00:10:19 -08:00
parent 5482ee5586
commit f29709d951
20 changed files with 1353 additions and 419 deletions


@@ -381,6 +381,81 @@ class LocalStorageBackend(StorageBackend):
"size_bytes": size_bytes,
"tags": tags,
})
if limit is not None and len(results) >= limit:
return results
# Title-tag search: treat freeform terms as title namespace queries (AND across terms)
if terms:
title_hits: dict[int, dict[str, Any]] = {}
for term in terms:
cursor.execute(
"""
SELECT DISTINCT f.id, f.file_path, f.file_size
FROM files f
JOIN tags t ON f.id = t.file_id
WHERE LOWER(t.tag) LIKE ?
ORDER BY f.file_path
LIMIT ?
""",
(f"title:%{term}%", fetch_limit),
)
for file_id, file_path_str, size_bytes in cursor.fetchall():
if not file_path_str:
continue
entry = title_hits.get(file_id)
if entry:
entry["count"] += 1
if size_bytes is not None:
entry["size"] = size_bytes
else:
title_hits[file_id] = {
"path": file_path_str,
"size": size_bytes,
"count": 1,
}
if title_hits:
required = len(terms)
for file_id, info in title_hits.items():
if info.get("count") != required:
continue
file_path_str = info.get("path")
if not file_path_str or file_path_str in seen_files:
continue
file_path = Path(file_path_str)
if not file_path.exists():
continue
seen_files.add(file_path_str)
size_bytes = info.get("size")
if size_bytes is None:
try:
size_bytes = file_path.stat().st_size
except OSError:
size_bytes = None
cursor.execute(
"""
SELECT tag FROM tags WHERE file_id = ?
""",
(file_id,),
)
tags = [row[0] for row in cursor.fetchall()]
title_tag = next((t.split(':', 1)[1] for t in tags if t.lower().startswith('title:')), None)
results.append({
"name": file_path.stem,
"title": title_tag or file_path.stem,
"ext": file_path.suffix.lstrip('.'),
"path": str(file_path),
"target": str(file_path),
"origin": "local",
"size": size_bytes,
"size_bytes": size_bytes,
"tags": tags,
})
if limit is not None and len(results) >= limit:
return results
# Also search for simple tags (without namespace) containing the query
# Only run the tag search for a single-term query (or extend it if multi-term tag search should be supported)
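A minimal sketch of the AND-across-terms rule the title-tag block above implements (each title: hit increments a per-file counter and only files whose count equals the number of terms survive), decoupled from SQLite; hits_for_term is a hypothetical stand-in for the LIKE 'title:%term%' query:

from collections import Counter
from typing import Callable, Iterable

def files_matching_all_terms(terms: list[str],
                             hits_for_term: Callable[[str], Iterable[int]]) -> set[int]:
    """Return the file ids whose title: tags matched every search term."""
    counts: Counter[int] = Counter()
    for term in terms:
        # set() mirrors SELECT DISTINCT: a file counts at most once per term
        for file_id in set(hits_for_term(term)):
            counts[file_id] += 1
    required = len(terms)
    return {file_id for file_id, seen in counts.items() if seen == required}

# e.g. files_matching_all_terms(["live", "2024"], lambda t: {1, 2} if t == "live" else {2})
# returns {2}: only file 2 matched both terms.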
@@ -697,28 +772,35 @@ class HydrusStorageBackend(StorageBackend):
# debug(f"[HydrusBackend.search] Processing file_id={file_id}, tags type={type(tags_set)}")
if isinstance(tags_set, dict):
# debug(f"[HydrusBackend.search] Tags payload keys: {list(tags_set.keys())}")
# Collect both storage_tags and display_tags to capture siblings/parents and ensure title: is seen
def _collect(tag_list: Any) -> None:
nonlocal title, all_tags_str
if not isinstance(tag_list, list):
return
for tag in tag_list:
tag_text = str(tag) if tag else ""
if not tag_text:
continue
all_tags.append(tag_text)
all_tags_str += " " + tag_text.lower()
if tag_text.lower().startswith("title:") and title == f"Hydrus File {file_id}":
title = tag_text.split(":", 1)[1].strip()
for service_name, service_tags in tags_set.items():
# debug(f"[HydrusBackend.search] Processing service: {service_name}")
if isinstance(service_tags, dict):
storage_tags = service_tags.get("storage_tags", {})
if isinstance(storage_tags, dict):
for tag_type, tag_list in storage_tags.items():
# debug(f"[HydrusBackend.search] Tag type: {tag_type}, count: {len(tag_list) if isinstance(tag_list, list) else 0}")
if isinstance(tag_list, list):
for tag in tag_list:
tag_text = str(tag) if tag else ""
if tag_text:
# debug(f"[HydrusBackend.search] Tag: {tag_text}")
all_tags.append(tag_text)
all_tags_str += " " + tag_text.lower()
# Extract title: namespace
if tag_text.startswith("title:"):
title = tag_text[6:].strip() # Remove "title:" prefix
# debug(f"[HydrusBackend.search] ✓ Extracted title: {title}")
break
if title != f"Hydrus File {file_id}":
break
if not isinstance(service_tags, dict):
continue
storage_tags = service_tags.get("storage_tags", {})
if isinstance(storage_tags, dict):
for tag_list in storage_tags.values():
_collect(tag_list)
display_tags = service_tags.get("display_tags", [])
_collect(display_tags)
# Also consider a top-level flattened tags payload, if provided (the Hydrus API sometimes includes one)
top_level_tags = meta.get("tags_flat", []) or meta.get("tags", [])
_collect(top_level_tags)
# Resolve extension from MIME type
mime_type = meta.get("mime")
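For reference, a hedged sketch of the per-service payload shape the new _collect walk expects; only the keys come from the code above, while the service name and tag values are invented:

# Hypothetical per-service tags payload (tags_set above); values are illustrative only.
tags_set = {
    "my tags": {
        "storage_tags": {"0": ["title:Example Video", "creator:someone"]},
        "display_tags": ["title:Example Video", "creator:someone", "series:demo"],
    },
}
# The walk visits every storage_tags list and then display_tags for each service
# (plus any flattened top-level payload), so all_tags collects every tag it sees
# (duplicates included, since the code appends without deduplicating) and title
# resolves to "Example Video" instead of the "Hydrus File <id>" fallback.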
@@ -796,202 +878,6 @@ class HydrusStorageBackend(StorageBackend):
import traceback
traceback.print_exc(file=sys.stderr)
raise
class DebridStorageBackend(StorageBackend):
"""File storage backend for Debrid services (AllDebrid, RealDebrid, etc.)."""
def __init__(self, api_key: Optional[str] = None) -> None:
"""Initialize Debrid storage backend.
Args:
api_key: API key for Debrid service (e.g., from config["Debrid"]["All-debrid"])
"""
self._api_key = api_key
def get_name(self) -> str:
return "debrid"
def upload(self, file_path: Path, **kwargs: Any) -> str:
"""Upload file to Debrid service.
Args:
file_path: Path to the file to upload
**kwargs: Debrid-specific options
Returns:
Debrid link/URL
Raises:
NotImplementedError: Debrid upload not yet implemented
"""
raise NotImplementedError("Debrid upload not yet implemented")
def search(self, query: str, **kwargs: Any) -> list[Dict[str, Any]]:
"""Search Debrid for files matching query.
Searches through available magnets in AllDebrid storage and returns
matching results with download links.
Args:
query: Search query string (filename or magnet name pattern)
limit: Maximum number of results to return (default: 50)
api_key: Optional override for API key (uses default if not provided)
Returns:
List of dicts with keys:
- 'name': File/magnet name
- 'title': Same as name (for compatibility)
- 'url': AllDebrid download link
- 'size': File size in bytes
- 'magnet_id': AllDebrid magnet ID
- 'origin': 'debrid'
- 'annotations': Status and seeders info
Example:
results = storage["debrid"].search("movie.mkv")
for result in results:
print(f"{result['name']} - {result['size']} bytes")
"""
api_key = kwargs.get("api_key") or self._api_key
if not api_key:
raise ValueError("'api_key' parameter required for Debrid search (not configured)")
limit = kwargs.get("limit", 50)
try:
from helper.alldebrid import AllDebridClient
debug(f"Searching AllDebrid for: {query}")
client = AllDebridClient(api_key=api_key)
# STEP 1: Get magnet status list
try:
response = client._request('magnet/status')
magnets_data = response.get('data', {})
magnets = magnets_data.get('magnets', [])
if not isinstance(magnets, list):
magnets = [magnets] if magnets else []
debug(f"[debrid_search] Got {len(magnets)} total magnets")
except Exception as e:
log(f"⚠ Failed to get magnets list: {e}", file=sys.stderr)
magnets = []
# Filter by query for relevant magnets
query_lower = query.lower()
matching_magnet_ids = []
magnet_info_map = {} # Store status info for later
# "*" means "match all" - include all magnets
match_all = query_lower == "*"
# Split query into terms for AND logic
terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
if not terms:
terms = [query_lower]
for magnet in magnets:
filename = magnet.get('filename', '').lower()
status_code = magnet.get('statusCode', 0)
magnet_id = magnet.get('id')
# Only include ready or nearly-ready magnets (skip error states 5+)
if status_code not in [0, 1, 2, 3, 4]:
continue
# Match query against filename (or match all if query is "*")
if not match_all:
if not all(term in filename for term in terms):
continue
matching_magnet_ids.append(magnet_id)
magnet_info_map[magnet_id] = magnet
debug(f"[debrid_search] ✓ Matched magnet {magnet_id}: {filename}")
debug(f"[debrid_search] Found {len(matching_magnet_ids)} matching magnets")
results = []
# Return one result per magnet (not per file)
# This keeps search results clean and lets the user download the entire magnet at once
for magnet_id in matching_magnet_ids:
magnet_status = magnet_info_map.get(magnet_id, {})
filename = magnet_status.get('filename', 'Unknown')
status = magnet_status.get('status', 'Unknown')
status_code = magnet_status.get('statusCode', 0)
size = magnet_status.get('size', 0)
seeders = magnet_status.get('seeders', 0)
# Format size nicely
size_label = f"{size / (1024**3):.2f}GB" if size > 0 else "Unknown"
# Create one result per magnet with aggregated info
results.append({
'name': filename,
'title': filename,
'url': '', # No direct file link for the magnet itself
'size': size,
'size_bytes': size,
'magnet_id': magnet_id,
'origin': 'debrid',
'annotations': [
status,
f"{seeders} seeders",
size_label,
],
'target': '', # No direct link; the stored magnet_id lets the user download it later
})
debug(f"Found {len(results)} result(s) on AllDebrid")
return results[:limit]
except Exception as exc:
log(f"❌ Debrid search failed: {exc}", file=sys.stderr)
raise
def _flatten_file_tree(self, files: list[Any], prefix: str = '') -> list[Dict[str, Any]]:
"""Flatten AllDebrid's nested file tree structure.
AllDebrid returns files in a tree structure with folders ('e' key).
This flattens it to a list of individual files.
Args:
files: AllDebrid file tree structure
prefix: Current path prefix (used recursively)
Returns:
List of flattened file entries with 'name', 'size', 'link' keys
"""
result = []
if not isinstance(files, list):
return result
for item in files:
if not isinstance(item, dict):
continue
name = item.get('n', '')
# Check if it's a folder (has 'e' key with entries)
if 'e' in item:
# Recursively flatten subfolder
subfolder_path = f"{prefix}/{name}" if prefix else name
subitems = item.get('e', [])
result.extend(self._flatten_file_tree(subitems, subfolder_path))
else:
# It's a file - add it to results
file_path = f"{prefix}/{name}" if prefix else name
result.append({
'name': file_path,
'size': item.get('s', 0),
'link': item.get('l', ''),
})
return result
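Although this backend is removed by the commit, a short illustration of the tree shape its helper flattened: the 'n'/'s'/'l'/'e' keys come from the code above, the concrete values are invented.

# Illustrative AllDebrid file tree and the result _flatten_file_tree would
# produce (paths joined with '/', folders recursed via their 'e' entries).
tree = [
    {"n": "Season 1", "e": [
        {"n": "ep01.mkv", "s": 734003200, "l": "https://example.invalid/ep01"},
        {"n": "ep02.mkv", "s": 741830656, "l": "https://example.invalid/ep02"},
    ]},
    {"n": "readme.txt", "s": 1024, "l": "https://example.invalid/readme"},
]
# -> [{'name': 'Season 1/ep01.mkv', 'size': 734003200, 'link': 'https://example.invalid/ep01'},
#     {'name': 'Season 1/ep02.mkv', 'size': 741830656, 'link': 'https://example.invalid/ep02'},
#     {'name': 'readme.txt', 'size': 1024, 'link': 'https://example.invalid/readme'}]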
class MatrixStorageBackend(StorageBackend):
"""File storage backend for Matrix (Element) chat rooms."""
@@ -1344,7 +1230,6 @@ class FileStorage:
# Search with searchable backends (uses configured locations)
results = storage["hydrus"].search("music")
results = storage["local"].search("song") # Uses config["Local"]["path"]
results = storage["debrid"].search("movie")
"""
def __init__(self, config: Optional[Dict[str, Any]] = None) -> None:
@@ -1356,13 +1241,11 @@ class FileStorage:
config = config or {}
# Extract backend-specific settings from config
from config import get_local_storage_path, get_debrid_api_key
from config import get_local_storage_path
local_path = get_local_storage_path(config)
local_path_str = str(local_path) if local_path else None
debrid_api_key = get_debrid_api_key(config)
self._backends: Dict[str, StorageBackend] = {}
# Always include local backend (even if no default path configured)
@@ -1372,10 +1255,6 @@ class FileStorage:
# Include Hydrus backend (configuration optional)
self._backends["hydrus"] = HydrusStorageBackend(config=config)
# Include Debrid backend (API key optional - will raise on use if not provided)
if debrid_api_key:
self._backends["debrid"] = DebridStorageBackend(api_key=debrid_api_key)
# Include Matrix backend
self._backends["matrix"] = MatrixStorageBackend()