This commit is contained in:
nose
2025-12-16 01:45:01 -08:00
parent a03eb0d1be
commit 9873280f0e
36 changed files with 4911 additions and 1225 deletions

View File

@@ -11,7 +11,7 @@ import subprocess
import sys
import time
from SYS.logger import log, debug
from SYS.logger import log
from SYS.utils_constant import ALL_SUPPORTED_EXTENSIONS as GLOBAL_SUPPORTED_EXTENSIONS
import tempfile
import logging
@@ -360,24 +360,24 @@ class HydrusNetwork:
hashes = self._ensure_hashes(file_hashes)
if len(hashes) == 1:
body = {"hash": hashes[0], "url_to_add": url}
return self._post("/add_url/associate_url", data=body)
return self._post("/add_urls/associate_url", data=body)
results: dict[str, Any] = {}
for file_hash in hashes:
body = {"hash": file_hash, "url_to_add": url}
results[file_hash] = self._post("/add_url/associate_url", data=body)
results[file_hash] = self._post("/add_urls/associate_url", data=body)
return {"batched": results}
def delete_url(self, file_hashes: Union[str, Iterable[str]], url: str) -> dict[str, Any]:
hashes = self._ensure_hashes(file_hashes)
if len(hashes) == 1:
body = {"hash": hashes[0], "url_to_delete": url}
return self._post("/add_url/associate_url", data=body)
return self._post("/add_urls/associate_url", data=body)
results: dict[str, Any] = {}
for file_hash in hashes:
body = {"hash": file_hash, "url_to_delete": url}
results[file_hash] = self._post("/add_url/associate_url", data=body)
results[file_hash] = self._post("/add_urls/associate_url", data=body)
return {"batched": results}
def set_notes(
@@ -436,35 +436,61 @@ class HydrusNetwork:
def set_relationship(self, hash_a: str, hash_b: str, relationship: Union[str, int], do_default_content_merge: bool = False) -> dict[str, Any]:
"""Set a relationship between two files in Hydrus.
This wraps Hydrus Client API: POST /manage_file_relationships/set_file_relationships.
Hydrus relationship enum (per Hydrus developer API docs):
- 0: set as potential duplicates
- 1: set as false positives
- 2: set as same quality (duplicates)
- 3: set as alternates
- 4: set A as better (duplicates)
Args:
hash_a: First file hash
hash_b: Second file hash
relationship: Relationship type - can be string ("king", "alt", "related", etc)
or integer (0-4):
- 0 = duplicates
- 1 = alternate
- 2 = not_related
- 3 = related
- 4 = king
do_default_content_merge: Whether to perform default content merge
hash_a: First file SHA256 hex
hash_b: Second file SHA256 hex
relationship: Relationship type as string or integer enum (0-4)
do_default_content_merge: Whether to perform default duplicate content merge
Returns:
Response from Hydrus API
"""
# Convert string relationship types to integers
if isinstance(relationship, str):
rel_map = {
"duplicates": 0,
"duplicate": 0,
"alt": 1,
"alternate": 1,
"not_related": 2,
"not related": 2,
# Potential duplicates
"potential": 0,
"potentials": 0,
"potential duplicate": 0,
"potential duplicates": 0,
# False positives
"false positive": 1,
"false_positive": 1,
"false positives": 1,
"false_positives": 1,
"not related": 1,
"not_related": 1,
# Duplicates (same quality)
"duplicate": 2,
"duplicates": 2,
"same quality": 2,
"same_quality": 2,
"equal": 2,
# Alternates
"alt": 3,
"alternate": 3,
"alternates": 3,
"alternative": 3,
"related": 3,
# Better/worse (duplicates)
"better": 4,
"a better": 4,
"a_better": 4,
# Back-compat: some older call sites used 'king' for primary.
# Hydrus does not accept 'king' as a relationship; this maps to 'A is better'.
"king": 4,
}
relationship = rel_map.get(relationship.lower(), 3) # Default to "related" (3)
relationship = rel_map.get(relationship.lower().strip(), 3) # Default to alternates
body = {
"relationships": [

View File

@@ -13,7 +13,7 @@ from SYS.logger import log, debug
import time
import logging
from typing import Any, Dict, Optional, Set, List, Sequence, Tuple
from urllib.parse import urlencode, urlparse
from urllib.parse import urlparse
from .HTTP import HTTPClient
logger = logging.getLogger(__name__)
@@ -51,11 +51,34 @@ def _ping_alldebrid(base_url: str) -> Tuple[bool, Optional[str]]:
class AllDebridClient:
"""Client for AllDebrid API."""
# Try both v4 and v3 APIs
BASE_url = [
"https://api.alldebrid.com/v4",
"https://api.alldebrid.com/v3",
]
# Default to v4 for most endpoints.
# Some endpoints have a newer /v4.1/ variant (e.g., magnet/status, user/hosts, pin/get).
BASE_URL = "https://api.alldebrid.com/v4"
BASE_URL_V41 = "https://api.alldebrid.com/v4.1"
# Endpoints documented as POST in v4 API.
_POST_ENDPOINTS: Set[str] = {
"pin/check",
"user/verif",
"user/verif/resend",
"user/notification/clear",
"link/infos",
"link/redirector",
"link/unlock",
"link/streaming",
"link/delayed",
"magnet/upload",
"magnet/upload/file",
"magnet/status", # v4.1 variant exists; method stays POST
"magnet/files",
"magnet/delete",
"magnet/restart",
"user/links/save",
"user/links/delete",
"user/history/delete",
"voucher/get",
"voucher/generate",
}
def __init__(self, api_key: str):
"""Initialize AllDebrid client with API key.
@@ -66,7 +89,7 @@ class AllDebridClient:
self.api_key = api_key.strip()
if not self.api_key:
raise AllDebridError("AllDebrid API key is empty")
self.base_url = self.BASE_url[0] # Start with v4
self.base_url = self.BASE_URL # Start with v4
# Init-time availability validation (cached per process)
fingerprint = f"base:{self.base_url}" # /ping does not require the api key
@@ -80,7 +103,13 @@ class AllDebridClient:
if not ok:
raise AllDebridError(reason or "AllDebrid unavailable")
def _request(self, endpoint: str, params: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
def _request(
self,
endpoint: str,
params: Optional[Dict[str, Any]] = None,
*,
method: Optional[str] = None,
) -> Dict[str, Any]:
"""Make a request to AllDebrid API.
Args:
@@ -95,25 +124,38 @@ class AllDebridClient:
"""
if params is None:
params = {}
# Add API key to params
params['apikey'] = self.api_key
# Determine HTTP method (v4 docs default to POST for most write/unlock endpoints).
if method is None:
method = "POST" if endpoint in self._POST_ENDPOINTS else "GET"
method = str(method).upper().strip() or "GET"
# Auth header is the preferred mechanism per v4.1 docs.
# Keep apikey in params too for backward compatibility.
request_params: Dict[str, Any] = dict(params)
request_params["apikey"] = self.api_key
url = f"{self.base_url}/{endpoint}"
query_string = urlencode(params)
full_url = f"{url}?{query_string}"
logger.debug(f"[AllDebrid] {endpoint} request to {full_url[:80]}...")
# Avoid logging full URLs with query params (can leak apikey).
logger.debug(f"[AllDebrid] {method} {endpoint} @ {self.base_url}")
try:
# Pass timeout to HTTPClient init, not to get()
with HTTPClient(timeout=30.0, headers={'User-Agent': 'downlow/1.0'}) as client:
headers = {
"User-Agent": "downlow/1.0",
"Authorization": f"Bearer {self.api_key}",
}
# Pass timeout to HTTPClient init.
with HTTPClient(timeout=30.0, headers=headers) as client:
try:
response = client.get(full_url)
if method == "POST":
response = client.post(url, data=request_params)
else:
response = client.get(url, params=request_params)
response.raise_for_status()
except Exception as req_err:
# Log detailed error info
logger.error(f"[AllDebrid] Request error to {full_url[:80]}: {req_err}", exc_info=True)
logger.error(f"[AllDebrid] Request error to {endpoint}: {req_err}", exc_info=True)
if hasattr(req_err, 'response') and req_err.response is not None: # type: ignore
try:
error_body = req_err.response.content.decode('utf-8') # type: ignore
@@ -190,13 +232,26 @@ class AllDebridClient:
Raises:
AllDebridError: If request fails
"""
try:
response = self._request('host', {'name': hostname})
if response.get('status') == 'success':
return response.get('data', {})
# The v4 API does not expose a `/host` endpoint. Use `/hosts/domains` and
# check membership.
if not hostname:
return {}
try:
host = str(hostname).strip().lower()
if host.startswith("www."):
host = host[4:]
domains = self.get_supported_hosters()
if not domains:
return {}
for category in ("hosts", "streams", "redirectors"):
values = domains.get(category)
if isinstance(values, list) and any(str(d).lower() == host for d in values):
return {"supported": True, "category": category, "domain": host}
return {"supported": False, "domain": host}
except AllDebridError:
raise
except Exception as exc:
@@ -212,7 +267,8 @@ class AllDebridClient:
AllDebridError: If request fails
"""
try:
response = self._request('user/profile')
# v4 endpoint is `/user`
response = self._request('user')
if response.get('status') == 'success':
return response.get('data', {})
@@ -227,7 +283,8 @@ class AllDebridClient:
"""Get list of all supported hosters from AllDebrid API.
Returns:
Dict mapping domain to host info (status, name, etc)
Dict with keys `hosts`, `streams`, `redirectors` each containing an array
of domains.
Raises:
AllDebridError: If request fails
@@ -237,7 +294,6 @@ class AllDebridClient:
if response.get('status') == 'success':
data = response.get('data', {})
# The API returns hosts keyed by domain
return data if isinstance(data, dict) else {}
return {}
@@ -334,7 +390,7 @@ class AllDebridClient:
# Use v4.1 endpoint for better response format
# Temporarily override base_url for this request
old_base = self.base_url
self.base_url = "https://api.alldebrid.com/v4.1"
self.base_url = self.BASE_URL_V41
try:
response = self._request('magnet/status', {'id': str(magnet_id)})
@@ -358,8 +414,48 @@ class AllDebridClient:
raise
except Exception as exc:
raise AllDebridError(f"Failed to get magnet status: {exc}")
def magnet_list(self) -> List[Dict[str, Any]]:
    """List magnets stored in the AllDebrid account.

    Calling the magnet/status endpoint without an id makes the API return
    every magnet on the account.

    Returns:
        List of magnet dicts; empty list when the account has none or the
        API reports a non-success status.

    Raises:
        AllDebridError: If the request itself fails.
    """
    try:
        # v4.1 has the richer response format for magnet/status; swap the
        # base URL only for the duration of this one request.
        previous_base = self.base_url
        self.base_url = self.BASE_URL_V41
        try:
            response = self._request('magnet/status')
        finally:
            self.base_url = previous_base
        if response.get('status') != 'success':
            return []
        magnets = response.get('data', {}).get('magnets', [])
        if isinstance(magnets, list):
            return [entry for entry in magnets if isinstance(entry, dict)]
        if isinstance(magnets, dict):
            # Some API variants return a dict: a lone magnet object gets
            # wrapped, an id->magnet mapping yields its values.
            if 'id' in magnets:
                return [magnets]
            return [entry for entry in magnets.values() if isinstance(entry, dict)]
        return []
    except AllDebridError:
        raise
    except Exception as exc:
        raise AllDebridError(f"Failed to list magnets: {exc}")
def magnet_status_live(self, magnet_id: int, session: int = None, counter: int = 0) -> Dict[str, Any]:
def magnet_status_live(self, magnet_id: int, session: Optional[int] = None, counter: int = 0) -> Dict[str, Any]:
"""Get live status of a magnet using delta sync mode.
The live mode endpoint provides real-time progress by only sending
@@ -388,21 +484,32 @@ class AllDebridClient:
raise AllDebridError(f"Invalid magnet ID: {magnet_id}")
try:
# For single magnet queries, just use regular endpoint with ID
# The "live mode" with session/counter is for multi-magnet dashboards
# where bandwidth savings from diffs matter
response = self._request('magnet/status', {'id': magnet_id})
# v4.1 is the up-to-date endpoint for magnet/status.
old_base = self.base_url
self.base_url = self.BASE_URL_V41
try:
payload: Dict[str, Any] = {"id": str(magnet_id)}
if session is not None:
payload["session"] = str(int(session))
payload["counter"] = str(int(counter))
response = self._request('magnet/status', payload)
finally:
self.base_url = old_base
if response.get('status') == 'success':
data = response.get('data', {})
magnets = data.get('magnets', [])
# Handle list response
# For specific magnet id, return the first match from the array.
if isinstance(magnets, list) and len(magnets) > 0:
return magnets[0]
# Some API variants may return a dict.
if isinstance(magnets, dict) and magnets:
return magnets
raise AllDebridError(f"No magnet found with ID {magnet_id}")
raise AllDebridError(f"API error: {response.get('error', 'Unknown')}")
except AllDebridError:
raise
@@ -784,28 +891,65 @@ def unlock_link_cmdlet(result: Any, args: Sequence[str], config: Dict[str, Any])
Returns:
0 on success, 1 on failure
"""
try:
from .link_utils import (
extract_link,
get_api_key,
add_direct_link_to_result,
)
except ImportError as e:
log(f"Required modules unavailable: {e}", file=sys.stderr)
return 1
def _extract_link_from_args_or_result(result_obj: Any, argv: Sequence[str]) -> Optional[str]:
# Prefer an explicit URL in args.
for a in argv or []:
if isinstance(a, str) and a.startswith(("http://", "https://")):
return a.strip()
# Fall back to common pipeline fields.
if isinstance(result_obj, dict):
for key in ("url", "source_url", "path", "target"):
v = result_obj.get(key)
if isinstance(v, str) and v.startswith(("http://", "https://")):
return v.strip()
return None
def _get_alldebrid_api_key_from_config(cfg: Dict[str, Any]) -> Optional[str]:
# Current config format
try:
provider_cfg = cfg.get("provider") if isinstance(cfg, dict) else None
ad_cfg = provider_cfg.get("alldebrid") if isinstance(provider_cfg, dict) else None
api_key = ad_cfg.get("api_key") if isinstance(ad_cfg, dict) else None
if isinstance(api_key, str) and api_key.strip():
return api_key.strip()
except Exception:
pass
# Legacy config format fallback (best-effort)
try:
debrid_cfg = cfg.get("Debrid") if isinstance(cfg, dict) else None
api_key = None
if isinstance(debrid_cfg, dict):
api_key = debrid_cfg.get("All-debrid") or debrid_cfg.get("AllDebrid")
if isinstance(api_key, str) and api_key.strip():
return api_key.strip()
except Exception:
pass
return None
def _add_direct_link_to_result(result_obj: Any, direct_link: str, original_link: str) -> None:
if not isinstance(direct_link, str) or not direct_link.strip():
return
if isinstance(result_obj, dict):
# Keep original and promote unlocked link to the fields commonly used downstream.
result_obj.setdefault("source_url", original_link)
result_obj["url"] = direct_link
result_obj["path"] = direct_link
# Get link from args or result
link = extract_link(result, args)
link = _extract_link_from_args_or_result(result, args)
if not link:
log("No valid URL provided", file=sys.stderr)
return 1
# Get AllDebrid API key from config
api_key = get_api_key(config, "AllDebrid", "Debrid.All-debrid")
api_key = _get_alldebrid_api_key_from_config(config)
if not api_key:
log("AllDebrid API key not configured in Debrid.All-debrid", file=sys.stderr)
log("AllDebrid API key not configured (provider.alldebrid.api_key)", file=sys.stderr)
return 1
# Try to unlock the link
@@ -816,7 +960,7 @@ def unlock_link_cmdlet(result: Any, args: Sequence[str], config: Dict[str, Any])
debug(f"✓ Direct link: {direct_link}")
# Update result with direct link
add_direct_link_to_result(result, direct_link, link)
_add_direct_link_to_result(result, direct_link, link)
# Return the updated result via pipeline context
# Note: The cmdlet wrapper will handle emitting to pipeline

View File

@@ -609,12 +609,153 @@ class API_folder_store:
try:
metadata[field] = json.loads(metadata[field])
except (json.JSONDecodeError, TypeError):
metadata[field] = [] if field == 'url' else []
metadata[field] = [] if field == 'url' else {}
# Ensure relationships is always a dict
if metadata.get('relationships') is None:
metadata['relationships'] = {}
if not isinstance(metadata.get('relationships'), dict):
metadata['relationships'] = {}
return metadata
except Exception as e:
logger.error(f"Error getting metadata for hash {file_hash}: {e}", exc_info=True)
return None
def set_relationship_by_hash(self, file_hash: str, related_file_hash: str, rel_type: str = "alt", *, bidirectional: bool = True) -> None:
    """Set a relationship between two files by hash.

    This is the store/hash-first API. It avoids any dependency on local filesystem
    paths and only requires that both hashes exist in the DB.

    Args:
        file_hash: Hash of the primary file (normalized to lowercase here).
        related_file_hash: Hash of the file being linked to.
        rel_type: Relationship bucket name; blank values fall back to "alt".
        bidirectional: When True, the reverse link is also written onto the
            related file's metadata row.

    Raises:
        ValueError: If either hash is blank or not present in the files table.
        Exception: DB errors are logged and re-raised.
    """
    try:
        # Normalize inputs: hashes are stored as lowercase hex in the DB.
        file_hash = str(file_hash or "").strip().lower()
        related_file_hash = str(related_file_hash or "").strip().lower()
        rel_type = str(rel_type or "alt").strip() or "alt"
        if not file_hash or not related_file_hash:
            raise ValueError("Missing file hash for relationship")
        if file_hash == related_file_hash:
            # A file cannot relate to itself; quietly ignore.
            return
        cursor = self.connection.cursor()
        # Ensure both hashes exist in files table (metadata has FK to files)
        cursor.execute("SELECT 1 FROM files WHERE hash = ?", (file_hash,))
        if not cursor.fetchone():
            raise ValueError(f"Hash not found in store DB: {file_hash}")
        cursor.execute("SELECT 1 FROM files WHERE hash = ?", (related_file_hash,))
        if not cursor.fetchone():
            raise ValueError(f"Hash not found in store DB: {related_file_hash}")
        # Load current relationships for the main file
        cursor.execute("SELECT relationships FROM metadata WHERE hash = ?", (file_hash,))
        row = cursor.fetchone()
        relationships_str = row[0] if row else None
        try:
            relationships = json.loads(relationships_str) if relationships_str else {}
        except (json.JSONDecodeError, TypeError):
            # Corrupt or legacy JSON blob: start over with an empty mapping.
            relationships = {}
        if not isinstance(relationships, dict):
            relationships = {}
        # relationships is a {rel_type: [hash, ...]} mapping; append with dedupe.
        relationships.setdefault(rel_type, [])
        if not isinstance(relationships[rel_type], list):
            relationships[rel_type] = []
        if related_file_hash not in relationships[rel_type]:
            relationships[rel_type].append(related_file_hash)
        # Upsert: replace only the relationships column and refresh timestamps,
        # leaving any other metadata columns on the row intact.
        cursor.execute(
            """
            INSERT INTO metadata (hash, relationships)
            VALUES (?, ?)
            ON CONFLICT(hash) DO UPDATE SET
                relationships = excluded.relationships,
                time_modified = CURRENT_TIMESTAMP,
                updated_at = CURRENT_TIMESTAMP
            """,
            (file_hash, json.dumps(relationships)),
        )
        if bidirectional:
            # Update the related file as well
            cursor.execute("SELECT relationships FROM metadata WHERE hash = ?", (related_file_hash,))
            row2 = cursor.fetchone()
            relationships_str2 = row2[0] if row2 else None
            try:
                reverse_relationships = json.loads(relationships_str2) if relationships_str2 else {}
            except (json.JSONDecodeError, TypeError):
                reverse_relationships = {}
            if not isinstance(reverse_relationships, dict):
                reverse_relationships = {}
            # Same append-with-dedupe dance, pointing back at the primary file.
            reverse_relationships.setdefault(rel_type, [])
            if not isinstance(reverse_relationships[rel_type], list):
                reverse_relationships[rel_type] = []
            if file_hash not in reverse_relationships[rel_type]:
                reverse_relationships[rel_type].append(file_hash)
            cursor.execute(
                """
                INSERT INTO metadata (hash, relationships)
                VALUES (?, ?)
                ON CONFLICT(hash) DO UPDATE SET
                    relationships = excluded.relationships,
                    time_modified = CURRENT_TIMESTAMP,
                    updated_at = CURRENT_TIMESTAMP
                """,
                (related_file_hash, json.dumps(reverse_relationships)),
            )
        # Single commit covers both directions of the link.
        self.connection.commit()
    except Exception as e:
        logger.error(f"Error setting relationship by hash: {e}", exc_info=True)
        raise
def find_files_pointing_to_hash(self, target_hash: str) -> List[Dict[str, Any]]:
    """Find all files that have a relationship pointing to the target hash.

    Args:
        target_hash: Hash to search for (normalized to lowercase here).

    Returns:
        List of {"hash", "path", "type"} dicts — one entry per (file,
        relationship-type) pair that references the target. Empty list when
        the hash is blank, nothing matches, or a DB error occurs.
    """
    try:
        target_hash = str(target_hash or "").strip().lower()
        if not target_hash:
            return []
        cursor = self.connection.cursor()
        # LIKE is only a cheap textual prefilter over the JSON column;
        # exact membership is verified below after parsing each blob.
        cursor.execute(
            """
            SELECT f.hash, f.file_path, m.relationships
            FROM metadata m
            JOIN files f ON m.hash = f.hash
            WHERE m.relationships LIKE ?
            """,
            (f"%{target_hash}%",),
        )
        results: List[Dict[str, Any]] = []
        for row in cursor.fetchall():
            src_hash = row[0]
            src_path = row[1]
            rels_json = row[2]
            try:
                rels = json.loads(rels_json) if rels_json else {}
            except (json.JSONDecodeError, TypeError):
                # Unparseable relationships blob: skip this row entirely.
                continue
            if not isinstance(rels, dict):
                continue
            for r_type, hashes in rels.items():
                if not isinstance(hashes, list):
                    continue
                # Normalize stored hashes before comparing, matching the
                # lowercase normalization applied to target_hash above.
                if target_hash in [str(h or "").strip().lower() for h in hashes]:
                    results.append({
                        "hash": src_hash,
                        "path": src_path,
                        "type": r_type,
                    })
        return results
    except Exception as e:
        logger.error(f"Error finding files pointing to hash {target_hash}: {e}", exc_info=True)
        return []
def save_metadata(self, file_path: Path, metadata: Dict[str, Any]) -> None:
"""Save metadata for a file."""
@@ -961,7 +1102,7 @@ class API_folder_store:
logger.error(f"Error updating metadata for hash {file_hash}: {e}", exc_info=True)
raise
def set_relationship(self, file_path: Path, related_file_path: Path, rel_type: str = "alt") -> None:
def set_relationship(self, file_path: Path, related_file_path: Path, rel_type: str = "alt", *, bidirectional: bool = True) -> None:
"""Set a relationship between two local files.
Args:
@@ -1018,47 +1159,50 @@ class API_folder_store:
logger.debug(f"Set {rel_type} relationship: {str_path} ({file_hash}) -> {str_related_path} ({related_file_hash})")
# Set reverse relationship (bidirectional)
# For 'alt' and 'related', the reverse is the same
# For 'king', the reverse is 'subject' (or we just use 'alt' for simplicity as Hydrus does)
# Let's use the same type for now to keep it simple and consistent with Hydrus 'alternates'
reverse_type = rel_type
# Update the related file
cursor.execute("""
SELECT relationships FROM metadata WHERE hash = ?
""", (related_file_hash,))
row = cursor.fetchone()
relationships_str = row[0] if row else None
try:
if relationships_str:
reverse_relationships = json.loads(relationships_str)
else:
if bidirectional:
# Set reverse relationship (bidirectional)
# For 'alt' and 'related', the reverse is the same
# For 'king', the reverse is 'subject' (or we just use 'alt' for simplicity as Hydrus does)
# Let's use the same type for now to keep it simple and consistent with Hydrus 'alternates'
reverse_type = rel_type
# Update the related file
cursor.execute("""
SELECT relationships FROM metadata WHERE hash = ?
""", (related_file_hash,))
row = cursor.fetchone()
relationships_str = row[0] if row else None
try:
if relationships_str:
reverse_relationships = json.loads(relationships_str)
else:
reverse_relationships = {}
except (json.JSONDecodeError, TypeError):
reverse_relationships = {}
except (json.JSONDecodeError, TypeError):
reverse_relationships = {}
if not isinstance(reverse_relationships, dict):
reverse_relationships = {}
if reverse_type not in reverse_relationships:
reverse_relationships[reverse_type] = []
if file_hash not in reverse_relationships[reverse_type]:
reverse_relationships[reverse_type].append(file_hash)
# Save the updated reverse relationships
cursor.execute("""
INSERT INTO metadata (hash, relationships)
VALUES (?, ?)
ON CONFLICT(hash) DO UPDATE SET
relationships = excluded.relationships,
time_modified = CURRENT_TIMESTAMP
""", (related_file_hash, json.dumps(reverse_relationships)))
self.connection.commit()
if not isinstance(reverse_relationships, dict):
reverse_relationships = {}
if reverse_type not in reverse_relationships:
reverse_relationships[reverse_type] = []
if file_hash not in reverse_relationships[reverse_type]:
reverse_relationships[reverse_type].append(file_hash)
# Save the updated reverse relationships
cursor.execute("""
INSERT INTO metadata (hash, relationships)
VALUES (?, ?)
ON CONFLICT(hash) DO UPDATE SET
relationships = excluded.relationships,
time_modified = CURRENT_TIMESTAMP
""", (related_file_hash, json.dumps(reverse_relationships)))
self.connection.commit()
else:
self.connection.commit()
except Exception as e:
logger.error(f"Error setting relationship: {e}", exc_info=True)
@@ -1074,44 +1218,22 @@ class API_folder_store:
List of dicts with {path, type} for files pointing to target
"""
try:
# Get the hash of the target file
target_hash = sha256_file(target_path)
# Prefer the DB's stored identity hash for the target.
target_hash = None
try:
target_hash = self.get_file_hash(target_path)
except Exception:
target_hash = None
# Fall back to hashing bytes if the path isn't known to the DB.
if not target_hash:
target_hash = sha256_file(target_path)
if not target_hash:
logger.warning(f"Cannot find files pointing to {target_path}: unable to compute hash")
return []
cursor = self.connection.cursor()
# Scan all metadata (this might be slow on huge DBs but fine for local library)
# We select file_path and relationships json
cursor.execute("""
SELECT f.file_path, m.relationships
FROM metadata m
JOIN files f ON m.hash = f.hash
WHERE m.relationships LIKE ?
""", (f"%{target_hash}%",))
results = []
for row in cursor.fetchall():
f_path = row[0]
rels_json = row[1]
try:
rels = json.loads(rels_json)
if isinstance(rels, dict):
for r_type, hashes in rels.items():
if isinstance(hashes, list):
# Check if target hash is in this relationship type
if target_hash in hashes:
results.append({
"path": f_path,
"type": r_type
})
except (json.JSONDecodeError, TypeError):
continue
return results
return self.find_files_pointing_to_hash(target_hash)
except Exception as e:
logger.error(f"Error finding files pointing to {target_path}: {e}", exc_info=True)
return []
@@ -2630,7 +2752,7 @@ class LocalLibrarySearchOptimizer:
return None
return self.db.search_hash(file_hash)
def set_relationship(self, file_path: Path, related_file_path: Path, rel_type: str = "alt") -> None:
def set_relationship(self, file_path: Path, related_file_path: Path, rel_type: str = "alt", *, bidirectional: bool = True) -> None:
"""Set a relationship between two files in the database.
Delegates to LocalLibraryDB.set_relationship().
@@ -2642,7 +2764,7 @@ class LocalLibrarySearchOptimizer:
"""
if not self.db:
return
self.db.set_relationship(file_path, related_file_path, rel_type)
self.db.set_relationship(file_path, related_file_path, rel_type, bidirectional=bidirectional)
def find_files_pointing_to(self, target_path: Path) -> List[Dict[str, Any]]:
"""Find all files that have a relationship pointing to the target path."""