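"""get-relationship cmdlet: print Hydrus relationships for the selected file.

Resolves the selected result's SHA256 hash (or an explicit ``-hash`` override),
asks the Hydrus client for its file relationships, prints the related files as
a small table (title, type, duration, size), and emits them back into the
pipeline for further chaining.

Usage:
    get-relationship [-hash <sha256>]

Aliases: get-rel, get-relationships, get-file-relationships.
"""
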
from __future__ import annotations

import json
import sys
from typing import Any, Dict, List, Optional, Sequence

from helper.logger import log

from . import register
import models
import pipeline as ctx
from helper import hydrus as hydrus_wrapper
from ._shared import Cmdlet, CmdletArg, normalize_hash, fmt_bytes

CMDLET = Cmdlet(
    name="get-relationship",
    summary="Print Hydrus relationships for the selected file.",
    usage="get-relationship [-hash <sha256>]",
    args=[
        CmdletArg("-hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
    ],
    details=[
        "- Lists relationship data as returned by Hydrus.",
    ],
)


@register(["get-rel", "get-relationship", "get-relationships", "get-file-relationships"]) # aliases
|
||
|
|
def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||
|
|
# Help
|
||
|
|
try:
|
||
|
|
if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in _args):
|
||
|
|
log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
|
||
|
|
return 0
|
||
|
|
except Exception:
|
||
|
|
pass
|
||
|
|
|
||
|
|
    # Parse -hash override
    override_hash: str | None = None
    args_list = list(_args)
    i = 0
    while i < len(args_list):
        a = args_list[i]
        low = str(a).lower()
        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args_list):
            override_hash = str(args_list[i + 1]).strip()
            break
        i += 1
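
    # Example: `get-relationship -hash 0123abcd...` (full 64-char SHA256)
    # overrides the hash taken from the piped selection.
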
    # Handle @N selection which creates a list - extract the first item
    if isinstance(result, list) and len(result) > 0:
        result = result[0]

    hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
    if not hash_hex:
        log("Selected result does not include a Hydrus hash", file=sys.stderr)
        return 1

    try:
        client = hydrus_wrapper.get_client(config)
    except Exception as exc:
        log(f"Hydrus client unavailable: {exc}", file=sys.stderr)
        return 1

    if client is None:
        log("Hydrus client unavailable", file=sys.stderr)
        return 1

    try:
        rel = client.get_file_relationships(hash_hex)
    except Exception as exc:
        log(f"Hydrus relationships fetch failed: {exc}", file=sys.stderr)
        return 1

    if not rel:
        log("No relationships found.")
        return 0

    # Extract file_relationships from response
    file_rels = rel.get("file_relationships", {})
    if not file_rels:
        log("No relationships found.")
        return 0

    # Get the relationships dict for this specific hash
    this_file_rels = file_rels.get(hash_hex)
    if not this_file_rels:
        log("No relationships found.")
        return 0

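    # Rough shape of the per-hash payload (per the Hydrus Client API docs; the
    # numeric keys are relationship types: "0" potential duplicates, "1" false
    # positives, "3" alternates, "8" duplicates; the king_* fields describe the
    # duplicate group's best file):
    #   {
    #       "is_king": true,
    #       "king": "<sha256>",
    #       "king_is_on_file_domain": true,
    #       "king_is_local": true,
    #       "0": ["<sha256>", ...],
    #       "1": [],
    #       "3": [],
    #       "8": ["<sha256>", ...]
    #   }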
    # Extract related hashes from all relationship types.
    # Keys "0", "1", "3", "8" are relationship type IDs; values are lists of hashes.
    related_hashes = []
    for rel_type_id, hash_list in this_file_rels.items():
        # Skip the king/bookkeeping keys; only the per-type hash lists matter here
        if rel_type_id in {"is_king", "king", "king_is_on_file_domain", "king_is_local"}:
            continue
        if isinstance(hash_list, list):
            for rel_hash in hash_list:
                if isinstance(rel_hash, str) and rel_hash and rel_hash != hash_hex:
                    related_hashes.append(rel_hash)

    # Remove duplicates while preserving order
    seen = set()
    unique_hashes = []
    for h in related_hashes:
        if h not in seen:
            seen.add(h)
            unique_hashes.append(h)

    if not unique_hashes:
        log("No related files found.")
        return 0

    # Fetch metadata for all related files
    try:
        metadata_payload = client.fetch_file_metadata(
            hashes=unique_hashes,
            include_service_keys_to_tags=True,
            include_duration=True,
            include_size=True,
            include_mime=True,
        )
    except Exception as exc:
        log(f"Hydrus metadata fetch failed: {exc}", file=sys.stderr)
        return 1

    metadata_list = metadata_payload.get("metadata") if isinstance(metadata_payload, dict) else None
    if not isinstance(metadata_list, list):
        log("Hydrus metadata response did not contain a metadata list", file=sys.stderr)
        return 1

    # Build metadata map by hash
    meta_by_hash: Dict[str, Dict[str, Any]] = {}
    for item in metadata_list:
        if isinstance(item, dict):
            item_hash = normalize_hash(item.get("hash"))
            if item_hash:
                meta_by_hash[item_hash] = item

    # Helper functions for formatting
    def _format_duration(seconds: Optional[float]) -> str:
        if seconds is None:
            return ""
        try:
            s = int(seconds)
            hours = s // 3600
            minutes = (s % 3600) // 60
            secs = s % 60
            if hours > 0:
                return f"{hours}:{minutes:02d}:{secs:02d}"
            return f"{minutes}:{secs:02d}"
        except Exception:
            return ""
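    # e.g. _format_duration(3723) -> "1:02:03"; _format_duration(59) -> "0:59";
    # _format_duration(None) -> ""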

    def _get_title(meta: Dict[str, Any]) -> str:
        # Try to extract title from tags
        tags_payload = meta.get("tags")
        if isinstance(tags_payload, dict):
            for service_data in tags_payload.values():
                if isinstance(service_data, dict):
                    storage_tags = service_data.get("storage_tags")
                    if isinstance(storage_tags, dict):
                        for tag_list in storage_tags.values():
                            if isinstance(tag_list, list):
                                for tag in tag_list:
                                    tag_str = str(tag).lower()
                                    if tag_str.startswith("title:"):
                                        return str(tag)[6:].strip()
        # Fallback to hash prefix
        h = meta.get("hash")
        return str(h)[:12] if h else "unknown"
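    # e.g. a "title:Big Buck Bunny" tag yields "Big Buck Bunny"; without a title
    # tag, the first 12 characters of the hash are used instead.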

    def _get_mime_type(meta: Dict[str, Any]) -> str:
        mime = meta.get("mime", "")
        if not mime:
            return ""
        # Extract type from mime (e.g., "video/mp4" -> "video")
        parts = str(mime).split("/")
        return parts[0] if parts else ""

    # Print header and separator; column widths match the rows printed below
    log(f"{'#':>2} | {'Title':<25} | {'Type':<5} | {'Duration':<8} | Size")
    log("---+" + "-" * 27 + "+" + "-" * 7 + "+" + "-" * 10 + "+" + "-" * 8)
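    # Illustrative row (values depend on the file's tags and metadata):
    #  1 | Big Buck Bunny            | video | 9:56     | 12.3 MB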

    # Create result objects for each related file
    results: List[Any] = []

    # Print each related file
    for idx, rel_hash in enumerate(unique_hashes, start=1):
        meta = meta_by_hash.get(rel_hash)
        if not meta:
            continue

        title = _get_title(meta)
        mime_type = _get_mime_type(meta)

        # Get duration
        duration_value = meta.get("duration")
        if duration_value is None and isinstance(meta.get("metadata"), dict):
            duration_value = meta["metadata"].get("duration")
        duration_str = _format_duration(duration_value)

        # Get size
        size = meta.get("size") or meta.get("file_size")
        size_str = fmt_bytes(size) if size else ""

        # Format and print row
        title_display = title[:25].ljust(25)
        type_display = mime_type[:5].ljust(5)
        duration_display = duration_str[:8].ljust(8)
        size_display = size_str[:7].ljust(7)

        log(f"{idx:2d} | {title_display} | {type_display} | {duration_display} | {size_display}")

        # Create result object for pipeline
        result_obj = type("RelatedFile", (), {
            "hash_hex": rel_hash,
            "title": title,
            "media_kind": mime_type or "other",
            "size": size,
            "duration": duration_value,
            "known_urls": [],
            "annotations": [],
            "columns": [
                ("Title", title),
                ("Type", mime_type),
                ("Duration", duration_str),
                ("Size", size_str),
            ],
        })()
        results.append(result_obj)

    # Emit results to pipeline
    try:
        ctx._PIPE_EMITS.extend(results)
    except Exception:
        pass

    return 0