@@ -29,7 +29,7 @@ from Store import Store
CMDLET = Cmdlet(
name="get-relationship",
summary="Print relationships for the selected file (Hydrus or Local).",
usage="get-relationship [-query \"hash:<sha256>\"]",
usage='get-relationship [-query "hash:<sha256>"]',
alias=[],
arg=[
SharedArgs.QUERY,
@@ -40,6 +40,7 @@ CMDLET = Cmdlet(
],
)

def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
# Help
if should_show_help(_args):
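A minimal usage sketch (an assumption, not part of this commit): from the usage string above and the "@1" selection hint later in this cmdlet, invocation presumably looks like

get-relationship -query "hash:<sha256>"
@1 | get-relationship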
@@ -64,44 +65,63 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
continue
i += 1

override_hash: str | None = sh.parse_single_hash_query(override_query) if override_query else None
override_hash: str | None = (
sh.parse_single_hash_query(override_query) if override_query else None
)
if override_query and not override_hash:
log("get-relationship requires -query \"hash:<sha256>\"", file=sys.stderr)
log('get-relationship requires -query "hash:<sha256>"', file=sys.stderr)
return 1

# Handle @N selection which creates a list
# This cmdlet is single-subject; require disambiguation when multiple items are provided.
if isinstance(result, list):
if len(result) == 0:
result = None
elif len(result) > 1 and not override_hash:
log("get-relationship expects a single item; select one row (e.g. @1) or pass -query \"hash:<sha256>\"", file=sys.stderr)
log(
'get-relationship expects a single item; select one row (e.g. @1) or pass -query "hash:<sha256>"',
file=sys.stderr,
)
return 1
else:
result = result[0]

# Initialize results collection
found_relationships = [] # List of dicts: {hash, type, title, path, store}
found_relationships = []  # List of dicts: {hash, type, title, path, store}
source_title = "Unknown"

def _add_relationship(entry: Dict[str, Any]) -> None:
"""Add relationship if not already present by hash or path."""
for existing in found_relationships:
if entry.get("hash") and str(existing.get("hash", "")).lower() == str(entry["hash"]).lower():
if (
entry.get("hash")
and str(existing.get("hash", "")).lower() == str(entry["hash"]).lower()
):
return
if entry.get("path") and str(existing.get("path", "")).lower() == str(entry["path"]).lower():
if (
entry.get("path")
and str(existing.get("path", "")).lower() == str(entry["path"]).lower()
):
return
found_relationships.append(entry)

# Store/hash-first subject resolution
store_name: Optional[str] = override_store
if not store_name:
store_name = get_field(result, "store")

hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_hash_for_operation(None, result))
hash_hex = (
normalize_hash(override_hash)
if override_hash
else normalize_hash(get_hash_for_operation(None, result))
)

if not source_title or source_title == "Unknown":
source_title = get_field(result, "title") or get_field(result, "name") or (hash_hex[:16] + "..." if hash_hex else "Unknown")
source_title = (
get_field(result, "title")
or get_field(result, "name")
or (hash_hex[:16] + "..." if hash_hex else "Unknown")
)

local_db_checked = False
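A sketch of the single-subject rule in the hunk above (the upstream cmdlet name and exact pipeline syntax here are hypothetical, used only for illustration):

search-file <terms> | get-relationship        # several rows piped in: error, asks for disambiguation
@2 | get-relationship                         # one selected row: used as the subject
get-relationship -query "hash:<sha256>"       # explicit hash: appears to take precedence over the piped selection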
@@ -113,7 +133,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
# Folder store relationships
# IMPORTANT: only treat the Folder backend as a local DB store.
# Other backends may expose a location() method but are not SQLite folder stores.
if type(backend).__name__ == "Folder" and hasattr(backend, "location") and callable(getattr(backend, "location")):
if (
type(backend).__name__ == "Folder"
and hasattr(backend, "location")
and callable(getattr(backend, "location"))
):
storage_path = Path(str(backend.location()))
with API_folder_store(storage_path) as db:
local_db_checked = True
@@ -142,7 +166,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if not related_hash or related_hash == hash_hex:
continue

entry_type = "king" if str(rel_type).lower() == "alt" else str(rel_type)
entry_type = (
"king" if str(rel_type).lower() == "alt" else str(rel_type)
)
if entry_type == "king":
king_hashes.append(related_hash)

@@ -156,13 +182,15 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
except Exception:
pass

_add_relationship({
"hash": related_hash,
"type": entry_type,
"title": related_title,
"path": None,
"store": str(store_name),
})
_add_relationship(
{
"hash": related_hash,
"type": entry_type,
"title": related_title,
"path": None,
"store": str(store_name),
}
)

# Reverse relationships (alts pointing to this hash)
try:
@@ -187,13 +215,15 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
pass

entry_type = "alt" if rel_type == "alt" else (rel_type or "related")
_add_relationship({
"hash": child_hash,
"type": entry_type,
"title": child_title,
"path": None,
"store": str(store_name),
})
_add_relationship(
{
"hash": child_hash,
"type": entry_type,
"title": child_title,
"path": None,
"store": str(store_name),
}
)

# Siblings (alts that share the same king)
for king_hash in king_hashes:
@@ -218,24 +248,30 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
pass

entry_type = "alt" if sib_type == "alt" else (sib_type or "related")
_add_relationship({
"hash": sib_hash,
"type": entry_type,
"title": sib_title,
"path": None,
"store": str(store_name),
})
_add_relationship(
{
"hash": sib_hash,
"type": entry_type,
"title": sib_title,
"path": None,
"store": str(store_name),
}
)

except Exception as e:
log(f"Error checking store relationships: {e}", file=sys.stderr)

# If we found local relationships, we can stop or merge with Hydrus?
# For now, if we found local ones, let's show them.
# For now, if we found local ones, let's show them.
# But if the file is also in Hydrus, we might want those too.
# Let's try Hydrus if we have a hash.

hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_hash_for_operation(None, result))

hash_hex = (
normalize_hash(override_hash)
if override_hash
else normalize_hash(get_hash_for_operation(None, result))
)

if hash_hex and not local_db_checked:
try:
client = None
@@ -274,7 +310,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if backend_obj is not None and hasattr(backend_obj, "get_tag"):
try:
tag_result = backend_obj.get_tag(h)
tags = tag_result[0] if isinstance(tag_result, tuple) and tag_result else tag_result
tags = (
tag_result[0]
if isinstance(tag_result, tuple) and tag_result
else tag_result
)
if isinstance(tags, list):
for t in tags:
if isinstance(t, str) and t.lower().startswith("title:"):
@@ -308,10 +348,14 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if isinstance(storage, dict):
for group in storage.values():
if isinstance(group, list):
tag_candidates.extend([str(x) for x in group if isinstance(x, str)])
tag_candidates.extend(
[str(x) for x in group if isinstance(x, str)]
)
display = svc_data.get("display_tags")
if isinstance(display, list):
tag_candidates.extend([str(x) for x in display if isinstance(x, str)])
tag_candidates.extend(
[str(x) for x in display if isinstance(x, str)]
)
flat = meta.get("tags_flat")
if isinstance(flat, list):
tag_candidates.extend([str(x) for x in flat if isinstance(x, str)])
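For illustration only (an assumption; the actual stripping happens outside this hunk): the title lookup above appears to recover a display title from a "title:" namespace tag, e.g.

"title:Some Artwork (remastered)"  ->  "Some Artwork (remastered)"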
@@ -331,7 +375,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if rel:
file_rels = rel.get("file_relationships", {})
this_file_rels = file_rels.get(hash_hex)

if this_file_rels:
# Map Hydrus relationship IDs to names.
# For /manage_file_relationships/get_file_relationships, the Hydrus docs define:
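The rel_map the comment above refers to sits outside this hunk. As a hedged reference only, recalled from the public Hydrus Client API docs and not taken from this file, the numeric keys returned by /manage_file_relationships/get_file_relationships are typically:

# Assumption: typical ID-to-name mapping per the Hydrus Client API docs;
# the cmdlet's actual rel_map may use different display names.
rel_map = {
    "0": "potential duplicates",
    "1": "false positives",
    "3": "alternates",
    "8": "duplicates",
}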
@@ -353,16 +397,25 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:

# Some Hydrus responses provide a direct king hash under the 'king' key.
if key == "king":
king_hash = normalize_hash(rel_value) if isinstance(rel_value, str) else None
king_hash = (
normalize_hash(rel_value)
if isinstance(rel_value, str)
else None
)
if king_hash and king_hash != hash_hex:
if not any(str(r.get('hash', '')).lower() == king_hash for r in found_relationships):
found_relationships.append({
"hash": king_hash,
"type": "king",
"title": _resolve_related_title(king_hash),
"path": None,
"store": store_label,
})
if not any(
str(r.get("hash", "")).lower() == king_hash
for r in found_relationships
):
found_relationships.append(
{
"hash": king_hash,
"type": "king",
"title": _resolve_related_title(king_hash),
"path": None,
"store": store_label,
}
)
continue

rel_name = rel_map.get(key, f"type-{key}")
@@ -370,29 +423,43 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
# The relationship value is typically a list of hashes.
if isinstance(rel_value, list):
for rel_hash in rel_value:
rel_hash_norm = normalize_hash(rel_hash) if isinstance(rel_hash, str) else None
rel_hash_norm = (
normalize_hash(rel_hash)
if isinstance(rel_hash, str)
else None
)
if not rel_hash_norm or rel_hash_norm == hash_hex:
continue
if not any(str(r.get('hash', '')).lower() == rel_hash_norm for r in found_relationships):
found_relationships.append({
"hash": rel_hash_norm,
"type": rel_name,
"title": _resolve_related_title(rel_hash_norm),
"path": None,
"store": store_label,
})
if not any(
str(r.get("hash", "")).lower() == rel_hash_norm
for r in found_relationships
):
found_relationships.append(
{
"hash": rel_hash_norm,
"type": rel_name,
"title": _resolve_related_title(rel_hash_norm),
"path": None,
"store": store_label,
}
)
# Defensive: sometimes the API may return a single hash string.
elif isinstance(rel_value, str):
rel_hash_norm = normalize_hash(rel_value)
if rel_hash_norm and rel_hash_norm != hash_hex:
if not any(str(r.get('hash', '')).lower() == rel_hash_norm for r in found_relationships):
found_relationships.append({
"hash": rel_hash_norm,
"type": rel_name,
"title": _resolve_related_title(rel_hash_norm),
"path": None,
"store": store_label,
})
if not any(
str(r.get("hash", "")).lower() == rel_hash_norm
for r in found_relationships
):
found_relationships.append(
{
"hash": rel_hash_norm,
"type": rel_name,
"title": _resolve_related_title(rel_hash_norm),
"path": None,
"store": store_label,
}
)
except Exception as exc:
# Only log error if we didn't find local relationships either
if not found_relationships:
@@ -402,66 +469,67 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
try:
from rich.panel import Panel
from rich_display import stdout_console

title = source_title or (hash_hex[:16] + "..." if hash_hex else "Item")
stdout_console().print(Panel(f"{title} has no relationships", title="Relationships"))
except Exception:
log("No relationships found.")
return 0

# Display results
table = ResultTable(f"Relationships: {source_title}").init_command("get-relationship", [])

# Sort by type then title
# Custom sort order: King first, then Derivative, then others
def type_sort_key(item):
t = item['type'].lower()
if t == 'king':
t = item["type"].lower()
if t == "king":
return 0
elif t == 'derivative':
elif t == "derivative":
return 1
elif t in {'alternative', 'alternate', 'alt'}:
elif t in {"alternative", "alternate", "alt"}:
return 2
elif t == 'duplicate':
elif t == "duplicate":
return 3
else:
return 4

found_relationships.sort(key=lambda x: (type_sort_key(x), x['title']))

found_relationships.sort(key=lambda x: (type_sort_key(x), x["title"]))

pipeline_results = []

for i, item in enumerate(found_relationships):
row = table.add_row()
row.add_column("Type", item['type'].title())
row.add_column("Title", item['title'])
row.add_column("Type", item["type"].title())
row.add_column("Title", item["title"])
# row.add_column("Hash", item['hash'][:16] + "...") # User requested removal
row.add_column("Store", item['store'])

row.add_column("Store", item["store"])

# Create result object for pipeline
res_obj = {
"title": item['title'],
"hash": item['hash'],
"file_hash": item['hash'],
"relationship_type": item['type'],
"store": item['store']
"title": item["title"],
"hash": item["hash"],
"file_hash": item["hash"],
"relationship_type": item["type"],
"store": item["store"],
}
# Target is always hash in store/hash-first mode
res_obj["target"] = item['hash']

res_obj["target"] = item["hash"]

pipeline_results.append(res_obj)

# Set selection args
table.set_row_selection_args(i, ["-store", str(item['store']), "-query", f"hash:{item['hash']}"])
table.set_row_selection_args(
i, ["-store", str(item["store"]), "-query", f"hash:{item['hash']}"]
)

ctx.set_last_result_table(table, pipeline_results)
from rich_display import stdout_console

stdout_console().print(table)

return 0

CMDLET.exec = _run
CMDLET.register()
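A closing sketch of how the table wiring above is presumably used (an assumption, not part of this commit): each printed row carries -store/-query selection args, so picking a row such as @1 from the relationships table should re-resolve that related file by store and hash, roughly:

get-relationship -store <store> -query "hash:<related sha256>"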