jjlj
189 result_table.py
@@ -251,8 +251,22 @@ class ResultTable:
     def _add_search_result(self, row: ResultRow, result: Any) -> None:
         """Extract and add SearchResult fields to row."""
         # Core fields
-        if hasattr(result, 'title') and result.title:
-            row.add_column("Title", result.title)
+        title = getattr(result, 'title', '')
+        origin = getattr(result, 'origin', '').lower()
+
+        # Handle extension separation for local files
+        extension = ""
+        if title and origin == 'local':
+            path_obj = Path(title)
+            if path_obj.suffix:
+                extension = path_obj.suffix.lstrip('.')
+                title = path_obj.stem
+
+        if title:
+            row.add_column("Title", title)
+
+        # Extension column
+        row.add_column("Ext", extension)

         if hasattr(result, 'origin') and result.origin:
             row.add_column("Source", result.origin)
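Note: the extension split above leans entirely on pathlib. A standalone sketch (not part of the commit; the filenames are hypothetical) of how Path.stem / Path.suffix behave for the titles this code will see:

from pathlib import Path

def split_title(title: str) -> tuple[str, str]:
    # Mirrors the hunk above: strip the last suffix, keep the stem as the display title.
    p = Path(title)
    if p.suffix:
        return p.stem, p.suffix.lstrip('.')
    return title, ""

print(split_title("holiday_photo.jpg"))  # ('holiday_photo', 'jpg')
print(split_title("archive.tar.gz"))     # ('archive.tar', 'gz') - only the final suffix is split off
print(split_title("README"))             # ('README', '')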
@@ -263,18 +277,6 @@ class ResultTable:
         if hasattr(result, 'media_kind') and result.media_kind:
             row.add_column("Type", result.media_kind)

-        # Target (file path or URL)
-        if hasattr(result, 'target') and result.target:
-            # Truncate long paths for display
-            target_str = str(result.target)
-            if len(target_str) > 60:
-                target_str = "..." + target_str[-57:]
-            row.add_column("Target", target_str)
-
-        # Hash
-        if hasattr(result, 'hash_hex') and result.hash_hex:
-            row.add_column("Hash", result.hash_hex[:16] + "...") # First 16 chars
-
         # Tags summary
         if hasattr(result, 'tag_summary') and result.tag_summary:
             tags_str = str(result.tag_summary)
@@ -305,6 +307,7 @@ class ResultTable:

         Shows only essential columns:
         - Title (required)
+        - Ext (extension)
         - Origin (source backend)
         - Size (formatted MB, integer only)

@@ -313,9 +316,23 @@ class ResultTable:
         """
         # Title (required - use origin as fallback)
         title = getattr(item, 'title', None) or getattr(item, 'origin', 'Unknown')
+        origin = getattr(item, 'origin', '').lower()
+
+        # Handle extension separation for local files
+        extension = ""
+        if title and origin == 'local':
+            # Try to split extension
+            path_obj = Path(title)
+            if path_obj.suffix:
+                extension = path_obj.suffix.lstrip('.')
+                title = path_obj.stem
+
         if title:
             row.add_column("Title", title[:90] + ("..." if len(title) > 90 else ""))

+        # Extension column - always add to maintain column order
+        row.add_column("Ext", extension)
+
         # Storage (source backend - hydrus, local, debrid, etc)
         if hasattr(item, 'origin') and item.origin:
             row.add_column("Storage", item.origin)
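Note: taken together, the two hunks above mean compact rows always carry an Ext column, even when empty, so the column order stays stable. A small sketch (hypothetical helper, plain tuples instead of ResultRow) of the resulting Title / Ext / Storage order:

from pathlib import Path

def compact_columns(title: str, origin: str) -> list[tuple[str, str]]:
    # Hypothetical stand-in for the compact-row logic: Title, then Ext, then Storage.
    extension = ""
    if origin.lower() == 'local':
        p = Path(title)
        if p.suffix:
            extension, title = p.suffix.lstrip('.'), p.stem
    cols = [("Title", title[:90] + ("..." if len(title) > 90 else ""))]
    cols.append(("Ext", extension))  # always added so later columns never shift
    if origin:
        cols.append(("Storage", origin))
    return cols

print(compact_columns("lecture_01.mp4", "local"))
# [('Title', 'lecture_01'), ('Ext', 'mp4'), ('Storage', 'local')]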
@@ -364,9 +381,6 @@ class ResultTable:
                 file_str = "..." + file_str[-57:]
             row.add_column("Path", file_str)

-        if hasattr(obj, 'file_hash') and obj.file_hash:
-            row.add_column("Hash", obj.file_hash[:16] + "...")
-
         # Tags
         if hasattr(obj, 'tags') and obj.tags:
             tags_str = ", ".join(obj.tags[:3]) # First 3 tags
@@ -406,7 +420,10 @@ class ResultTable:
         # Helper to determine if a field should be hidden from display
         def is_hidden_field(field_name: Any) -> bool:
             # Hide internal/metadata fields
-            hidden_fields = {'__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra'}
+            hidden_fields = {
+                '__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra',
+                'target', 'hash', 'hash_hex', 'file_hash'
+            }
             if isinstance(field_name, str):
                 if field_name.startswith('__'):
                     return True
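Note: with 'target', 'hash', 'hash_hex' and 'file_hash' added to the hidden set, those keys now disappear from dict-based results before any columns are built. A minimal sketch with made-up data:

hidden_fields = {
    '__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra',
    'target', 'hash', 'hash_hex', 'file_hash',
}

def is_hidden_field(name) -> bool:
    # Same rule as the diff: dunder-prefixed names or anything in the set is hidden.
    return isinstance(name, str) and (name.startswith('__') or name in hidden_fields)

data = {'title': 'notes.pdf', 'hash_hex': 'deadbeefcafebabe', '__score': 0.93, 'origin': 'local'}
print({k: v for k, v in data.items() if not is_hidden_field(k)})
# {'title': 'notes.pdf', 'origin': 'local'}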
@@ -417,6 +434,30 @@ class ResultTable:
         # Strip out hidden metadata fields (prefixed with __)
         visible_data = {k: v for k, v in data.items() if not is_hidden_field(k)}

+        # Handle extension separation for local files
+        origin = str(visible_data.get('origin', '') or visible_data.get('source', '')).lower()
+
+        # Debug logging
+        # print(f"DEBUG: Processing dict result. Origin: {origin}, Keys: {list(visible_data.keys())}")
+
+        if origin == 'local':
+            # Find title field
+            title_field = next((f for f in ['title', 'name', 'filename'] if f in visible_data), None)
+            if title_field:
+                title_val = str(visible_data[title_field])
+                path_obj = Path(title_val)
+                if path_obj.suffix:
+                    extension = path_obj.suffix.lstrip('.')
+                    visible_data[title_field] = path_obj.stem
+                    visible_data['ext'] = extension
+                    # print(f"DEBUG: Split extension. Title: {visible_data[title_field]}, Ext: {extension}")
+                else:
+                    visible_data['ext'] = ""
+
+        # Ensure 'ext' is present so it gets picked up by priority_groups in correct order
+        if 'ext' not in visible_data:
+            visible_data['ext'] = ""
+
         # Track which fields we've already added to avoid duplicates
         added_fields = set()
         column_count = 0 # Track total columns added
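Note: pulled out of the class, the new dict-handling block behaves roughly like this on a hypothetical local result (self-contained sketch):

from pathlib import Path

visible_data = {'title': 'notes.pdf', 'origin': 'local', 'tags': ['work']}

origin = str(visible_data.get('origin', '') or visible_data.get('source', '')).lower()
if origin == 'local':
    # Split the extension off whichever title-like field is present.
    title_field = next((f for f in ['title', 'name', 'filename'] if f in visible_data), None)
    if title_field:
        p = Path(str(visible_data[title_field]))
        if p.suffix:
            visible_data[title_field] = p.stem
            visible_data['ext'] = p.suffix.lstrip('.')
        else:
            visible_data['ext'] = ""
visible_data.setdefault('ext', "")  # same effect as the "ensure 'ext' is present" guard

print(visible_data)
# {'title': 'notes', 'origin': 'local', 'tags': ['work'], 'ext': 'pdf'}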
@@ -467,10 +508,9 @@ class ResultTable:
         # Priority field groups - uses first matching field in each group
         priority_groups = [
             ('title | name | filename', ['title', 'name', 'filename']),
-            ('origin | source', ['origin', 'source']),
+            ('ext', ['ext']),
+            ('origin | source | store', ['origin', 'source', 'store']),
             ('type | media_kind | kind', ['type', 'media_kind', 'kind']),
-            ('target | path | url', ['target', 'path', 'url']),
-            ('hash | hash_hex | file_hash', ['hash', 'hash_hex', 'file_hash']),
             ('tags | tag_summary', ['tags', 'tag_summary']),
             ('detail | description', ['detail', 'description']),
         ]
@@ -485,7 +525,12 @@ class ResultTable:
                     if len(value_str) > 60:
                         value_str = value_str[:57] + "..."

-                    row.add_column(field.replace('_', ' ').title(), value_str)
+                    # Special case for Origin/Source -> Store to match user preference
+                    col_name = field.replace('_', ' ').title()
+                    if field in ['origin', 'source']:
+                        col_name = "Store"
+
+                    row.add_column(col_name, value_str)
                     added_fields.add(field)
                     column_count += 1
                     break # Use first match in this group, skip rest
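Note: the two hunks above reorder the priority groups (Ext right after the title group, a combined origin/source/store group) and rename the origin/source column to "Store". A short sketch (hypothetical data, plain tuples instead of ResultRow) of the first-match-per-group selection:

priority_groups = [
    ('title | name | filename', ['title', 'name', 'filename']),
    ('ext', ['ext']),
    ('origin | source | store', ['origin', 'source', 'store']),
    ('tags | tag_summary', ['tags', 'tag_summary']),
]

visible_data = {'name': 'notes', 'ext': 'pdf', 'source': 'local', 'tags': ['work', 'pdf']}
columns = []
for _label, options in priority_groups:
    for field in options:
        if field in visible_data:
            # Origin/Source render as "Store", everything else as a title-cased header.
            col_name = "Store" if field in ('origin', 'source') else field.replace('_', ' ').title()
            columns.append((col_name, str(visible_data[field])))
            break  # first match in the group wins; the rest are skipped

print(columns)
# [('Name', 'notes'), ('Ext', 'pdf'), ('Store', 'local'), ('Tags', "['work', 'pdf']")]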
@@ -509,106 +554,6 @@ class ResultTable:
             # Don't display it
             added_fields.add('_selection_args')

-        # Helper to determine if a field should be hidden from display
-        def is_hidden_field(field_name: Any) -> bool:
-            # Hide internal/metadata fields
-            hidden_fields = {'__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra'}
-            if isinstance(field_name, str):
-                if field_name.startswith('__'):
-                    return True
-                if field_name in hidden_fields:
-                    return True
-            return False
-
-        # Strip out hidden metadata fields (prefixed with __)
-        visible_data = {k: v for k, v in data.items() if not is_hidden_field(k)}
-
-        # Track which fields we've already added to avoid duplicates
-        added_fields = set()
-        column_count = 0 # Track total columns added
-
-        # Helper function to format values
-        def format_value(value: Any) -> str:
-            if isinstance(value, list):
-                formatted = ", ".join(str(v) for v in value[:3])
-                if len(value) > 3:
-                    formatted += f", +{len(value) - 3} more"
-                return formatted
-            return str(value)
-
-        # Special handling for 'columns' field from search providers
-        # If present, use it to populate row columns dynamically
-        if 'columns' in visible_data and isinstance(visible_data['columns'], list) and visible_data['columns']:
-            try:
-                for col_name, col_value in visible_data['columns']:
-                    # Skip the "#" column as ResultTable already adds row numbers
-                    if col_name == '#':
-                        continue
-                    if column_count >= self.max_columns:
-                        break
-                    col_value_str = format_value(col_value)
-                    if len(col_value_str) > 60:
-                        col_value_str = col_value_str[:57] + "..."
-                    row.add_column(col_name, col_value_str)
-                    added_fields.add(col_name.lower())
-                    column_count += 1
-                # Mark 'columns' as handled so we don't add it as a field
-                added_fields.add('columns')
-                # Also mark common fields that shouldn't be re-displayed if they're in columns
-                # This prevents showing both "Store" (from columns) and "Origin" (from data fields)
-                added_fields.add('origin')
-                added_fields.add('source')
-                added_fields.add('target')
-                added_fields.add('path')
-                added_fields.add('media_kind')
-                added_fields.add('detail')
-                added_fields.add('annotations')
-                added_fields.add('full_metadata') # Don't display full metadata as column
-            except Exception:
-                # Fall back to regular field handling if columns format is unexpected
-                pass
-
-        # Only add priority groups if we haven't already filled columns from 'columns' field
-        if column_count == 0:
-            # Priority field groups - uses first matching field in each group
-            priority_groups = [
-                ('title | name | filename', ['title', 'name', 'filename']),
-                ('origin | source', ['origin', 'source']),
-                ('type | media_kind | kind', ['type', 'media_kind', 'kind']),
-                ('target | path | url', ['target', 'path', 'url']),
-                ('hash | hash_hex | file_hash', ['hash', 'hash_hex', 'file_hash']),
-                ('tags | tag_summary', ['tags', 'tag_summary']),
-                ('detail | description', ['detail', 'description']),
-            ]
-
-            # Add priority field groups first - use first match in each group
-            for _group_label, field_options in priority_groups:
-                if column_count >= self.max_columns:
-                    break
-                for field in field_options:
-                    if field in visible_data and field not in added_fields:
-                        value_str = format_value(visible_data[field])
-                        if len(value_str) > 60:
-                            value_str = value_str[:57] + "..."
-
-                        row.add_column(field.replace('_', ' ').title(), value_str)
-                        added_fields.add(field)
-                        column_count += 1
-                        break # Use first match in this group, skip rest
-
-        # Add remaining fields only if we haven't hit max_columns (and no explicit columns were set)
-        if column_count < self.max_columns:
-            for key, value in visible_data.items():
-                if column_count >= self.max_columns:
-                    break
-                if key not in added_fields: # Only add if not already added
-                    value_str = format_value(value)
-                    if len(value_str) > 40:
-                        value_str = value_str[:37] + "..."
-                    row.add_column(key.replace('_', ' ').title(), value_str)
-                    added_fields.add(key) # Track in added_fields to prevent re-adding
-                    column_count += 1
-
     def _add_generic_object(self, row: ResultRow, obj: Any) -> None:
         """Extract and add fields from generic objects."""
         if hasattr(obj, '__dict__'):