@@ -194,6 +194,13 @@ class ResultTable:
         """If True, skip automatic sorting so display order matches input order."""
         self.no_choice: bool = False
         """When True, suppress row numbers/selection to make the table non-interactive."""
+        self.table: Optional[str] = None
+        """Table type (e.g., 'youtube', 'soulseek') for context-aware selection logic."""
+
+    def set_table(self, table: str) -> "ResultTable":
+        """Set the table type for context-aware selection logic."""
+        self.table = table
+        return self
 
     def set_no_choice(self, no_choice: bool = True) -> "ResultTable":
         """Mark the table as non-interactive (no row numbers, no selection parsing)."""
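A minimal usage sketch of the new helpers, assuming ResultTable() needs no required constructor arguments and that set_no_choice simply stores the flag and returns self (its body falls outside this hunk):

results = ResultTable().set_table("soulseek").set_no_choice()
# results.table == "soulseek"  -> selection logic can key off the table type
# results.no_choice is True    -> no row numbers, no selection parsing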
@@ -393,11 +400,11 @@ class ResultTable:
 
         # Core fields (legacy fallback)
         title = getattr(result, 'title', '')
-        origin = getattr(result, 'origin', '').lower()
+        table = str(getattr(result, 'table', '') or '').lower()
 
         # Handle extension separation for local files
         extension = ""
-        if title and origin == 'local':
+        if title and table == 'local':
             path_obj = Path(title)
             if path_obj.suffix:
                 extension = path_obj.suffix.lstrip('.')
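The local-file branch now keys off table == 'local' instead of origin, but the suffix handling itself is unchanged. A self-contained sketch of that split with a made-up path:

from pathlib import Path

title = "downloads/track01.flac"   # hypothetical local-file title
extension = ""
if title:
    path_obj = Path(title)
    if path_obj.suffix:
        extension = path_obj.suffix.lstrip('.')   # -> 'flac'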
@@ -409,8 +416,8 @@ class ResultTable:
         # Extension column
         row.add_column("Ext", extension)
 
-        if hasattr(result, 'origin') and result.origin:
-            row.add_column("Source", result.origin)
+        if hasattr(result, 'table') and getattr(result, 'table', None):
+            row.add_column("Source", str(getattr(result, 'table')))
 
         if hasattr(result, 'detail') and result.detail:
             row.add_column("Detail", result.detail)
@@ -448,19 +455,19 @@ class ResultTable:
         Shows only essential columns:
         - Title (required)
         - Ext (extension)
-        - Origin (source backend)
+        - Storage (source backend)
         - Size (formatted MB, integer only)
 
         All other fields are stored in item but not displayed to keep table compact.
         Use @row# syntax to pipe full item data to next command.
         """
-        # Title (required - use origin as fallback)
-        title = getattr(item, 'title', None) or getattr(item, 'origin', 'Unknown')
-        origin = getattr(item, 'origin', '').lower()
+        # Title (required)
+        title = getattr(item, 'title', None) or 'Unknown'
+        table = str(getattr(item, 'table', '') or getattr(item, 'store', '') or '').lower()
 
         # Handle extension separation for local files
         extension = ""
-        if title and origin == 'local':
+        if title and table == 'local':
             # Try to split extension
             path_obj = Path(title)
             if path_obj.suffix:
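With this change the compact item row no longer falls back to origin for the title, and it derives its storage context from table or store. A sketch with a hypothetical item object:

from types import SimpleNamespace

item = SimpleNamespace(title="clips/holiday.mp4", table="local")   # hypothetical item

title = getattr(item, 'title', None) or 'Unknown'
table = str(getattr(item, 'table', '') or getattr(item, 'store', '') or '').lower()
# table == 'local', so the '.mp4' suffix is split into the Ext column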
@@ -474,8 +481,10 @@ class ResultTable:
         row.add_column("Ext", extension)
 
         # Storage (source backend - hydrus, local, debrid, etc)
-        if hasattr(item, 'origin') and item.origin:
-            row.add_column("Storage", item.origin)
+        if getattr(item, 'table', None):
+            row.add_column("Storage", str(getattr(item, 'table')))
+        elif getattr(item, 'store', None):
+            row.add_column("Storage", str(getattr(item, 'store')))
 
         # Size (for files) - integer MB only
         if hasattr(item, 'size_bytes') and item.size_bytes:
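The Storage column now prefers table over store and is skipped when neither is set; a hypothetical helper condensing that branch:

def storage_label(item):
    """Mirror of the branch above: 'table' wins, then 'store', else no Storage column."""
    if getattr(item, 'table', None):
        return str(getattr(item, 'table'))
    if getattr(item, 'store', None):
        return str(getattr(item, 'store'))
    return None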
@@ -499,8 +508,6 @@ class ResultTable:
         # Source/Store (where the tags come from)
         if hasattr(item, 'source') and item.source:
             row.add_column("Store", item.source)
-        elif hasattr(item, 'origin') and item.origin:
-            row.add_column("Store", item.origin)
 
 
     def _add_pipe_object(self, row: ResultRow, obj: Any) -> None:
@@ -549,7 +556,7 @@ class ResultTable:
 
         Priority field groups (uses first match within each group):
         - title | name | filename
-        - origin | source
+        - store | table | source
         - type | media_kind | kind
         - target | path | url
         - hash | hash_hex | file_hash
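The first-match rule within each group works the same way the dict path below implements it; a small sketch against a hypothetical dict result:

data = {"name": "example.mkv", "table": "hydrus"}   # hypothetical dict result

# 'title | name | filename' group: 'name' is the first key present, so it wins
title_field = next((f for f in ['title', 'name', 'filename'] if f in data), None)

# 'store | table | source' group: 'table' wins here
store_field = next((f for f in ['store', 'table', 'source'] if f in data), None)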
@@ -574,12 +581,12 @@ class ResultTable:
         visible_data = {k: v for k, v in data.items() if not is_hidden_field(k)}
 
         # Handle extension separation for local files
-        origin = str(visible_data.get('origin', '') or visible_data.get('source', '')).lower()
+        store_val = str(visible_data.get('store', '') or visible_data.get('table', '') or visible_data.get('source', '')).lower()
 
         # Debug logging
-        # print(f"DEBUG: Processing dict result. Origin: {origin}, Keys: {list(visible_data.keys())}")
+        # print(f"DEBUG: Processing dict result. Store: {store_val}, Keys: {list(visible_data.keys())}")
 
-        if origin == 'local':
+        if store_val == 'local':
             # Find title field
             title_field = next((f for f in ['title', 'name', 'filename'] if f in visible_data), None)
             if title_field:
@@ -629,8 +636,8 @@ class ResultTable:
         # Mark 'columns' as handled so we don't add it as a field
         added_fields.add('columns')
         # Also mark common fields that shouldn't be re-displayed if they're in columns
-        # This prevents showing both "Store" (from columns) and "Origin" (from data fields)
-        added_fields.add('origin')
+        # This prevents showing both "Store" (from columns) and "Store" (from data fields)
+        added_fields.add('table')
         added_fields.add('source')
         added_fields.add('target')
         added_fields.add('path')
@@ -649,7 +656,7 @@ class ResultTable:
             ('title', ['title']),
             ('ext', ['ext']),
             ('size', ['size', 'size_bytes']),
-            ('store', ['store', 'origin', 'source']),
+            ('store', ['store', 'table', 'source']),
         ]
 
         # Add priority field groups first - use first match in each group
@@ -668,7 +675,7 @@ class ResultTable:
                 value_str = value_str[:57] + "..."
 
             # Map field names to display column names
-            if field in ['store', 'origin', 'source']:
+            if field in ['store', 'table', 'source']:
                 col_name = "Store"
             elif field in ['size', 'size_bytes']:
                 col_name = "Size (Mb)"
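Taken together, the last two hunks make table a Store-like field in both the priority groups and the column-name mapping. A condensed sketch; the fallback for unmatched fields and the 60-character cutoff are assumptions, since only the Store and Size branches and the 57-character slice appear in the diff:

def column_name(field: str) -> str:
    if field in ['store', 'table', 'source']:
        return "Store"
    if field in ['size', 'size_bytes']:
        return "Size (Mb)"
    return field.title()   # assumption: other fields get a title-cased header

value_str = "x" * 80
if len(value_str) > 60:                  # assumption: cutoff implied by the 57-char slice
    value_str = value_str[:57] + "..."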