This commit is contained in:
2026-01-20 00:31:44 -08:00
parent fcab85455d
commit 1f65f9de2a
5 changed files with 104 additions and 21 deletions

12
CLI.py
View File

@@ -1233,6 +1233,10 @@ class CmdletExecutor:
)
except Exception:
pass
try:
progress_ui.complete_all_pipes()
except Exception:
pass
try:
progress_ui.stop()
except Exception:
@@ -1280,6 +1284,10 @@ class CmdletExecutor:
)
except Exception:
pass
try:
progress_ui.complete_all_pipes()
except Exception:
pass
try:
progress_ui.stop()
except Exception:
@@ -1312,6 +1320,10 @@ class CmdletExecutor:
)
except Exception:
pass
try:
progress_ui.complete_all_pipes()
except Exception:
pass
try:
progress_ui.stop()
except Exception:

View File

@@ -1376,16 +1376,18 @@ class PipelineLiveProgress:
total = self._pipe_totals[pipe_index]
active = self._subtask_active_index[pipe_index]
# If a stage emits more than expected, extend totals dynamically.
if done >= total:
total = done + 1
self._pipe_totals[pipe_index] = total
pipe_task = self._pipe_tasks[pipe_index]
pipe_progress.update(pipe_task, total=total)
# If a stage emits more than expected, or if we have no subtasks yet, extend dynamically.
if (done >= total) or (not self._subtask_ids[pipe_index]):
if done >= total:
total = done + 1
self._pipe_totals[pipe_index] = total
pipe_task = self._pipe_tasks[pipe_index]
pipe_progress.update(pipe_task, total=total)
# Add a placeholder subtask.
# Add a placeholder/next subtask.
label = _pipeline_progress_item_label(emitted)
sub_id = subtasks.add_task(
f"{self._pipe_labels[pipe_index]}: {_pipeline_progress_item_label(emitted)}"
f"{self._pipe_labels[pipe_index]}: {label}"
)
subtasks.stop_task(sub_id)
subtasks.update(sub_id, visible=False)
@@ -1457,6 +1459,9 @@ class PipelineLiveProgress:
assert overall is not None
total = self._pipe_totals[pipe_index]
if total < 1:
total = 1
self._pipe_totals[pipe_index] = total
done = self._pipe_done[pipe_index]
# Ensure the pipe bar finishes even if cmdlet didn't emit per item.
@@ -1499,6 +1504,16 @@ class PipelineLiveProgress:
self._update_overall()
def complete_all_pipes(self) -> None:
"""Mark every configured pipe as finished so UI bars reach 100%."""
if not self._enabled:
return
for idx in range(len(self._pipe_labels)):
try:
self.finish_pipe(idx)
except Exception:
pass
class PipelineStageContext:
"""Context information for the current pipeline stage."""

View File

@@ -2831,6 +2831,8 @@ class PipelineExecutor:
total_stages=len(stages),
pipe_index=pipe_idx,
worker_id=session.worker_id if session else None,
on_emit=(lambda x: progress_ui.on_emit(pipe_idx, x))
if progress_ui is not None and pipe_idx is not None else None,
)
# Set context for the current run
@@ -2839,6 +2841,12 @@ class PipelineExecutor:
ctx.set_current_stage_text(" ".join(stage_tokens))
ctx.clear_emits()
if progress_ui is not None and pipe_idx is not None:
# Start the pipe task in the UI. For most cmdlets we assume 1 item
# initially; cmdlets that process multiple items (like search)
# should call begin_pipe themselves with the actual count.
progress_ui.begin_pipe(pipe_idx, total_items=1)
# RUN THE CMDLET
cmd_fn(piped_result, stage_args, config)
@@ -2848,10 +2856,9 @@ class PipelineExecutor:
piped_result = stage_emits if len(stage_emits) > 1 else stage_emits[0]
else:
piped_result = None
if progress_ui is not None and pipe_idx is not None:
progress_ui.complete_stage(pipe_idx)
finally:
if progress_ui is not None and pipe_idx is not None:
progress_ui.finish_pipe(pipe_idx)
if session:
try:
session.close()
@@ -2870,6 +2877,10 @@ class PipelineExecutor:
finally:
# Stop Live progress and clear pipeline-level live progress
if progress_ui is not None:
try:
progress_ui.complete_all_pipes()
except Exception:
pass
try:
progress_ui.stop()
except Exception:

View File

@@ -87,6 +87,7 @@ class Get_Metadata(Cmdlet):
url: list[str],
hash_value: Optional[str],
pages: Optional[int] = None,
tag: Optional[List[str]] = None,
) -> Dict[str,
Any]:
"""Build a table row dict with metadata fields."""
@@ -142,6 +143,7 @@ class Get_Metadata(Cmdlet):
"imported": imported_label,
"hash": hash_value,
"url": url,
"tag": tag or [],
"columns": columns,
}
@@ -220,16 +222,30 @@ class Get_Metadata(Cmdlet):
# Extract title from tags if available
title = get_field(result, "title") or file_hash[:16]
if not get_field(result, "title"):
# Try to get title from tags
try:
tags, _ = backend.get_tag(file_hash)
for tag in tags:
if tag.lower().startswith("title:"):
title = tag.split(":", 1)[1]
break
except Exception:
pass
# Get tags from input result
item_tags = get_field(result, "tag") or get_field(result, "tags") or []
if not isinstance(item_tags, list):
item_tags = [str(item_tags)]
else:
item_tags = [str(t) for t in item_tags]
# Try to enrich tags and title from backend
try:
backend_tags, _ = backend.get_tag(file_hash)
if backend_tags:
for t in backend_tags:
ts = str(t)
if ts not in item_tags:
item_tags.append(ts)
# Also check for title tag if we don't have a title yet
if not get_field(result, "title") and ts.lower().startswith("title:"):
parts = ts.split(":", 1)
if len(parts) > 1:
title = parts[1].strip()
except Exception:
pass
# Extract metadata fields
mime_type = metadata.get("mime") or metadata.get("ext", "")
@@ -292,6 +308,7 @@ class Get_Metadata(Cmdlet):
url=url,
hash_value=file_hash,
pages=pages,
tag=item_tags,
)
table_title = f"get-metadata: {title}" if title else "get-metadata"

View File

@@ -648,6 +648,34 @@ class Get_Url(Cmdlet):
# Prepare metadata for the detail view
metadata = extract_item_metadata(result)
# Enrich the metadata with tags if missing
if not metadata.get("Tags"):
try:
from cmdlet._shared import get_field
item_tags = get_field(result, "tag") or get_field(result, "tags") or []
row_tags = []
if isinstance(item_tags, list):
row_tags.extend([str(t) for t in item_tags])
elif isinstance(item_tags, str):
row_tags.append(item_tags)
# Also collect from backend
if file_hash and store_name:
try:
# Re-use existing backend variable
if backend and hasattr(backend, "get_tag"):
b_tags, _ = backend.get_tag(file_hash)
if b_tags:
row_tags.extend([str(t) for t in b_tags])
except Exception:
pass
if row_tags:
row_tags = sorted(list(set(row_tags)))
metadata["Tags"] = ", ".join(row_tags)
except Exception:
pass
if file_hash:
metadata["Hash"] = file_hash
if store_name: