This commit is contained in:
2026-01-31 19:00:04 -08:00
parent dcf16e0cc4
commit 6513a3ad04
25 changed files with 617 additions and 397 deletions

View File

@@ -95,9 +95,9 @@ def _resolve_verify_value(verify_ssl: bool) -> Union[bool, str]:
if res: if res:
return res return res
except Exception: except Exception:
pass logger.exception("Failed while probing certifi helper inner block")
except Exception: except Exception:
pass logger.exception("Failed while invoking cert helper function")
return None return None
# Prefer helpful modules if available (use safe checks to avoid first-chance import errors) # Prefer helpful modules if available (use safe checks to avoid first-chance import errors)
@@ -107,7 +107,7 @@ def _resolve_verify_value(verify_ssl: bool) -> Union[bool, str]:
try: try:
os.environ["SSL_CERT_FILE"] = path os.environ["SSL_CERT_FILE"] = path
except Exception: except Exception:
pass logger.exception("Failed to set SSL_CERT_FILE environment variable")
logger.info(f"SSL_CERT_FILE not set; using bundle from {mod_name}: {path}") logger.info(f"SSL_CERT_FILE not set; using bundle from {mod_name}: {path}")
return path return path
@@ -120,11 +120,11 @@ def _resolve_verify_value(verify_ssl: bool) -> Union[bool, str]:
try: try:
os.environ["SSL_CERT_FILE"] = path os.environ["SSL_CERT_FILE"] = path
except Exception: except Exception:
pass logger.exception("Failed to set SSL_CERT_FILE environment variable during certifi fallback")
logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}") logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}")
return path return path
except Exception: except Exception:
pass logger.exception("Failed to probe certifi for trust bundle")
# Fallback to certifi # Fallback to certifi
try: try:
@@ -135,11 +135,11 @@ def _resolve_verify_value(verify_ssl: bool) -> Union[bool, str]:
try: try:
os.environ["SSL_CERT_FILE"] = path os.environ["SSL_CERT_FILE"] = path
except Exception: except Exception:
pass logger.exception("Failed to set SSL_CERT_FILE environment variable during certifi fallback")
logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}") logger.info(f"SSL_CERT_FILE not set; using certifi bundle: {path}")
return path return path
except Exception: except Exception:
pass logger.exception("Failed to probe certifi for trust bundle")
return True return True
@@ -400,7 +400,7 @@ class HTTPClient:
try: try:
progress_callback(0, total_bytes) progress_callback(0, total_bytes)
except Exception: except Exception:
pass logger.exception("Error in progress_callback initial call")
with open(path, "wb") as f: with open(path, "wb") as f:
for chunk in response.iter_bytes(chunk_size): for chunk in response.iter_bytes(chunk_size):
@@ -415,7 +415,7 @@ class HTTPClient:
try: try:
progress_callback(bytes_downloaded, total_bytes) progress_callback(bytes_downloaded, total_bytes)
except Exception: except Exception:
pass logger.exception("Error in progress_callback final call")
return path return path
@@ -496,7 +496,7 @@ class HTTPClient:
if 400 <= e.response.status_code < 500: if 400 <= e.response.status_code < 500:
try: try:
response_text = e.response.text[:500] response_text = e.response.text[:500]
except: except Exception:
response_text = "<unable to read response>" response_text = "<unable to read response>"
if log_http_errors: if log_http_errors:
logger.error( logger.error(
@@ -506,7 +506,7 @@ class HTTPClient:
last_exception = e last_exception = e
try: try:
response_text = e.response.text[:200] response_text = e.response.text[:200]
except: except Exception:
response_text = "<unable to read response>" response_text = "<unable to read response>"
logger.warning( logger.warning(
f"HTTP {e.response.status_code} on attempt {attempt + 1}/{self.retries}: {url} - {response_text}" f"HTTP {e.response.status_code} on attempt {attempt + 1}/{self.retries}: {url} - {response_text}"
@@ -715,7 +715,7 @@ def download_direct_file(
except DownloadError: except DownloadError:
raise raise
except Exception: except Exception:
pass logger.exception("Unexpected error while probing URL content")
suggested = _sanitize_filename(suggested_filename) if suggested_filename else "" suggested = _sanitize_filename(suggested_filename) if suggested_filename else ""
if suggested: if suggested:
@@ -727,12 +727,14 @@ def download_direct_file(
try: try:
detected_ext = Path(str(filename)).suffix detected_ext = Path(str(filename)).suffix
except Exception: except Exception:
logger.exception("Failed to detect file extension from filename")
detected_ext = "" detected_ext = ""
filename = suggested + detected_ext if detected_ext else suggested filename = suggested + detected_ext if detected_ext else suggested
try: try:
has_ext = bool(filename and Path(str(filename)).suffix) has_ext = bool(filename and Path(str(filename)).suffix)
except Exception: except Exception:
logger.exception("Failed to determine if filename has extension")
has_ext = False has_ext = False
if filename and (not has_ext): if filename and (not has_ext):
@@ -823,7 +825,7 @@ def download_direct_file(
total=total_val, total=total_val,
) )
except Exception: except Exception:
pass logger.exception("Error updating pipeline progress transfer")
if progress_bar is not None: if progress_bar is not None:
progress_bar.update( progress_bar.update(
@@ -842,7 +844,7 @@ def download_direct_file(
if progress_bar is not None: if progress_bar is not None:
progress_bar.finish() progress_bar.finish()
except Exception: except Exception:
pass logger.exception("Failed to finish progress bar")
try: try:
if pipeline_progress is not None and transfer_started[0] and hasattr( if pipeline_progress is not None and transfer_started[0] and hasattr(
@@ -850,7 +852,7 @@ def download_direct_file(
): ):
pipeline_progress.finish_transfer(label=str(filename or "download")) pipeline_progress.finish_transfer(label=str(filename or "download"))
except Exception: except Exception:
pass logger.exception("Failed to finish pipeline transfer")
if not quiet: if not quiet:
debug(f"✓ Downloaded in {elapsed:.1f}s") debug(f"✓ Downloaded in {elapsed:.1f}s")
@@ -871,7 +873,7 @@ def download_direct_file(
try: try:
hash_value = sha256_file(file_path) hash_value = sha256_file(file_path)
except Exception: except Exception:
pass logger.exception("Failed to compute SHA256 of downloaded file")
tags: List[str] = [] tags: List[str] = []
if extract_ytdlp_tags is not None: if extract_ytdlp_tags is not None:
@@ -908,14 +910,14 @@ def download_direct_file(
if progress_bar is not None: if progress_bar is not None:
progress_bar.finish() progress_bar.finish()
except Exception: except Exception:
pass logger.exception("Failed to finish progress bar during HTTP error handling")
try: try:
if pipeline_progress is not None and transfer_started[0] and hasattr( if pipeline_progress is not None and transfer_started[0] and hasattr(
pipeline_progress, "finish_transfer" pipeline_progress, "finish_transfer"
): ):
pipeline_progress.finish_transfer(label=str(filename or "download")) pipeline_progress.finish_transfer(label=str(filename or "download"))
except Exception: except Exception:
pass logger.exception("Failed to finish pipeline transfer during HTTP error handling")
log(f"Download error: {exc}", file=sys.stderr) log(f"Download error: {exc}", file=sys.stderr)
if debug_logger is not None: if debug_logger is not None:
@@ -930,14 +932,14 @@ def download_direct_file(
if progress_bar is not None: if progress_bar is not None:
progress_bar.finish() progress_bar.finish()
except Exception: except Exception:
pass logger.exception("Failed to finish progress bar during error handling")
try: try:
if pipeline_progress is not None and transfer_started[0] and hasattr( if pipeline_progress is not None and transfer_started[0] and hasattr(
pipeline_progress, "finish_transfer" pipeline_progress, "finish_transfer"
): ):
pipeline_progress.finish_transfer(label=str(filename or "download")) pipeline_progress.finish_transfer(label=str(filename or "download"))
except Exception: except Exception:
pass logger.exception("Failed to finish pipeline transfer during error handling")
log(f"Error downloading file: {exc}", file=sys.stderr) log(f"Error downloading file: {exc}", file=sys.stderr)
if debug_logger is not None: if debug_logger is not None:
@@ -1163,7 +1165,7 @@ class AsyncHTTPClient:
last_exception = e last_exception = e
try: try:
response_text = e.response.text[:200] response_text = e.response.text[:200]
except: except Exception:
response_text = "<unable to read response>" response_text = "<unable to read response>"
logger.warning( logger.warning(
f"HTTP {e.response.status_code} on attempt {attempt + 1}/{self.retries}: {url} - {response_text}" f"HTTP {e.response.status_code} on attempt {attempt + 1}/{self.retries}: {url} - {response_text}"

View File

@@ -178,7 +178,7 @@ class AllDebridClient:
logger.error( logger.error(
f"[AllDebrid] Response body: {error_body[:200]}" f"[AllDebrid] Response body: {error_body[:200]}"
) )
except: except Exception:
pass pass
raise raise

View File

@@ -4,6 +4,10 @@ import subprocess
import sys import sys
import shutil import shutil
from SYS.logger import log, debug from SYS.logger import log, debug
import logging
logger = logging.getLogger(__name__)
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple
@@ -651,7 +655,7 @@ def write_tags(
) )
sidecar = media_path.parent / f"{fallback_base}.tag" sidecar = media_path.parent / f"{fallback_base}.tag"
except Exception: except Exception:
pass logger.exception("Failed to determine fallback .tag sidecar base for %s", media_path)
# Write via consolidated function # Write via consolidated function
try: try:
@@ -1258,15 +1262,16 @@ def embed_metadata_in_file(
stderr_text = result.stderr.decode("utf-8", errors="replace")[:200] stderr_text = result.stderr.decode("utf-8", errors="replace")[:200]
debug(f"FFmpeg stderr: {stderr_text}", file=sys.stderr) debug(f"FFmpeg stderr: {stderr_text}", file=sys.stderr)
except Exception: except Exception:
pass logger.exception("Failed to decode FFmpeg stderr for %s", file_path)
return False return False
except Exception as exc: except Exception as exc:
if temp_file.exists(): if temp_file.exists():
try: try:
temp_file.unlink() temp_file.unlink()
except Exception: except Exception:
pass logger.exception("Failed to remove FFmpeg temp file %s after error", temp_file)
debug(f"❌ Error embedding metadata: {exc}", file=sys.stderr) debug(f"❌ Error embedding metadata: {exc}", file=sys.stderr)
logger.exception("Error embedding metadata into %s", file_path)
return False return False
@@ -2236,7 +2241,7 @@ def enrich_playlist_entries(entries: list, extractor: str) -> list:
enriched.append(full_info) enriched.append(full_info)
continue continue
except Exception: except Exception:
pass logger.exception("Failed to fetch full metadata for entry URL: %s", entry_url)
# Fallback to original entry if fetch failed # Fallback to original entry if fetch failed
enriched.append(entry) enriched.append(entry)
@@ -2306,7 +2311,7 @@ def extract_title_from_tags(tags_list: List[str]) -> Optional[str]:
if extracted: if extracted:
return extracted return extracted
except Exception: except Exception:
pass logger.exception("extract_title failed while extracting title from tags")
for t in tags_list: for t in tags_list:
if isinstance(t, str) and t.lower().startswith("title:"): if isinstance(t, str) and t.lower().startswith("title:"):
@@ -2563,9 +2568,9 @@ def scrape_url_metadata(
} }
) )
except json_module.JSONDecodeError: except json_module.JSONDecodeError:
pass logger.debug("Failed to decode flat playlist line %d as JSON: %r", idx, line[:200])
except Exception: except Exception:
pass # Silently ignore if we can't get playlist entries logger.exception("yt-dlp flat-playlist extraction failed for URL: %s", url)
# Fallback: if still no tags detected, get from first item # Fallback: if still no tags detected, get from first item
if not tags: if not tags:
@@ -2751,6 +2756,7 @@ def apply_mutagen_metadata(path: Path, metadata: dict[str, str], fmt: str) -> No
audio[target_key] = [value] audio[target_key] = [value]
changed = True changed = True
except Exception: # pragma: no cover - best effort only except Exception: # pragma: no cover - best effort only
logger.exception("mutagen: failed to set field %s for %s", target_key, path)
continue continue
if not changed: if not changed:
return return
@@ -2758,6 +2764,7 @@ def apply_mutagen_metadata(path: Path, metadata: dict[str, str], fmt: str) -> No
audio.save() audio.save()
except Exception as exc: # pragma: no cover - best effort only except Exception as exc: # pragma: no cover - best effort only
log(f"mutagen save failed: {exc}", file=sys.stderr) log(f"mutagen save failed: {exc}", file=sys.stderr)
logger.exception("mutagen save failed for %s", path)
def build_ffmpeg_command( def build_ffmpeg_command(

View File

@@ -8,7 +8,10 @@ import os
import shutil import shutil
import sys import sys
import time import time
import logging
from threading import RLock from threading import RLock
logger = logging.getLogger(__name__)
from dataclasses import dataclass, field from dataclasses import dataclass, field
from pathlib import Path from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Protocol, TextIO from typing import Any, Callable, Dict, List, Optional, Protocol, TextIO
@@ -474,7 +477,7 @@ class ProgressBar:
total=int(total) total=int(total)
) )
except Exception: except Exception:
pass logger.exception("Failed to update pipeline UI transfer in ProgressBar._ensure_started")
return return
if self._progress is not None and self._task_id is not None: if self._progress is not None and self._task_id is not None:
@@ -506,7 +509,7 @@ class ProgressBar:
else: else:
return return
except Exception: except Exception:
pass logger.exception("Failed to initialize pipeline Live UI integration in ProgressBar._ensure_started")
stream = file if file is not None else sys.stderr stream = file if file is not None else sys.stderr
# Use shared stderr console when rendering to stderr (cooperates with PipelineLiveProgress). # Use shared stderr console when rendering to stderr (cooperates with PipelineLiveProgress).
@@ -516,6 +519,7 @@ class ProgressBar:
console = stderr_console() console = stderr_console()
except Exception: except Exception:
logger.exception("Failed to acquire shared stderr Console from SYS.rich_display; using fallback Console")
console = Console(file=stream) console = Console(file=stream)
else: else:
console = Console(file=stream) console = Console(file=stream)
@@ -558,7 +562,7 @@ class ProgressBar:
int) and total > 0 else None, int) and total > 0 else None,
) )
except Exception: except Exception:
pass logger.exception("Failed to update pipeline UI transfer in ProgressBar.update")
return return
if self._progress is None or self._task_id is None: if self._progress is None or self._task_id is None:
@@ -582,7 +586,7 @@ class ProgressBar:
try: try:
self._pipeline_ui.finish_transfer(label=self._pipeline_label) self._pipeline_ui.finish_transfer(label=self._pipeline_label)
except Exception: except Exception:
pass logger.exception("Failed to finish pipeline UI transfer in ProgressBar.finish")
finally: finally:
self._pipeline_ui = None self._pipeline_ui = None
self._pipeline_label = None self._pipeline_label = None
@@ -681,7 +685,7 @@ class ProgressFileReader:
# EOF # EOF
self._finish() self._finish()
except Exception: except Exception:
pass logger.exception("Error while reading and updating ProgressFileReader")
return chunk return chunk
def seek(self, offset: int, whence: int = 0) -> Any: def seek(self, offset: int, whence: int = 0) -> Any:
@@ -695,7 +699,7 @@ class ProgressFileReader:
else: else:
self._read = pos self._read = pos
except Exception: except Exception:
pass logger.exception("Failed to determine file position in ProgressFileReader.seek")
return out return out
def tell(self) -> Any: def tell(self) -> Any:
@@ -705,7 +709,7 @@ class ProgressFileReader:
try: try:
self._finish() self._finish()
except Exception: except Exception:
pass logger.exception("Failed to finish ProgressFileReader progress in close")
return self._f.close() return self._f.close()
def __getattr__(self, name: str) -> Any: def __getattr__(self, name: str) -> Any:
@@ -825,6 +829,7 @@ class PipelineLiveProgress:
try: try:
value = str(text or "").strip() value = str(text or "").strip()
except Exception: except Exception:
logger.exception("Failed to compute active subtask text in PipelineLiveProgress.set_active_subtask_text")
value = "" value = ""
self._active_subtask_text = value or None self._active_subtask_text = value or None
@@ -1011,10 +1016,11 @@ class PipelineLiveProgress:
else: else:
stop_fn() stop_fn()
except Exception: except Exception:
logger.exception("Failed to stop Live with clear parameter; retrying without clear")
try: try:
stop_fn() stop_fn()
except Exception: except Exception:
pass logger.exception("Failed to stop Live on retry")
self._live = None self._live = None
self._console = None self._console = None
@@ -1043,9 +1049,9 @@ class PipelineLiveProgress:
subtasks.stop_task(sub_id) subtasks.stop_task(sub_id)
subtasks.update(sub_id, visible=False) subtasks.update(sub_id, visible=False)
except Exception: except Exception:
pass logger.exception("Failed to stop or hide subtask %s in PipelineLiveProgress._hide_pipe_subtasks", sub_id)
except Exception: except Exception:
pass logger.exception("Failed to hide pipe subtasks for index %s", pipe_index)
def set_pipe_status_text(self, pipe_index: int, text: str) -> None: def set_pipe_status_text(self, pipe_index: int, text: str) -> None:
"""Set a status line under the pipe bars for the given pipe.""" """Set a status line under the pipe bars for the given pipe."""
@@ -1071,20 +1077,21 @@ class PipelineLiveProgress:
try: try:
self._hide_pipe_subtasks(pidx) self._hide_pipe_subtasks(pidx)
except Exception: except Exception:
pass logger.exception("Failed to hide pipe subtasks while setting status text for pipe %s", pidx)
task_id = self._status_tasks.get(pidx) task_id = self._status_tasks.get(pidx)
if task_id is None: if task_id is None:
try: try:
task_id = prog.add_task(msg) task_id = prog.add_task(msg)
except Exception: except Exception:
logger.exception("Failed to add status task for pipe %s in set_pipe_status_text", pidx)
return return
self._status_tasks[pidx] = task_id self._status_tasks[pidx] = task_id
try: try:
prog.update(task_id, description=msg, refresh=True) prog.update(task_id, description=msg, refresh=True)
except Exception: except Exception:
pass logger.exception("Failed to update status task %s in set_pipe_status_text", task_id)
def clear_pipe_status_text(self, pipe_index: int) -> None: def clear_pipe_status_text(self, pipe_index: int) -> None:
if not self._enabled: if not self._enabled:
@@ -1104,7 +1111,7 @@ class PipelineLiveProgress:
try: try:
prog.remove_task(task_id) prog.remove_task(task_id)
except Exception: except Exception:
pass logger.exception("Failed to remove pipe status task %s in clear_pipe_status_text", task_id)
def set_pipe_percent(self, pipe_index: int, percent: int) -> None: def set_pipe_percent(self, pipe_index: int, percent: int) -> None:
"""Update the pipe bar as a percent (only when single-item mode is enabled).""" """Update the pipe bar as a percent (only when single-item mode is enabled)."""
@@ -1127,7 +1134,7 @@ class PipelineLiveProgress:
pipe_progress.update(pipe_task, completed=pct, total=100, refresh=True) pipe_progress.update(pipe_task, completed=pct, total=100, refresh=True)
self._update_overall() self._update_overall()
except Exception: except Exception:
pass logger.exception("Failed to set pipe percent for pipe %s in set_pipe_percent", pipe_index)
def _update_overall(self) -> None: def _update_overall(self) -> None:
"""Update the overall pipeline progress task.""" """Update the overall pipeline progress task."""
@@ -1142,6 +1149,7 @@ class PipelineLiveProgress:
if self._pipe_done[i] >= max(1, self._pipe_totals[i]) if self._pipe_done[i] >= max(1, self._pipe_totals[i])
) )
except Exception: except Exception:
logger.exception("Failed to compute completed pipes in _update_overall")
completed = 0 completed = 0
try: try:
@@ -1151,7 +1159,7 @@ class PipelineLiveProgress:
description=f"Pipeline: {completed}/{len(self._pipe_labels)} pipes completed", description=f"Pipeline: {completed}/{len(self._pipe_labels)} pipes completed",
) )
except Exception: except Exception:
pass logger.exception("Failed to update overall pipeline task in _update_overall")
# Auto-stop Live rendering once all pipes are complete so the progress # Auto-stop Live rendering once all pipes are complete so the progress
# UI clears itself even if callers forget to stop it explicitly. # UI clears itself even if callers forget to stop it explicitly.
@@ -1161,7 +1169,7 @@ class PipelineLiveProgress:
if total_pipes > 0 and completed >= total_pipes: if total_pipes > 0 and completed >= total_pipes:
self.stop() self.stop()
except Exception: except Exception:
pass logger.exception("Failed to auto-stop Live UI after all pipes completed")
def begin_pipe_steps(self, pipe_index: int, *, total_steps: int) -> None: def begin_pipe_steps(self, pipe_index: int, *, total_steps: int) -> None:
"""Initialize step tracking for a pipe. """Initialize step tracking for a pipe.
@@ -1187,11 +1195,11 @@ class PipelineLiveProgress:
try: try:
self.clear_pipe_status_text(pidx) self.clear_pipe_status_text(pidx)
except Exception: except Exception:
pass logger.exception("Failed to clear pipe status text in begin_pipe_steps for %s", pidx)
try: try:
self.set_pipe_percent(pidx, 0) self.set_pipe_percent(pidx, 0)
except Exception: except Exception:
pass logger.exception("Failed to set initial pipe percent in begin_pipe_steps for %s", pidx)
def advance_pipe_step(self, pipe_index: int, text: str) -> None: def advance_pipe_step(self, pipe_index: int, text: str) -> None:
"""Advance the pipe's step counter by one. """Advance the pipe's step counter by one.
@@ -1226,14 +1234,14 @@ class PipelineLiveProgress:
try: try:
self.set_pipe_status_text(pidx, line) self.set_pipe_status_text(pidx, line)
except Exception: except Exception:
pass logger.exception("Failed to set pipe status text in advance_pipe_step for pipe %s", pidx)
# Percent mapping only applies when the pipe is in percent mode (single-item). # Percent mapping only applies when the pipe is in percent mode (single-item).
try: try:
pct = 100 if done >= total else int(round((done / max(1, total)) * 100.0)) pct = 100 if done >= total else int(round((done / max(1, total)) * 100.0))
self.set_pipe_percent(pidx, pct) self.set_pipe_percent(pidx, pct)
except Exception: except Exception:
pass logger.exception("Failed to set pipe percent in advance_pipe_step for pipe %s", pidx)
def begin_transfer(self, *, label: str, total: Optional[int] = None) -> None: def begin_transfer(self, *, label: str, total: Optional[int] = None) -> None:
if not self._enabled: if not self._enabled:
@@ -1247,14 +1255,14 @@ class PipelineLiveProgress:
if total is not None and total > 0: if total is not None and total > 0:
self._transfers.update(self._transfer_tasks[key], total=int(total)) self._transfers.update(self._transfer_tasks[key], total=int(total))
except Exception: except Exception:
pass logger.exception("Failed to update existing transfer task total for %s in begin_transfer", key)
return return
task_total = int(total) if isinstance(total, int) and total > 0 else None task_total = int(total) if isinstance(total, int) and total > 0 else None
try: try:
task_id = self._transfers.add_task(key, total=task_total) task_id = self._transfers.add_task(key, total=task_total)
self._transfer_tasks[key] = task_id self._transfer_tasks[key] = task_id
except Exception: except Exception:
pass logger.exception("Failed to add transfer task %s in begin_transfer", key)
def update_transfer( def update_transfer(
self, self,
@@ -1282,7 +1290,7 @@ class PipelineLiveProgress:
kwargs["total"] = int(total) kwargs["total"] = int(total)
self._transfers.update(task_id, refresh=True, **kwargs) self._transfers.update(task_id, refresh=True, **kwargs)
except Exception: except Exception:
pass logger.exception("Failed to update transfer '%s'", key)
def finish_transfer(self, *, label: str) -> None: def finish_transfer(self, *, label: str) -> None:
if self._transfers is None: if self._transfers is None:
@@ -1294,7 +1302,7 @@ class PipelineLiveProgress:
try: try:
self._transfers.remove_task(task_id) self._transfers.remove_task(task_id)
except Exception: except Exception:
pass logger.exception("Failed to remove transfer task '%s' in finish_transfer", key)
def _ensure_pipe(self, pipe_index: int) -> bool: def _ensure_pipe(self, pipe_index: int) -> bool:
if not self._enabled: if not self._enabled:
@@ -1330,12 +1338,12 @@ class PipelineLiveProgress:
try: try:
self.clear_pipe_status_text(pipe_index) self.clear_pipe_status_text(pipe_index)
except Exception: except Exception:
pass logger.exception("Failed to clear pipe status text during begin_pipe for %s", pipe_index)
try: try:
self._pipe_step_total.pop(pipe_index, None) self._pipe_step_total.pop(pipe_index, None)
self._pipe_step_done.pop(pipe_index, None) self._pipe_step_done.pop(pipe_index, None)
except Exception: except Exception:
pass logger.exception("Failed to reset pipe step totals during begin_pipe for %s", pipe_index)
# If this pipe will process exactly one item, allow percent-based updates. # If this pipe will process exactly one item, allow percent-based updates.
percent_mode = bool(int(total_items) == 1) percent_mode = bool(int(total_items) == 1)
@@ -1351,7 +1359,7 @@ class PipelineLiveProgress:
try: try:
pipe_progress.start_task(pipe_task) pipe_progress.start_task(pipe_task)
except Exception: except Exception:
pass logger.exception("Failed to start pipe task timer in begin_pipe for %s", pipe_index)
self._update_overall() self._update_overall()
@@ -1386,6 +1394,7 @@ class PipelineLiveProgress:
"description", "description",
"") or "").strip() or None "") or "").strip() or None
except Exception: except Exception:
logger.exception("Failed to set active subtask text for first subtask %s in begin_pipe", first)
self._active_subtask_text = None self._active_subtask_text = None
def on_emit(self, pipe_index: int, emitted: Any) -> None: def on_emit(self, pipe_index: int, emitted: Any) -> None:
@@ -1429,7 +1438,7 @@ class PipelineLiveProgress:
f"{self._pipe_labels[pipe_index]}: {_pipeline_progress_item_label(emitted)}", f"{self._pipe_labels[pipe_index]}: {_pipeline_progress_item_label(emitted)}",
) )
except Exception: except Exception:
pass logger.exception("Failed to update subtask description for current %s in on_emit", current)
subtasks.stop_task(current) subtasks.stop_task(current)
subtasks.update(current, visible=False) subtasks.update(current, visible=False)
@@ -1448,12 +1457,12 @@ class PipelineLiveProgress:
try: try:
self.clear_pipe_status_text(pipe_index) self.clear_pipe_status_text(pipe_index)
except Exception: except Exception:
pass logger.exception("Failed to clear pipe status text after emit for %s", pipe_index)
try: try:
self._pipe_step_total.pop(pipe_index, None) self._pipe_step_total.pop(pipe_index, None)
self._pipe_step_done.pop(pipe_index, None) self._pipe_step_done.pop(pipe_index, None)
except Exception: except Exception:
pass logger.exception("Failed to pop pipe step totals after emit for %s", pipe_index)
# Start next subtask spinner. # Start next subtask spinner.
next_index = active + 1 next_index = active + 1
@@ -1468,6 +1477,7 @@ class PipelineLiveProgress:
"description", "description",
"") or "").strip() or None "") or "").strip() or None
except Exception: except Exception:
logger.exception("Failed to set active subtask text for next subtask %s in on_emit", nxt)
self._active_subtask_text = None self._active_subtask_text = None
else: else:
self._active_subtask_text = None self._active_subtask_text = None
@@ -1504,7 +1514,7 @@ class PipelineLiveProgress:
subtasks.stop_task(sub_id) subtasks.stop_task(sub_id)
subtasks.update(sub_id, visible=False) subtasks.update(sub_id, visible=False)
except Exception: except Exception:
pass logger.exception("Failed to stop or hide subtask %s during finish_pipe for pipe %s", sub_id, pipe_index)
# If we just finished the active pipe, clear the title context. # If we just finished the active pipe, clear the title context.
self._active_subtask_text = None self._active_subtask_text = None
@@ -1513,19 +1523,19 @@ class PipelineLiveProgress:
try: try:
self.clear_pipe_status_text(pipe_index) self.clear_pipe_status_text(pipe_index)
except Exception: except Exception:
pass logger.exception("Failed to clear pipe status text during finish_pipe for %s", pipe_index)
try: try:
self._pipe_step_total.pop(pipe_index, None) self._pipe_step_total.pop(pipe_index, None)
self._pipe_step_done.pop(pipe_index, None) self._pipe_step_done.pop(pipe_index, None)
except Exception: except Exception:
pass logger.exception("Failed to pop pipe step totals during finish_pipe for %s", pipe_index)
# Stop the per-pipe timer once the pipe is finished. # Stop the per-pipe timer once the pipe is finished.
try: try:
pipe_task = self._pipe_tasks[pipe_index] pipe_task = self._pipe_tasks[pipe_index]
pipe_progress.stop_task(pipe_task) pipe_progress.stop_task(pipe_task)
except Exception: except Exception:
pass logger.exception("Failed to stop pipe task %s during finish_pipe", pipe_index)
self._update_overall() self._update_overall()
@@ -1537,7 +1547,7 @@ class PipelineLiveProgress:
try: try:
self.finish_pipe(idx) self.finish_pipe(idx)
except Exception: except Exception:
pass logger.exception("Failed to finish pipe %s in complete_all_pipes", idx)
class PipelineStageContext: class PipelineStageContext:
@@ -1568,7 +1578,7 @@ class PipelineStageContext:
try: try:
cb(obj) cb(obj)
except Exception: except Exception:
pass logger.exception("Error in PipelineStageContext.emit callback")
def get_current_command_text(self) -> str: def get_current_command_text(self) -> str:
"""Get the current command text (for backward compatibility).""" """Get the current command text (for backward compatibility)."""

View File

@@ -10,6 +10,8 @@ from contextvars import ContextVar
from typing import Any, Dict, List, Optional, Sequence, Callable from typing import Any, Dict, List, Optional, Sequence, Callable
from SYS.models import PipelineStageContext from SYS.models import PipelineStageContext
from SYS.logger import log, debug, is_debug_enabled from SYS.logger import log, debug, is_debug_enabled
import logging
logger = logging.getLogger(__name__)
from SYS.worker import WorkerManagerRegistry, WorkerStages from SYS.worker import WorkerManagerRegistry, WorkerStages
from SYS.cli_parsing import SelectionSyntax, SelectionFilterSyntax from SYS.cli_parsing import SelectionSyntax, SelectionFilterSyntax
from SYS.rich_display import stdout_console from SYS.rich_display import stdout_console
@@ -62,7 +64,7 @@ def suspend_live_progress():
try: try:
ui.resume() ui.resume()
except Exception: except Exception:
pass logger.exception("Failed to resume live progress UI after suspend")
def _is_selectable_table(table: Any) -> bool: def _is_selectable_table(table: Any) -> bool:
@@ -237,7 +239,7 @@ def print_if_visible(*args: Any, file=None, **kwargs: Any) -> None:
if should_print: if should_print:
log(*args, **kwargs) if file is None else log(*args, file=file, **kwargs) log(*args, **kwargs) if file is None else log(*args, file=file, **kwargs)
except Exception: except Exception:
pass logger.exception("Error in print_if_visible")
def store_value(key: str, value: Any) -> None: def store_value(key: str, value: Any) -> None:
@@ -253,7 +255,7 @@ def store_value(key: str, value: Any) -> None:
state = _get_pipeline_state() state = _get_pipeline_state()
state.pipeline_values[text] = value state.pipeline_values[text] = value
except Exception: except Exception:
pass logger.exception("Failed to store pipeline value '%s'", key)
def load_value(key: str, default: Any = None) -> Any: def load_value(key: str, default: Any = None) -> Any:
@@ -330,7 +332,7 @@ def set_pending_pipeline_tail(
state.pending_pipeline_source = clean_source if clean_source else None state.pending_pipeline_source = clean_source if clean_source else None
except Exception: except Exception:
# Keep existing pending tail on failure # Keep existing pending tail on failure
pass logger.exception("Failed to set pending pipeline tail; keeping existing pending tail")
def get_pending_pipeline_tail() -> List[List[str]]: def get_pending_pipeline_tail() -> List[List[str]]:
@@ -627,24 +629,9 @@ def set_last_result_table(
if result_table.rows and len(sorted_items) == len(result_table.rows): if result_table.rows and len(sorted_items) == len(result_table.rows):
state.last_result_items = sorted_items state.last_result_items = sorted_items
except Exception: except Exception:
pass logger.exception("Failed to sort result_table and reorder items")
def set_last_result_table_overlay(
result_table: Optional[Any],
items: Optional[List[Any]] = None,
subject: Optional[Any] = None
) -> None:
"""
Set a result table as an overlay (display only, no history).
"""
state = _get_pipeline_state()
state.display_table = result_table
state.display_items = items or []
state.display_subject = subject
# Sort table by Title/Name column alphabetically if available
if ( if (
result_table is not None result_table is not None
and hasattr(result_table, "sort_by_title") and hasattr(result_table, "sort_by_title")
@@ -662,23 +649,7 @@ def set_last_result_table_overlay(
if len(sorted_items) == len(result_table.rows): if len(sorted_items) == len(result_table.rows):
state.display_items = sorted_items state.display_items = sorted_items
except Exception: except Exception:
pass logger.exception("Failed to sort overlay result_table and reorder items")
def set_last_result_table_preserve_history(
result_table: Optional[Any],
items: Optional[List[Any]] = None,
subject: Optional[Any] = None
) -> None:
"""
Update the last result table WITHOUT adding to history.
"""
state = _get_pipeline_state()
# Update current table WITHOUT pushing to history
state.last_result_table = result_table
state.last_result_items = items or []
state.last_result_subject = subject
@@ -747,7 +718,7 @@ def restore_previous_result_table() -> bool:
try: try:
debug_table_state("restore_previous_result_table") debug_table_state("restore_previous_result_table")
except Exception: except Exception:
pass logger.exception("Failed to debug_table_state during restore_previous_result_table")
return True return True
@@ -805,7 +776,7 @@ def restore_next_result_table() -> bool:
try: try:
debug_table_state("restore_next_result_table") debug_table_state("restore_next_result_table")
except Exception: except Exception:
pass logger.exception("Failed to debug_table_state during restore_next_result_table")
return True return True
@@ -926,7 +897,7 @@ def debug_table_state(label: str = "") -> None:
f"history={len(state.result_table_history or [])} forward={len(state.result_table_forward or [])} last_selection={list(state.last_selection or [])}" f"history={len(state.result_table_history or [])} forward={len(state.result_table_forward or [])} last_selection={list(state.last_selection or [])}"
) )
except Exception: except Exception:
pass logger.exception("Failed to debug_table_state buffers summary")
def get_last_selectable_result_items() -> List[Any]: def get_last_selectable_result_items() -> List[Any]:
@@ -1133,7 +1104,7 @@ class PipelineExecutor:
if self._config_loader is not None: if self._config_loader is not None:
return self._config_loader.load() return self._config_loader.load()
except Exception: except Exception:
pass logger.exception("Failed to use config_loader.load(); falling back to SYS.config.load_config")
try: try:
from SYS.config import load_config from SYS.config import load_config
@@ -1209,7 +1180,7 @@ class PipelineExecutor:
if hasattr(ctx, "clear_pipeline_stop"): if hasattr(ctx, "clear_pipeline_stop"):
ctx.clear_pipeline_stop() ctx.clear_pipeline_stop()
except Exception: except Exception:
pass logger.exception("Failed to clear pipeline stop via ctx.clear_pipeline_stop")
@staticmethod @staticmethod
def _maybe_seed_current_stage_table(ctx: Any) -> None: def _maybe_seed_current_stage_table(ctx: Any) -> None:
@@ -1231,7 +1202,7 @@ class PipelineExecutor:
if last_table: if last_table:
ctx.set_current_stage_table(last_table) ctx.set_current_stage_table(last_table)
except Exception: except Exception:
pass logger.exception("Failed to seed current_stage_table from display or last table")
@staticmethod @staticmethod
def _maybe_apply_pending_pipeline_tail(ctx: Any, def _maybe_apply_pending_pipeline_tail(ctx: Any,
@@ -1290,13 +1261,13 @@ class PipelineExecutor:
if hasattr(ctx, "clear_pending_pipeline_tail"): if hasattr(ctx, "clear_pending_pipeline_tail"):
ctx.clear_pending_pipeline_tail() ctx.clear_pending_pipeline_tail()
except Exception: except Exception:
pass logger.exception("Failed to clear pending pipeline tail after appending pending tail")
else: else:
try: try:
if hasattr(ctx, "clear_pending_pipeline_tail"): if hasattr(ctx, "clear_pending_pipeline_tail"):
ctx.clear_pending_pipeline_tail() ctx.clear_pending_pipeline_tail()
except Exception: except Exception:
pass logger.exception("Failed to clear pending pipeline tail (source mismatch branch)")
return stages return stages
def _apply_quiet_background_flag(self, config: Any) -> Any: def _apply_quiet_background_flag(self, config: Any) -> Any:
@@ -1410,7 +1381,7 @@ class PipelineExecutor:
if isinstance(meta, dict): if isinstance(meta, dict):
_add(meta.get("provider")) _add(meta.get("provider"))
except Exception: except Exception:
pass logger.exception("Failed to inspect current_table/table metadata in _maybe_run_class_selector")
for item in selected_items or []: for item in selected_items or []:
if isinstance(item, dict): if isinstance(item, dict):
@@ -1443,7 +1414,7 @@ class PipelineExecutor:
if prefix and is_known_provider_name(prefix): if prefix and is_known_provider_name(prefix):
_add(prefix) _add(prefix)
except Exception: except Exception:
pass logger.exception("Failed while computing provider prefix heuristics in _maybe_run_class_selector")
if get_provider is not None: if get_provider is not None:
for key in candidates: for key in candidates:
@@ -1453,7 +1424,7 @@ class PipelineExecutor:
continue continue
except Exception: except Exception:
# If the predicate fails for any reason, fall back to legacy behavior. # If the predicate fails for any reason, fall back to legacy behavior.
pass logger.exception("is_known_provider_name predicate failed for key %s; falling back", key)
try: try:
provider = get_provider(key, config) provider = get_provider(key, config)
except Exception: except Exception:
@@ -1511,7 +1482,7 @@ class PipelineExecutor:
if handled: if handled:
return True return True
except Exception: except Exception:
pass logger.exception("Failed while running store-based selector logic in _maybe_run_class_selector")
return False return False
@@ -1544,7 +1515,7 @@ class PipelineExecutor:
try: try:
worker_manager.append_stdout(worker_id, text + "\n", channel="log") worker_manager.append_stdout(worker_id, text + "\n", channel="log")
except Exception: except Exception:
pass logger.exception("Failed to append pipeline event to worker stdout for %s", worker_id)
@staticmethod @staticmethod
def _maybe_open_url_selection( def _maybe_open_url_selection(
@@ -1632,7 +1603,7 @@ class PipelineExecutor:
kwargs["output"] = output_fn kwargs["output"] = output_fn
ensure_background_notifier(worker_manager, **kwargs) ensure_background_notifier(worker_manager, **kwargs)
except Exception: except Exception:
pass logger.exception("Failed to enable background notifier for session_worker_ids=%r", session_worker_ids)
@staticmethod @staticmethod
def _get_raw_stage_texts(ctx: Any) -> List[str]: def _get_raw_stage_texts(ctx: Any) -> List[str]:
@@ -1691,7 +1662,7 @@ class PipelineExecutor:
if last_table is not None: if last_table is not None:
ctx.set_current_stage_table(last_table) ctx.set_current_stage_table(last_table)
except Exception: except Exception:
pass logger.exception("Failed to sync current_stage_table from display/last table in _maybe_apply_initial_selection")
source_cmd = None source_cmd = None
source_args_raw = None source_args_raw = None
@@ -1836,7 +1807,7 @@ class PipelineExecutor:
f"@N expansion: {source_cmd} + selected_args={selected_row_args} + source_args={source_args}", f"@N expansion: {source_cmd} + selected_args={selected_row_args} + source_args={source_args}",
) )
except Exception: except Exception:
pass logger.exception("Failed to record pipeline log step for @N expansion (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
stage_table = None stage_table = None
try: try:
@@ -1939,7 +1910,7 @@ class PipelineExecutor:
continue continue
seen_track_ids.add(tid) seen_track_ids.add(tid)
except Exception: except Exception:
pass logger.exception("Failed to extract/parse track metadata in album processing")
track_items.append(tr) track_items.append(tr)
if track_items: if track_items:
@@ -1969,7 +1940,7 @@ class PipelineExecutor:
f"Applied @N selection {' | '.join(selection_parts)}", f"Applied @N selection {' | '.join(selection_parts)}",
) )
except Exception: except Exception:
pass logger.exception("Failed to record Applied @N selection log step (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
# Auto-insert downloader stages for provider tables. # Auto-insert downloader stages for provider tables.
try: try:
@@ -1979,6 +1950,7 @@ class PipelineExecutor:
if current_table is None: if current_table is None:
current_table = ctx.get_last_result_table() current_table = ctx.get_last_result_table()
except Exception: except Exception:
logger.exception("Failed to determine current_table for selection auto-insert; defaulting to None")
current_table = None current_table = None
table_type = None table_type = None
try: try:
@@ -1990,6 +1962,7 @@ class PipelineExecutor:
if current_table and hasattr(current_table, "table") else None if current_table and hasattr(current_table, "table") else None
) )
except Exception: except Exception:
logger.exception("Failed to compute table_type from current_table; using fallback attribute access")
table_type = ( table_type = (
current_table.table current_table.table
if current_table and hasattr(current_table, "table") else None if current_table and hasattr(current_table, "table") else None
@@ -2100,7 +2073,7 @@ class PipelineExecutor:
try: try:
print(f"Auto-running selection via {auto_stage[0]}") print(f"Auto-running selection via {auto_stage[0]}")
except Exception: except Exception:
pass logger.exception("Failed to print auto-run selection message for %s", auto_stage[0])
# Append the auto stage now. If the user also provided a selection # Append the auto stage now. If the user also provided a selection
# (e.g., @1 | add-file ...), we want to attach the row selection # (e.g., @1 | add-file ...), we want to attach the row selection
# args *to the auto-inserted stage* so the download command receives # args *to the auto-inserted stage* so the download command receives
@@ -2137,44 +2110,54 @@ class PipelineExecutor:
tail = [str(x) for x in inserted[1:]] tail = [str(x) for x in inserted[1:]]
stages[-1] = [cmd] + [str(x) for x in row_args] + tail stages[-1] = [cmd] + [str(x) for x in row_args] + tail
except Exception: except Exception:
pass logger.exception("Failed to attach selection args to auto-inserted stage")
# If no auto stage inserted and there are selection-action tokens available # If no auto stage inserted and there are selection-action tokens available
# for the single selected row, apply it as the pipeline stage so a bare # for the single selected row, apply it as the pipeline stage so a bare
# `@N` runs the intended action (e.g., get-file for hash-backed rows). # `@N` runs the intended action (e.g., get-file for hash-backed rows).
if not stages and selection_indices and len(selection_indices) == 1: if not stages and selection_indices and len(selection_indices) == 1:
try:
idx = selection_indices[0]
debug(f"@N initial selection idx={idx} last_items={len(ctx.get_last_result_items() or [])}")
row_action = None
try:
row_action = ctx.get_current_stage_table_row_selection_action(idx)
except Exception:
row_action = None
if not row_action:
try: try:
items = ctx.get_last_result_items() or [] idx = selection_indices[0]
if 0 <= idx < len(items): debug(f"@N initial selection idx={idx} last_items={len(ctx.get_last_result_items() or [])}")
maybe = items[idx]
# Provide explicit debug output about the payload selected
try:
if isinstance(maybe, dict):
debug(f"@N payload: hash={maybe.get('hash')} store={maybe.get('store')} _selection_args={maybe.get('_selection_args')} _selection_action={maybe.get('_selection_action')}")
else:
debug(f"@N payload object type: {type(maybe).__name__}")
except Exception:
pass
if isinstance(maybe, dict):
candidate = maybe.get("_selection_action")
if isinstance(candidate, (list, tuple)):
row_action = [str(x) for x in candidate if x is not None]
debug(f"@N restored row_action from payload: {row_action}")
except Exception:
row_action = None row_action = None
try:
row_action = ctx.get_current_stage_table_row_selection_action(idx)
except Exception:
logger.exception("Failed to get current_stage_table row selection action for idx %s", idx)
row_action = None
if row_action: if not row_action:
debug(f"@N applying row action -> {row_action}") try:
items = ctx.get_last_result_items() or []
if 0 <= idx < len(items):
maybe = items[idx]
try:
if isinstance(maybe, dict):
debug(f"@N payload: hash={maybe.get('hash')} store={maybe.get('store')} _selection_args={maybe.get('_selection_args')} _selection_action={maybe.get('_selection_action')}")
else:
debug(f"@N payload object type: {type(maybe).__name__}")
except Exception:
logger.exception("Failed to debug selection payload for index %s", idx)
if isinstance(maybe, dict):
candidate = maybe.get("_selection_action")
if isinstance(candidate, (list, tuple)):
row_action = [str(x) for x in candidate if x is not None]
except Exception:
row_action = None
if row_action:
debug(f"@N applying row action -> {row_action}")
stages.append(row_action)
if pipeline_session and worker_manager:
try:
worker_manager.log_step(
pipeline_session.worker_id,
f"@N applied row action -> {' '.join(row_action)}",
)
except Exception:
logger.exception("Failed to record pipeline log step for applied row action (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
except Exception:
logger.exception("Failed to apply single-row selection action")
stages.append(row_action) stages.append(row_action)
if pipeline_session and worker_manager: if pipeline_session and worker_manager:
try: try:
@@ -2183,9 +2166,7 @@ class PipelineExecutor:
f"@N applied row action -> {' '.join(row_action)}", f"@N applied row action -> {' '.join(row_action)}",
) )
except Exception: except Exception:
pass logger.exception("Failed to record pipeline log step for applied row action (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
except Exception:
pass
else: else:
first_cmd = stages[0][0] if stages and stages[0] else None first_cmd = stages[0][0] if stages and stages[0] else None
if isinstance(table_type, str) and table_type.startswith("metadata.") and first_cmd not in ( if isinstance(table_type, str) and table_type.startswith("metadata.") and first_cmd not in (
@@ -2234,7 +2215,7 @@ class PipelineExecutor:
tail = [str(x) for x in inserted[1:]] tail = [str(x) for x in inserted[1:]]
stages[0] = [cmd] + [str(x) for x in row_args] + tail stages[0] = [cmd] + [str(x) for x in row_args] + tail
except Exception: except Exception:
pass logger.exception("Failed to attach selection args to inserted auto stage (alternate branch)")
# After inserting/appending an auto-stage, continue processing so later # After inserting/appending an auto-stage, continue processing so later
# selection-expansion logic can still run (e.g., for example selectors). # selection-expansion logic can still run (e.g., for example selectors).
@@ -2304,7 +2285,7 @@ class PipelineExecutor:
continue continue
i += 1 i += 1
except Exception: except Exception:
pass logger.exception("Failed to inspect add-file stage tokens for potential directory; skipping Live progress")
if not name: if not name:
continue continue
# Display-only: avoid Live progress for relationship viewing. # Display-only: avoid Live progress for relationship viewing.
@@ -2342,7 +2323,7 @@ class PipelineExecutor:
if hasattr(_pipeline_ctx, "set_live_progress"): if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(progress_ui) _pipeline_ctx.set_live_progress(progress_ui)
except Exception: except Exception:
pass logger.exception("Failed to register PipelineLiveProgress with pipeline context")
pipe_index_by_stage = { pipe_index_by_stage = {
stage_idx: pipe_idx stage_idx: pipe_idx
for pipe_idx, stage_idx in enumerate(pipe_stage_indices) for pipe_idx, stage_idx in enumerate(pipe_stage_indices)
@@ -2366,7 +2347,7 @@ class PipelineExecutor:
if hasattr(ctx, "set_current_stage_table"): if hasattr(ctx, "set_current_stage_table"):
ctx.set_current_stage_table(None) ctx.set_current_stage_table(None)
except Exception: except Exception:
pass logger.exception("Failed to clear current_stage_table in execute_tokens")
# Preflight (URL-duplicate prompts, etc.) should be cached within a single # Preflight (URL-duplicate prompts, etc.) should be cached within a single
# pipeline run, not across independent pipelines. # pipeline run, not across independent pipelines.
@@ -2374,7 +2355,7 @@ class PipelineExecutor:
ctx.store_value("preflight", ctx.store_value("preflight",
{}) {})
except Exception: except Exception:
pass logger.exception("Failed to set preflight cache in execute_tokens")
stages = self._split_stages(tokens) stages = self._split_stages(tokens)
if not stages: if not stages:
@@ -2482,7 +2463,7 @@ class PipelineExecutor:
try: try:
ctx.set_last_items(pipe_items) ctx.set_last_items(pipe_items)
except Exception: except Exception:
pass logger.exception("Failed to set last items after @ selection")
if pipeline_session and worker_manager: if pipeline_session and worker_manager:
try: try:
worker_manager.log_step( worker_manager.log_step(
@@ -2490,7 +2471,7 @@ class PipelineExecutor:
"@ used last result items" "@ used last result items"
) )
except Exception: except Exception:
pass logger.exception("Failed to record pipeline log step for '@ used last result items' (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
continue continue
subject = ctx.get_last_result_subject() subject = ctx.get_last_result_subject()
@@ -2505,7 +2486,7 @@ class PipelineExecutor:
list) else [subject] list) else [subject]
ctx.set_last_items(subject_items) ctx.set_last_items(subject_items)
except Exception: except Exception:
pass logger.exception("Failed to set last_items from subject during @ handling")
if pipeline_session and worker_manager: if pipeline_session and worker_manager:
try: try:
worker_manager.log_step( worker_manager.log_step(
@@ -2513,7 +2494,7 @@ class PipelineExecutor:
"@ used current table subject" "@ used current table subject"
) )
except Exception: except Exception:
pass logger.exception("Failed to record pipeline log step for '@ used current table subject' (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))
continue continue
if cmd_name.startswith("@"): # selection stage if cmd_name.startswith("@"): # selection stage
@@ -2550,7 +2531,7 @@ class PipelineExecutor:
ctx.set_current_stage_table(display_table) ctx.set_current_stage_table(display_table)
stage_table = display_table stage_table = display_table
except Exception: except Exception:
pass logger.exception("Failed to set current_stage_table from display table during selection processing")
if not stage_table and display_table is not None: if not stage_table and display_table is not None:
stage_table = display_table stage_table = display_table
@@ -2561,7 +2542,7 @@ class PipelineExecutor:
if hasattr(ctx, "debug_table_state"): if hasattr(ctx, "debug_table_state"):
ctx.debug_table_state(f"selection {selection_token}") ctx.debug_table_state(f"selection {selection_token}")
except Exception: except Exception:
pass logger.exception("Failed to debug_table_state during selection %s", selection_token)
if display_table is not None and stage_table is display_table: if display_table is not None and stage_table is display_table:
items_list = ctx.get_last_result_items() or [] items_list = ctx.get_last_result_items() or []
@@ -2600,9 +2581,9 @@ class PipelineExecutor:
try: try:
debug(f"Selection sample object: provider={getattr(sample, 'provider', None)} store={getattr(sample, 'store', None)}") debug(f"Selection sample object: provider={getattr(sample, 'provider', None)} store={getattr(sample, 'store', None)}")
except Exception: except Exception:
pass logger.exception("Failed to debug selection sample object")
except Exception: except Exception:
pass logger.exception("Failed to produce selection debug sample for token %s", selection_token)
if not filtered: if not filtered:
print("No items matched selection\n") print("No items matched selection\n")
@@ -2628,14 +2609,14 @@ class PipelineExecutor:
if base_table is not None and getattr(base_table, "table", None): if base_table is not None and getattr(base_table, "table", None):
new_table.set_table(str(getattr(base_table, "table"))) new_table.set_table(str(getattr(base_table, "table")))
except Exception: except Exception:
pass logger.exception("Failed to set table on new_table for filter overlay")
try: try:
# Attach a one-line header so users see the active filter. # Attach a one-line header so users see the active filter.
safe = str(selection_token)[1:].strip() safe = str(selection_token)[1:].strip()
new_table.set_header_line(f'filter: "{safe}"') new_table.set_header_line(f'filter: "{safe}"')
except Exception: except Exception:
pass logger.exception("Failed to set header line for filter overlay for token %s", selection_token)
for item in filtered: for item in filtered:
new_table.add_result(item) new_table.add_result(item)
@@ -2643,15 +2624,15 @@ class PipelineExecutor:
try: try:
ctx.set_last_result_table_overlay(new_table, items=list(filtered), subject=ctx.get_last_result_subject()) ctx.set_last_result_table_overlay(new_table, items=list(filtered), subject=ctx.get_last_result_subject())
except Exception: except Exception:
pass logger.exception("Failed to set last_result_table_overlay for filter selection")
try: try:
stdout_console().print() stdout_console().print()
stdout_console().print(new_table) stdout_console().print(new_table)
except Exception: except Exception:
pass logger.exception("Failed to render filter overlay to stdout_console")
except Exception: except Exception:
pass logger.exception("Failed while rendering filter overlay for selection %s", selection_token)
continue continue
# UX: selecting a single URL row from get-url tables should open it. # UX: selecting a single URL row from get-url tables should open it.
@@ -2667,7 +2648,7 @@ class PipelineExecutor:
stage_is_last=(stage_index + 1 >= len(stages)), stage_is_last=(stage_index + 1 >= len(stages)),
) )
except Exception: except Exception:
pass logger.exception("Failed to open URL selection for table %s", getattr(current_table, 'table', None))
if PipelineExecutor._maybe_run_class_selector( if PipelineExecutor._maybe_run_class_selector(
ctx, ctx,
@@ -2685,6 +2666,7 @@ class PipelineExecutor:
).replace("_", ).replace("_",
"-").lower() "-").lower()
except Exception: except Exception:
logger.exception("Failed to determine next_cmd during selection expansion for stage_index %s", stage_index)
next_cmd = None next_cmd = None
def _is_tag_row(obj: Any) -> bool: def _is_tag_row(obj: Any) -> bool:
@@ -2696,12 +2678,12 @@ class PipelineExecutor:
"tag_name")): "tag_name")):
return True return True
except Exception: except Exception:
pass logger.exception("Failed to inspect TagItem object while checking _is_tag_row")
try: try:
if isinstance(obj, dict) and obj.get("tag_name"): if isinstance(obj, dict) and obj.get("tag_name"):
return True return True
except Exception: except Exception:
pass logger.exception("Failed to inspect dict tag_name while checking _is_tag_row")
return False return False
if (next_cmd in {"delete-tag", if (next_cmd in {"delete-tag",
@@ -2788,7 +2770,7 @@ class PipelineExecutor:
try: try:
print(f"Auto-running selection via {auto_stage[0]}") print(f"Auto-running selection via {auto_stage[0]}")
except Exception: except Exception:
pass logger.exception("Failed to print auto-run selection message for %s", auto_stage[0])
stages.append(list(auto_stage)) stages.append(list(auto_stage))
else: else:
if auto_stage: if auto_stage:
@@ -2885,12 +2867,12 @@ class PipelineExecutor:
stdout_console().print() stdout_console().print()
stdout_console().print(overlay_table) stdout_console().print(overlay_table)
except Exception: except Exception:
pass logger.exception("Failed to render overlay_table to stdout_console")
if session: if session:
try: try:
session.close() session.close()
except Exception: except Exception:
pass logger.exception("Failed to close pipeline stage session")
except Exception as exc: except Exception as exc:
pipeline_status = "failed" pipeline_status = "failed"
@@ -2907,26 +2889,26 @@ class PipelineExecutor:
try: try:
progress_ui.complete_all_pipes() progress_ui.complete_all_pipes()
except Exception: except Exception:
pass logger.exception("Failed to complete all pipe UI tasks in progress_ui.complete_all_pipes")
try: try:
progress_ui.stop() progress_ui.stop()
except Exception: except Exception:
pass logger.exception("Failed to stop progress_ui")
try: try:
from SYS import pipeline as _pipeline_ctx from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"): if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None) _pipeline_ctx.set_live_progress(None)
except Exception: except Exception:
pass logger.exception("Failed to clear live_progress on pipeline context")
# Close pipeline session and log final status # Close pipeline session and log final status
try: try:
if pipeline_session and worker_manager: if pipeline_session and worker_manager:
pipeline_session.close(status=pipeline_status, error_msg=pipeline_error) pipeline_session.close(status=pipeline_status, error_msg=pipeline_error)
except Exception: except Exception:
pass logger.exception("Failed to close pipeline session during finalization")
try: try:
if pipeline_session and worker_manager: if pipeline_session and worker_manager:
self._log_pipeline_event(worker_manager, pipeline_session.worker_id, self._log_pipeline_event(worker_manager, pipeline_session.worker_id,
f"Pipeline {pipeline_status}: {pipeline_error or ''}") f"Pipeline {pipeline_status}: {pipeline_error or ''}")
except Exception: except Exception:
pass logger.exception("Failed to log final pipeline status (pipeline_session=%r)", getattr(pipeline_session, 'worker_id', None))

View File

@@ -3,6 +3,8 @@ from __future__ import annotations
import sys import sys
from contextlib import contextmanager from contextlib import contextmanager
from typing import Any, Iterator, Optional, Sequence, Tuple from typing import Any, Iterator, Optional, Sequence, Tuple
import logging
logger = logging.getLogger(__name__)
class PipelineProgress: class PipelineProgress:
@@ -31,6 +33,7 @@ class PipelineProgress:
) if hasattr(self._ctx, ) if hasattr(self._ctx,
"get_live_progress") else None "get_live_progress") else None
except Exception: except Exception:
logger.exception("Failed to get live progress UI from pipeline context")
ui = None ui = None
pipe_idx: int = 0 pipe_idx: int = 0
@@ -48,6 +51,7 @@ class PipelineProgress:
if isinstance(maybe_idx, int): if isinstance(maybe_idx, int):
pipe_idx = int(maybe_idx) pipe_idx = int(maybe_idx)
except Exception: except Exception:
logger.exception("Failed to determine pipe index from stage context")
pipe_idx = 0 pipe_idx = 0
return ui, pipe_idx return ui, pipe_idx
@@ -61,6 +65,7 @@ class PipelineProgress:
if callable(begin): if callable(begin):
begin(int(pipe_idx), total_steps=int(total_steps)) begin(int(pipe_idx), total_steps=int(total_steps))
except Exception: except Exception:
logger.exception("Failed to call begin_pipe_steps on UI")
return return
def step(self, text: str) -> None: def step(self, text: str) -> None:
@@ -72,6 +77,7 @@ class PipelineProgress:
if callable(adv): if callable(adv):
adv(int(pipe_idx), str(text)) adv(int(pipe_idx), str(text))
except Exception: except Exception:
logger.exception("Failed to advance pipe step on UI")
return return
def set_percent(self, percent: int) -> None: def set_percent(self, percent: int) -> None:
@@ -83,6 +89,7 @@ class PipelineProgress:
if callable(set_pct): if callable(set_pct):
set_pct(int(pipe_idx), int(percent)) set_pct(int(pipe_idx), int(percent))
except Exception: except Exception:
logger.exception("Failed to set pipe percent on UI")
return return
def set_status(self, text: str) -> None: def set_status(self, text: str) -> None:
@@ -94,6 +101,7 @@ class PipelineProgress:
if callable(setter): if callable(setter):
setter(int(pipe_idx), str(text)) setter(int(pipe_idx), str(text))
except Exception: except Exception:
logger.exception("Failed to set pipe status text on UI")
return return
def clear_status(self) -> None: def clear_status(self) -> None:
@@ -105,6 +113,7 @@ class PipelineProgress:
if callable(clr): if callable(clr):
clr(int(pipe_idx)) clr(int(pipe_idx))
except Exception: except Exception:
logger.exception("Failed to clear pipe status text on UI")
return return
def begin_transfer(self, *, label: str, total: Optional[int] = None) -> None: def begin_transfer(self, *, label: str, total: Optional[int] = None) -> None:
@@ -116,6 +125,7 @@ class PipelineProgress:
if callable(fn): if callable(fn):
fn(label=str(label or "transfer"), total=total) fn(label=str(label or "transfer"), total=total)
except Exception: except Exception:
logger.exception("Failed to begin transfer on UI")
return return
def update_transfer( def update_transfer(
@@ -133,6 +143,7 @@ class PipelineProgress:
if callable(fn): if callable(fn):
fn(label=str(label or "transfer"), completed=completed, total=total) fn(label=str(label or "transfer"), completed=completed, total=total)
except Exception: except Exception:
logger.exception("Failed to update transfer on UI")
return return
def finish_transfer(self, *, label: str) -> None: def finish_transfer(self, *, label: str) -> None:
@@ -144,6 +155,7 @@ class PipelineProgress:
if callable(fn): if callable(fn):
fn(label=str(label or "transfer")) fn(label=str(label or "transfer"))
except Exception: except Exception:
logger.exception("Failed to finish transfer on UI")
return return
def begin_pipe( def begin_pipe(
@@ -164,6 +176,7 @@ class PipelineProgress:
items_preview=list(items_preview or []), items_preview=list(items_preview or []),
) )
except Exception: except Exception:
logger.exception("Failed to begin pipe on UI")
return return
def on_emit(self, emitted: Any) -> None: def on_emit(self, emitted: Any) -> None:
@@ -178,6 +191,7 @@ class PipelineProgress:
try: try:
self._local_ui.on_emit(0, emitted) self._local_ui.on_emit(0, emitted)
except Exception: except Exception:
logger.exception("Failed to call local UI on_emit")
return return
def ensure_local_ui( def ensure_local_ui(
@@ -196,6 +210,7 @@ class PipelineProgress:
"get_live_progress") else None "get_live_progress") else None
) )
except Exception: except Exception:
logger.exception("Failed to check existing live progress from pipeline context")
existing = None existing = None
if existing is not None: if existing is not None:
@@ -213,6 +228,7 @@ class PipelineProgress:
self._ctx.set_live_progress(ui) self._ctx.set_live_progress(ui)
self._local_attached = True self._local_attached = True
except Exception: except Exception:
logger.exception("Failed to attach local UI to pipeline context")
self._local_attached = False self._local_attached = False
try: try:
@@ -223,11 +239,12 @@ class PipelineProgress:
items_preview=list(items_preview or []) items_preview=list(items_preview or [])
) )
except Exception: except Exception:
pass logger.exception("Failed to begin_pipe on local UI")
self._local_ui = ui self._local_ui = ui
return True return True
except Exception: except Exception:
logger.exception("Failed to create local PipelineLiveProgress UI")
self._local_ui = None self._local_ui = None
self._local_attached = False self._local_attached = False
return False return False
@@ -239,18 +256,18 @@ class PipelineProgress:
try: try:
self._local_ui.finish_pipe(0, force_complete=bool(force_complete)) self._local_ui.finish_pipe(0, force_complete=bool(force_complete))
except Exception: except Exception:
pass logger.exception("Failed to finish local UI pipe")
try: try:
self._local_ui.stop() self._local_ui.stop()
except Exception: except Exception:
pass logger.exception("Failed to stop local UI")
finally: finally:
self._local_ui = None self._local_ui = None
try: try:
if self._local_attached and hasattr(self._ctx, "set_live_progress"): if self._local_attached and hasattr(self._ctx, "set_live_progress"):
self._ctx.set_live_progress(None) self._ctx.set_live_progress(None)
except Exception: except Exception:
pass logger.exception("Failed to detach local progress from pipeline context")
self._local_attached = False self._local_attached = False
@contextmanager @contextmanager

View File

@@ -25,6 +25,9 @@ from ProviderCore.base import SearchResult
from SYS.html_table import extract_records from SYS.html_table import extract_records
import lxml.html as lxml_html import lxml.html as lxml_html
import logging
logger = logging.getLogger(__name__)
class TableProviderMixin: class TableProviderMixin:
"""Mixin to simplify providers that scrape table/list results from HTML. """Mixin to simplify providers that scrape table/list results from HTML.
@@ -56,15 +59,18 @@ class TableProviderMixin:
resp = client.get(url) resp = client.get(url)
content = resp.content content = resp.content
except Exception: except Exception:
logger.exception("Failed to fetch URL %s for provider %s", url, getattr(self, 'name', '<provider>'))
return [] return []
# Ensure we pass an lxml document or string (httpx returns bytes) # Ensure we pass an lxml document or string (httpx returns bytes)
try: try:
doc = lxml_html.fromstring(content) doc = lxml_html.fromstring(content)
except Exception: except Exception:
logger.debug("Failed to parse content with lxml; attempting to decode as utf-8", exc_info=True)
try: try:
doc = content.decode("utf-8") doc = content.decode("utf-8")
except Exception: except Exception:
logger.debug("Failed to decode content as utf-8; falling back to str()", exc_info=True)
doc = str(content) doc = str(content)
records, chosen = extract_records(doc, base_url=url, xpaths=xpaths or self.DEFAULT_XPATHS) records, chosen = extract_records(doc, base_url=url, xpaths=xpaths or self.DEFAULT_XPATHS)

View File

@@ -43,6 +43,9 @@ else:
# Reuse the existing format_bytes helper under a clearer alias # Reuse the existing format_bytes helper under a clearer alias
from SYS.utils import format_bytes as format_mb from SYS.utils import format_bytes as format_mb
import logging
logger = logging.getLogger(__name__)
def _sanitize_cell_text(value: Any) -> str: def _sanitize_cell_text(value: Any) -> str:
"""Coerce to a single-line, tab-free string suitable for terminal display.""" """Coerce to a single-line, tab-free string suitable for terminal display."""
@@ -82,6 +85,7 @@ def _format_duration_hms(duration: Any) -> str:
else: else:
seconds = float(duration) seconds = float(duration)
except Exception: except Exception:
logger.debug("Failed to format duration '%s' to hms", duration, exc_info=True)
return "" return ""
if seconds < 0: if seconds < 0:
@@ -118,6 +122,7 @@ class TableColumn:
try: try:
return self.extractor(item) return self.extractor(item)
except Exception: except Exception:
logger.exception("TableColumn.extract failed for key '%s'", self.key)
return None return None
@@ -137,6 +142,7 @@ def _as_dict(item: Any) -> Optional[Dict[str, Any]]:
if hasattr(item, "__dict__"): if hasattr(item, "__dict__"):
return dict(getattr(item, "__dict__")) return dict(getattr(item, "__dict__"))
except Exception: except Exception:
logger.exception("Failed to convert %s to dict in _as_dict", type(item))
return None return None
return None return None
@@ -201,6 +207,7 @@ def extract_ext_value(item: Any) -> str:
if suf: if suf:
ext = suf.lstrip(".") ext = suf.lstrip(".")
except Exception: except Exception:
logger.debug("Failed to extract suffix from raw_path: %r", raw_path, exc_info=True)
ext = "" ext = ""
ext_str = str(ext or "").strip().lstrip(".") ext_str = str(ext or "").strip().lstrip(".")
@@ -242,6 +249,7 @@ def extract_size_bytes_value(item: Any) -> Optional[int]:
# Some sources might provide floats or numeric strings # Some sources might provide floats or numeric strings
return int(float(s)) return int(float(s))
except Exception: except Exception:
logger.debug("Failed to parse size value '%r' to int", size_val, exc_info=True)
return None return None
@@ -471,6 +479,7 @@ class Table:
"get_current_cmdlet_name") else "" "get_current_cmdlet_name") else ""
) )
except Exception: except Exception:
logger.debug("Failed to get current cmdlet name from pipeline context", exc_info=True)
cmdlet_name = "" cmdlet_name = ""
stage_text = "" stage_text = ""
@@ -481,6 +490,7 @@ class Table:
"get_current_stage_text") else "" "get_current_stage_text") else ""
) )
except Exception: except Exception:
logger.debug("Failed to get current stage text from pipeline context", exc_info=True)
stage_text = "" stage_text = ""
if cmdlet_name and stage_text: if cmdlet_name and stage_text:
@@ -494,7 +504,8 @@ class Table:
"-").startswith(normalized_cmd): "-").startswith(normalized_cmd):
self.title = normalized_stage self.title = normalized_stage
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to introspect pipeline context to set ResultTable title")
self.title_width = title_width self.title_width = title_width
self.max_columns = ( self.max_columns = (
max_columns if max_columns is not None else 5 max_columns if max_columns is not None else 5
@@ -558,6 +569,7 @@ class Table:
try: try:
return dict(self.table_metadata) return dict(self.table_metadata)
except Exception: except Exception:
logger.exception("Failed to copy table metadata")
return {} return {}
def _interactive(self, interactive: bool = True) -> "Table": def _interactive(self, interactive: bool = True) -> "Table":
@@ -835,7 +847,8 @@ class Table:
val = col.format_fn(val) val = col.format_fn(val)
row.add_column(col.header, val) row.add_column(col.header, val)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to extract column '%s' for row %r", getattr(col, 'header', '<col>'), r)
return instance return instance
@@ -913,11 +926,13 @@ class Table:
md = getattr(result, "full_metadata", None) md = getattr(result, "full_metadata", None)
md_dict = dict(md) if isinstance(md, dict) else {} md_dict = dict(md) if isinstance(md, dict) else {}
except Exception: except Exception:
logger.debug("Failed to extract full_metadata for result of type %s", type(result), exc_info=True)
md_dict = {} md_dict = {}
try: try:
selection_args = getattr(result, "selection_args", None) selection_args = getattr(result, "selection_args", None)
except Exception: except Exception:
logger.debug("Failed to get selection_args from result of type %s", type(result), exc_info=True)
selection_args = None selection_args = None
if selection_args is None: if selection_args is None:
selection_args = md_dict.get("_selection_args") or md_dict.get("selection_args") selection_args = md_dict.get("_selection_args") or md_dict.get("selection_args")
@@ -927,6 +942,7 @@ class Table:
try: try:
selection_action = getattr(result, "selection_action", None) selection_action = getattr(result, "selection_action", None)
except Exception: except Exception:
logger.debug("Failed to get selection_action from result of type %s", type(result), exc_info=True)
selection_action = None selection_action = None
if selection_action is None: if selection_action is None:
selection_action = md_dict.get("_selection_action") or md_dict.get("selection_action") selection_action = md_dict.get("_selection_action") or md_dict.get("selection_action")
@@ -1084,13 +1100,16 @@ class Table:
and "table" not in visible_data and "source" not in visible_data): and "table" not in visible_data and "source" not in visible_data):
visible_data["store"] = store_extracted visible_data["store"] = store_extracted
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to extract store value for item: %r", data)
try: try:
ext_extracted = extract_ext_value(data) ext_extracted = extract_ext_value(data)
# Always ensure `ext` exists so priority_groups keeps a stable column. # Always ensure `ext` exists so priority_groups keeps a stable column.
visible_data["ext"] = str(ext_extracted or "") visible_data["ext"] = str(ext_extracted or "")
except Exception: except Exception:
from SYS.logger import logger
logger.exception("Failed to extract ext value for item: %r", data)
visible_data.setdefault("ext", "") visible_data.setdefault("ext", "")
try: try:
@@ -1099,7 +1118,8 @@ class Table:
and "size" not in visible_data): and "size" not in visible_data):
visible_data["size_bytes"] = size_extracted visible_data["size_bytes"] = size_extracted
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to extract size bytes for item: %r", data)
# Handle extension separation for local files # Handle extension separation for local files
store_val = str( store_val = str(
@@ -1168,7 +1188,8 @@ class Table:
col_value, col_value,
integer_only=False integer_only=False
) )
except Exception: except Exception as exc:
logger.debug("Failed to format 'size' column value: %r", col_value, exc_info=True)
col_value_str = format_value(col_value) col_value_str = format_value(col_value)
elif isinstance(col_name, elif isinstance(col_name,
str) and col_name.strip().lower() == "duration": str) and col_name.strip().lower() == "duration":
@@ -1178,7 +1199,8 @@ class Table:
else: else:
dur = _format_duration_hms(col_value) dur = _format_duration_hms(col_value)
col_value_str = dur or format_value(col_value) col_value_str = dur or format_value(col_value)
except Exception: except Exception as exc:
logger.debug("Failed to format 'duration' column value: %r", col_value, exc_info=True)
col_value_str = format_value(col_value) col_value_str = format_value(col_value)
else: else:
col_value_str = format_value(col_value) col_value_str = format_value(col_value)
@@ -1201,7 +1223,7 @@ class Table:
) # Don't display full metadata as column ) # Don't display full metadata as column
except Exception: except Exception:
# Fall back to regular field handling if columns format is unexpected # Fall back to regular field handling if columns format is unexpected
pass logger.exception("Failed to process 'columns' dynamic field list: %r", visible_data.get("columns"))
# Only add priority groups if we haven't already filled columns from 'columns' field # Only add priority groups if we haven't already filled columns from 'columns' field
if column_count == 0: if column_count == 0:

View File

@@ -8,6 +8,9 @@ possible and let callers decide whether to `Console.print()` or capture output.
from __future__ import annotations from __future__ import annotations
from typing import Any, Dict, Iterable, Optional from typing import Any, Dict, Iterable, Optional
import logging
logger = logging.getLogger(__name__)
from SYS.result_table_api import ColumnSpec, ResultModel, ResultTable, Renderer from SYS.result_table_api import ColumnSpec, ResultModel, ResultTable, Renderer
@@ -40,11 +43,13 @@ class RichRenderer(Renderer):
if col.format_fn: if col.format_fn:
try: try:
cell = col.format_fn(raw) cell = col.format_fn(raw)
except Exception: except Exception as exc:
logger.exception("Column format function failed for '%s': %s", col.header, exc)
cell = str(raw or "") cell = str(raw or "")
else: else:
cell = str(raw or "") cell = str(raw or "")
except Exception: except Exception as exc:
logger.exception("Column extractor failed for '%s': %s", col.header, exc)
cell = "" cell = ""
cells.append(cell) cells.append(cell)
table.add_row(*cells) table.add_row(*cells)

View File

@@ -29,7 +29,8 @@ try:
except TypeError: except TypeError:
_pretty_install() _pretty_install()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to configure rich pretty-printing")
_STDOUT_CONSOLE = Console(file=sys.stdout) _STDOUT_CONSOLE = Console(file=sys.stdout)
_STDERR_CONSOLE = Console(file=sys.stderr) _STDERR_CONSOLE = Console(file=sys.stderr)
@@ -261,8 +262,10 @@ def render_image_to_console(image_path: str | Path, max_width: int | None = None
console.print(line) console.print(line)
except Exception: except Exception:
# Silently fail if image cannot be rendered (e.g. missing PIL or corrupted file) # Emit logs to help diagnose rendering failures (PIL missing, corrupt file, terminal limitations)
pass from SYS.logger import logger
logger.exception("Failed to render image to console: %s", image_path)
return
def render_item_details_panel(item: Dict[str, Any], *, title: Optional[str] = None) -> None: def render_item_details_panel(item: Dict[str, Any], *, title: Optional[str] = None) -> None:
@@ -279,7 +282,8 @@ def render_item_details_panel(item: Dict[str, Any], *, title: Optional[str] = No
view.title = "" view.title = ""
view.header_lines = [] view.header_lines = []
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to sanitize ItemDetailView title/header before printing")
# We want to print ONLY the elements from ItemDetailView, so we don't use stdout_console().print(view) # We want to print ONLY the elements from ItemDetailView, so we don't use stdout_console().print(view)
# as that would include the (empty) results panel. # as that would include the (empty) results panel.

View File

@@ -528,6 +528,7 @@ def get_api_key(config: dict[str, Any], service: str, key_path: str) -> str | No
return None return None
except Exception: except Exception:
_format_logger.exception("Failed to resolve nested config key '%s'", key_path)
return None return None

View File

@@ -9,6 +9,7 @@ from typing import Any, Dict, Optional, Set, TextIO, Sequence
from SYS.config import get_local_storage_path from SYS.config import get_local_storage_path
from SYS.worker_manager import WorkerManager from SYS.worker_manager import WorkerManager
from SYS.logger import log
class WorkerOutputMirror(io.TextIOBase): class WorkerOutputMirror(io.TextIOBase):
@@ -69,7 +70,8 @@ class WorkerOutputMirror(io.TextIOBase):
try: try:
self._manager.append_stdout(self._worker_id, text, channel=self._channel) self._manager.append_stdout(self._worker_id, text, channel=self._channel)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to append stdout for worker '%s' channel '%s'", self._worker_id, self._channel)
@property @property
def encoding(self) -> str: # type: ignore[override] def encoding(self) -> str: # type: ignore[override]
@@ -112,7 +114,8 @@ class WorkerStageSession:
self.stdout_proxy.flush() self.stdout_proxy.flush()
self.stderr_proxy.flush() self.stderr_proxy.flush()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to flush worker stdout/stderr proxies for '%s'", self.worker_id)
sys.stdout = self.orig_stdout sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr sys.stderr = self.orig_stderr
@@ -121,7 +124,8 @@ class WorkerStageSession:
try: try:
self.manager.disable_logging_for_worker(self.worker_id) self.manager.disable_logging_for_worker(self.worker_id)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to disable logging for worker '%s'", self.worker_id)
try: try:
if status == "completed": if status == "completed":
@@ -131,14 +135,16 @@ class WorkerStageSession:
self.worker_id, f"{self._error_label}: {error_msg or status}" self.worker_id, f"{self._error_label}: {error_msg or status}"
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to log step for worker '%s' status='%s' error='%s'", self.worker_id, status, error_msg)
try: try:
self.manager.finish_worker( self.manager.finish_worker(
self.worker_id, result=status or "completed", error_msg=error_msg or "" self.worker_id, result=status or "completed", error_msg=error_msg or ""
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to finish worker '%s' with status '%s'", self.worker_id, status)
if self.config and self.config.get("_current_worker_id") == self.worker_id: if self.config and self.config.get("_current_worker_id") == self.worker_id:
self.config.pop("_current_worker_id", None) self.config.pop("_current_worker_id", None)
@@ -177,7 +183,8 @@ class WorkerManagerRegistry:
try: try:
cls._manager.close() cls._manager.close()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to close existing WorkerManager during registry ensure")
cls._manager = WorkerManager(resolved_root, auto_refresh_interval=0.5) cls._manager = WorkerManager(resolved_root, auto_refresh_interval=0.5)
cls._manager_root = resolved_root cls._manager_root = resolved_root
@@ -192,7 +199,8 @@ class WorkerManagerRegistry:
reason="CLI session ended unexpectedly; marking worker as failed", reason="CLI session ended unexpectedly; marking worker as failed",
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to expire running workers during registry ensure")
else: else:
cls._orphan_cleanup_done = True cls._orphan_cleanup_done = True
@@ -202,7 +210,7 @@ class WorkerManagerRegistry:
return manager return manager
except Exception as exc: except Exception as exc:
print(f"[worker] Could not initialize worker manager: {exc}", file=sys.stderr) log(f"[worker] Could not initialize worker manager: {exc}", file=sys.stderr)
return None return None
@classmethod @classmethod
@@ -212,7 +220,8 @@ class WorkerManagerRegistry:
try: try:
cls._manager.close() cls._manager.close()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to close WorkerManager during registry.close()")
cls._manager = None cls._manager = None
cls._manager_root = None cls._manager_root = None
cls._orphan_cleanup_done = False cls._orphan_cleanup_done = False
@@ -254,7 +263,7 @@ class WorkerStages:
if not tracked: if not tracked:
return None return None
except Exception as exc: except Exception as exc:
print(f"[worker] Failed to track {worker_type}: {exc}", file=sys.stderr) log(f"[worker] Failed to track {worker_type}: {exc}", file=sys.stderr)
return None return None
if session_worker_ids is not None: if session_worker_ids is not None:
@@ -279,7 +288,8 @@ class WorkerStages:
try: try:
worker_manager.log_step(worker_id, f"Started {worker_type}") worker_manager.log_step(worker_id, f"Started {worker_type}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to log start step for worker '%s'", worker_id)
return WorkerStageSession( return WorkerStageSession(
manager=worker_manager, manager=worker_manager,

View File

@@ -400,7 +400,7 @@ class WorkerManager:
try: try:
self.flush_worker_stdout(worker_id) self.flush_worker_stdout(worker_id)
except Exception: except Exception:
pass logger.exception("Failed to flush worker stdout for '%s'", worker_id)
logger.debug( logger.debug(
f"[WorkerManager] Disabled logging for worker: {worker_id}" f"[WorkerManager] Disabled logging for worker: {worker_id}"
@@ -516,7 +516,7 @@ class WorkerManager:
try: try:
self.flush_worker_stdout(worker_id) self.flush_worker_stdout(worker_id)
except Exception: except Exception:
pass logger.exception("Failed to flush worker stdout for '%s' during finish", worker_id)
kwargs = { kwargs = {
"status": "finished", "status": "finished",
"result": result "result": result
@@ -900,7 +900,7 @@ class WorkerManager:
try: try:
self._flush_all_stdout_buffers() self._flush_all_stdout_buffers()
except Exception: except Exception:
pass logger.exception("Failed to flush all stdout buffers during WorkerManager.close()")
logger.info("[WorkerManager] Closed") logger.info("[WorkerManager] Closed")
def _flush_all_stdout_buffers(self) -> None: def _flush_all_stdout_buffers(self) -> None:

View File

@@ -269,45 +269,7 @@ class Store:
# get_backend_instance implementation moved to the bottom of this file to avoid # get_backend_instance implementation moved to the bottom of this file to avoid
# instantiating all backends during startup (see function `get_backend_instance`). # instantiating all backends during startup (see function `get_backend_instance`).
def _resolve_backend_name(self, backend_name: str) -> tuple[Optional[str], Optional[str]]: # Duplicate _resolve_backend_name removed — the method is defined once earlier in the class.
requested = str(backend_name or "")
if requested in self._backends:
return requested, None
requested_norm = _normalize_store_type(requested)
ci_matches = [
name for name in self._backends
if _normalize_store_type(name) == requested_norm
]
if len(ci_matches) == 1:
return ci_matches[0], None
if len(ci_matches) > 1:
return None, f"Ambiguous store alias '{backend_name}' matches {ci_matches}"
type_matches = [
name for name, store_type in self._backend_types.items()
if store_type == requested_norm
]
if len(type_matches) == 1:
return type_matches[0], None
if len(type_matches) > 1:
return None, (
f"Ambiguous store alias '{backend_name}' matches type '{requested_norm}': {type_matches}"
)
prefix_matches = [
name for name, store_type in self._backend_types.items()
if store_type.startswith(requested_norm)
]
if len(prefix_matches) == 1:
return prefix_matches[0], None
if len(prefix_matches) > 1:
return None, (
f"Ambiguous store alias '{backend_name}' matches type prefix '{requested_norm}': {prefix_matches}"
)
return None, None
def get_backend_error(self, backend_name: str) -> Optional[str]: def get_backend_error(self, backend_name: str) -> Optional[str]:
return self._backend_errors.get(str(backend_name)) return self._backend_errors.get(str(backend_name))

30
TUI.py
View File

@@ -29,6 +29,8 @@ from textual.widgets import (
) )
from textual.widgets.option_list import Option from textual.widgets.option_list import Option
import logging
logger = logging.getLogger(__name__)
BASE_DIR = Path(__file__).resolve().parent BASE_DIR = Path(__file__).resolve().parent
REPO_ROOT = BASE_DIR REPO_ROOT = BASE_DIR
@@ -75,7 +77,7 @@ def _extract_tag_names(emitted: Sequence[Any]) -> List[str]:
tags.append(val) tags.append(val)
continue continue
except Exception: except Exception:
pass logger.exception("Error extracting tag_name in _extract_tag_names")
if isinstance(obj, dict): if isinstance(obj, dict):
# Prefer explicit tag lists # Prefer explicit tag lists
@@ -107,7 +109,7 @@ def _extract_tag_names_from_table(table: Any) -> List[str]:
if payloads: if payloads:
sources.extend(payloads) sources.extend(payloads)
except Exception: except Exception:
pass logger.exception("Error while calling table.get_payloads")
rows = getattr(table, "rows", []) or [] rows = getattr(table, "rows", []) or []
for row in rows: for row in rows:
for col in getattr(row, "columns", []) or []: for col in getattr(row, "columns", []) or []:
@@ -289,7 +291,7 @@ class TagEditorPopup(ModalScreen[None]):
try: try:
app.call_from_thread(app._append_log_line, msg) app.call_from_thread(app._append_log_line, msg)
except Exception: except Exception:
pass logger.exception("Failed to append log line from background thread")
def _log_pipeline_command(stage: str, cmd: str) -> None: def _log_pipeline_command(stage: str, cmd: str) -> None:
if not cmd: if not cmd:
@@ -377,7 +379,7 @@ class TagEditorPopup(ModalScreen[None]):
self._seeds, self._seeds,
) )
except Exception: except Exception:
pass logger.exception("Failed to refresh tag overlay")
try: try:
app.call_from_thread(_refresh_overlay) app.call_from_thread(_refresh_overlay)
@@ -513,7 +515,7 @@ class PipelineHubApp(App):
config = load_config() config = load_config()
SharedArgs._refresh_store_choices_cache(config) SharedArgs._refresh_store_choices_cache(config)
except Exception: except Exception:
pass logger.exception("Failed to refresh store choices cache")
self._populate_store_options() self._populate_store_options()
self._load_cmdlet_names() self._load_cmdlet_names()
@@ -535,7 +537,7 @@ class PipelineHubApp(App):
store_display = ", ".join(stores[:10]) + ("..." if len(stores) > 10 else "") store_display = ", ".join(stores[:10]) + ("..." if len(stores) > 10 else "")
self._append_log_line(f"Startup config: providers={len(provs)} ({prov_display or '(none)'}), stores={len(stores)} ({store_display or '(none)'}), db={db.db_path.name}") self._append_log_line(f"Startup config: providers={len(provs)} ({prov_display or '(none)'}), stores={len(stores)} ({store_display or '(none)'}), db={db.db_path.name}")
except Exception: except Exception:
pass logger.exception("Failed to produce startup config summary")
# ------------------------------------------------------------------ # ------------------------------------------------------------------
# Actions # Actions
@@ -699,7 +701,7 @@ class PipelineHubApp(App):
first = options[0] first = options[0]
return str(getattr(first, "prompt", "") or "") return str(getattr(first, "prompt", "") or "")
except Exception: except Exception:
pass logger.exception("Error retrieving first suggestion from suggestion list")
return "" return ""
def _populate_store_options(self) -> None: def _populate_store_options(self) -> None:
@@ -715,6 +717,7 @@ class PipelineHubApp(App):
try: try:
stores = StoreRegistry(config=cfg, suppress_debug=True).list_backends() stores = StoreRegistry(config=cfg, suppress_debug=True).list_backends()
except Exception: except Exception:
logger.exception("Failed to list store backends from StoreRegistry")
stores = [] stores = []
# Always offer a reasonable default even if config is missing. # Always offer a reasonable default even if config is missing.
@@ -730,7 +733,7 @@ class PipelineHubApp(App):
if (current is None) or (current == "") or (current is Select.BLANK): if (current is None) or (current == "") or (current is Select.BLANK):
self.store_select.value = options[0][1] self.store_select.value = options[0][1]
except Exception: except Exception:
pass logger.exception("Failed to set store select options")
def _get_selected_store(self) -> Optional[str]: def _get_selected_store(self) -> Optional[str]:
if not self.store_select: if not self.store_select:
@@ -969,7 +972,7 @@ class PipelineHubApp(App):
subject=payload_subject, subject=payload_subject,
) )
except Exception: except Exception:
pass logger.exception("Failed to emit tags as table")
def _load_cmdlet_names(self, force: bool = False) -> None: def _load_cmdlet_names(self, force: bool = False) -> None:
try: try:
@@ -981,6 +984,7 @@ class PipelineHubApp(App):
for n in names if str(n).strip()} for n in names if str(n).strip()}
) )
except Exception: except Exception:
logger.exception("Failed to load cmdlet names")
self._cmdlet_names = [] self._cmdlet_names = []
def _update_syntax_status(self, text: str) -> None: def _update_syntax_status(self, text: str) -> None:
@@ -1023,7 +1027,7 @@ class PipelineHubApp(App):
# Fallback for older/newer Textual APIs. # Fallback for older/newer Textual APIs.
self.suggestion_list.options = [] # type: ignore[attr-defined] self.suggestion_list.options = [] # type: ignore[attr-defined]
except Exception: except Exception:
pass logger.exception("Failed to clear suggestion list options via fallback")
try: try:
self.suggestion_list.add_options( self.suggestion_list.add_options(
@@ -1035,7 +1039,7 @@ class PipelineHubApp(App):
Option(m) for m in matches Option(m) for m in matches
] # type: ignore[attr-defined] ] # type: ignore[attr-defined]
except Exception: except Exception:
pass logger.exception("Failed to set suggestion list options via fallback")
self.suggestion_list.display = True self.suggestion_list.display = True
@@ -1173,12 +1177,12 @@ class PipelineHubApp(App):
if store_name and not str(seeds.get("store") or "").strip(): if store_name and not str(seeds.get("store") or "").strip():
seeds["store"] = store_name seeds["store"] = store_name
except Exception: except Exception:
pass logger.exception("Failed to set seed store value")
try: try:
if file_hash and not str(seeds.get("hash") or "").strip(): if file_hash and not str(seeds.get("hash") or "").strip():
seeds["hash"] = file_hash seeds["hash"] = file_hash
except Exception: except Exception:
pass logger.exception("Failed to set seed hash value")
self.push_screen( self.push_screen(
TagEditorPopup(seeds=seeds, TagEditorPopup(seeds=seeds,

View File

@@ -17,6 +17,8 @@ from Store.registry import _discover_store_classes, _required_keys_for
from ProviderCore.registry import list_providers from ProviderCore.registry import list_providers
from TUI.modalscreen.matrix_room_picker import MatrixRoomPicker from TUI.modalscreen.matrix_room_picker import MatrixRoomPicker
from TUI.modalscreen.selection_modal import SelectionModal from TUI.modalscreen.selection_modal import SelectionModal
import logging
logger = logging.getLogger(__name__)
class ConfigModal(ModalScreen): class ConfigModal(ModalScreen):
"""A modal for editing the configuration.""" """A modal for editing the configuration."""
@@ -187,22 +189,23 @@ class ConfigModal(ModalScreen):
try: try:
self.query_one("#add-tool-btn", Button).display = False self.query_one("#add-tool-btn", Button).display = False
except Exception: except Exception:
pass logger.exception("Failed to hide add-tool button in ConfigModal.on_mount")
# Update DB path and last-saved on mount # Update DB path and last-saved on mount
try: try:
self.query_one("#config-db-path", Static).update(self._db_path) self.query_one("#config-db-path", Static).update(self._db_path)
except Exception: except Exception:
pass logger.exception("Failed to update config DB path display in ConfigModal.on_mount")
try: try:
mtime = None mtime = None
try: try:
mtime = db.db_path.stat().st_mtime mtime = db.db_path.stat().st_mtime
mtime = __import__('datetime').datetime.utcfromtimestamp(mtime).isoformat() + "Z" mtime = __import__('datetime').datetime.utcfromtimestamp(mtime).isoformat() + "Z"
except Exception: except Exception:
logger.exception("Failed to stat DB path for last-saved time")
mtime = None mtime = None
self.query_one("#config-last-save", Static).update(f"Last saved: {mtime or '(unknown)'}") self.query_one("#config-last-save", Static).update(f"Last saved: {mtime or '(unknown)'}")
except Exception: except Exception:
pass logger.exception("Failed to update last-saved display in ConfigModal.on_mount")
self.refresh_view() self.refresh_view()
def refresh_view(self) -> None: def refresh_view(self) -> None:
@@ -236,7 +239,7 @@ class ConfigModal(ModalScreen):
self.query_one("#back-btn", Button).display = (self.editing_item_name is not None) self.query_one("#back-btn", Button).display = (self.editing_item_name is not None)
self.query_one("#save-btn", Button).display = (self.editing_item_name is not None or self.current_category == "globals") self.query_one("#save-btn", Button).display = (self.editing_item_name is not None or self.current_category == "globals")
except Exception: except Exception:
pass logger.exception("Failed to update visibility of config modal action buttons")
render_id = self._render_id render_id = self._render_id
@@ -445,7 +448,7 @@ class ConfigModal(ModalScreen):
if k: if k:
provider_schema_map[k.upper()] = field_def provider_schema_map[k.upper()] = field_def
except Exception: except Exception:
pass logger.exception("Failed to retrieve provider config_schema")
# Fetch Tool schema # Fetch Tool schema
if item_type == "tool": if item_type == "tool":
try: try:
@@ -457,7 +460,7 @@ class ConfigModal(ModalScreen):
if k: if k:
provider_schema_map[k.upper()] = field_def provider_schema_map[k.upper()] = field_def
except Exception: except Exception:
pass logger.exception("Failed to retrieve tool config_schema")
# Use columns for better layout of inputs with paste buttons # Use columns for better layout of inputs with paste buttons
container.mount(Label("Edit Settings")) container.mount(Label("Edit Settings"))
@@ -629,7 +632,7 @@ class ConfigModal(ModalScreen):
row.mount(Button("Paste", id=f"paste-{inp_id}", classes="paste-btn")) row.mount(Button("Paste", id=f"paste-{inp_id}", classes="paste-btn"))
idx += 1 idx += 1
except Exception: except Exception:
pass logger.exception("Failed to build required config inputs for provider/tool")
if ( if (
item_type == "provider" item_type == "provider"
@@ -700,15 +703,16 @@ class ConfigModal(ModalScreen):
seen_ids.add(rid) seen_ids.add(rid)
deduped.append(r) deduped.append(r)
except Exception: except Exception:
logger.exception("Failed to process a matrix room entry while deduplicating")
continue continue
if self._matrix_inline_list is not None and deduped: if self._matrix_inline_list is not None and deduped:
try: try:
self._render_matrix_rooms_inline(deduped) self._render_matrix_rooms_inline(deduped)
except Exception: except Exception:
pass logger.exception("Failed to render matrix inline rooms")
except Exception: except Exception:
pass logger.exception("Failed to fetch or process matrix rooms for inline rendering")
except Exception: except Exception:
self._matrix_inline_checkbox_map = {} self._matrix_inline_checkbox_map = {}
self._matrix_inline_list = None self._matrix_inline_list = None
@@ -785,7 +789,7 @@ class ConfigModal(ModalScreen):
try: try:
self.config_data = reload_config() self.config_data = reload_config()
except Exception: except Exception:
pass logger.exception("Failed to reload config after save conflict")
self._editor_snapshot = None self._editor_snapshot = None
self.editing_item_name = None self.editing_item_name = None
self.editing_item_type = None self.editing_item_type = None
@@ -807,7 +811,7 @@ class ConfigModal(ModalScreen):
try: try:
self.config_data = reload_config() self.config_data = reload_config()
except Exception: except Exception:
pass logger.exception("Failed to reload config after durable save")
if saved == 0: if saved == 0:
msg = f"Configuration saved (no rows changed) to {db.db_path.name}" msg = f"Configuration saved (no rows changed) to {db.db_path.name}"
@@ -816,7 +820,7 @@ class ConfigModal(ModalScreen):
try: try:
self.notify(msg, timeout=6) self.notify(msg, timeout=6)
except Exception: except Exception:
pass logger.exception("Failed to show notification message in ConfigModal")
# Return to the main list view within the current category # Return to the main list view within the current category
self.editing_item_name = None self.editing_item_name = None
@@ -828,7 +832,7 @@ class ConfigModal(ModalScreen):
try: try:
log(f"Durable save failed: {exc}") log(f"Durable save failed: {exc}")
except Exception: except Exception:
pass logger.exception("Failed to call log() for durable save error")
elif bid in self._button_id_map: elif bid in self._button_id_map:
action, itype, name = self._button_id_map[bid] action, itype, name = self._button_id_map[bid]
if action == "edit": if action == "edit":
@@ -870,7 +874,7 @@ class ConfigModal(ModalScreen):
if cls.config_schema(): if cls.config_schema():
options.append(stype) options.append(stype)
except Exception: except Exception:
pass logger.exception("Failed to inspect store class config_schema for '%s'", stype)
self.app.push_screen(SelectionModal("Select Store Type", options), callback=self.on_store_type_selected) self.app.push_screen(SelectionModal("Select Store Type", options), callback=self.on_store_type_selected)
elif bid == "add-provider-btn": elif bid == "add-provider-btn":
provider_names = list(list_providers().keys()) provider_names = list(list_providers().keys())
@@ -883,7 +887,7 @@ class ConfigModal(ModalScreen):
if pcls.config_schema(): if pcls.config_schema():
options.append(ptype) options.append(ptype)
except Exception: except Exception:
pass logger.exception("Failed to inspect provider class config_schema for '%s'", ptype)
self.app.push_screen(SelectionModal("Select Provider Type", options), callback=self.on_provider_type_selected) self.app.push_screen(SelectionModal("Select Provider Type", options), callback=self.on_provider_type_selected)
elif bid == "add-tool-btn": elif bid == "add-tool-btn":
# Discover tool modules that advertise a config_schema() # Discover tool modules that advertise a config_schema()
@@ -918,22 +922,22 @@ class ConfigModal(ModalScreen):
cb = self.query_one(f"#{checkbox_id}", Checkbox) cb = self.query_one(f"#{checkbox_id}", Checkbox)
cb.value = True cb.value = True
except Exception: except Exception:
pass logger.exception("Failed to set matrix inline checkbox to True for '%s'", checkbox_id)
try: try:
self.query_one("#matrix-inline-save", Button).disabled = False self.query_one("#matrix-inline-save", Button).disabled = False
except Exception: except Exception:
pass logger.exception("Failed to enable matrix inline save button")
elif bid == "matrix-inline-clear": elif bid == "matrix-inline-clear":
for checkbox_id in list(self._matrix_inline_checkbox_map.keys()): for checkbox_id in list(self._matrix_inline_checkbox_map.keys()):
try: try:
cb = self.query_one(f"#{checkbox_id}", Checkbox) cb = self.query_one(f"#{checkbox_id}", Checkbox)
cb.value = False cb.value = False
except Exception: except Exception:
pass logger.exception("Failed to set matrix inline checkbox to False for '%s'", checkbox_id)
try: try:
self.query_one("#matrix-inline-save", Button).disabled = True self.query_one("#matrix-inline-save", Button).disabled = True
except Exception: except Exception:
pass logger.exception("Failed to disable matrix inline save button")
elif bid == "matrix-inline-save": elif bid == "matrix-inline-save":
selected: List[str] = [] selected: List[str] = []
for checkbox_id, room_id in self._matrix_inline_checkbox_map.items(): for checkbox_id, room_id in self._matrix_inline_checkbox_map.items():
@@ -942,7 +946,7 @@ class ConfigModal(ModalScreen):
if cb.value and room_id: if cb.value and room_id:
selected.append(room_id) selected.append(room_id)
except Exception: except Exception:
pass logger.exception("Failed to read matrix inline checkbox '%s'", checkbox_id)
if not selected: if not selected:
if self._matrix_status: if self._matrix_status:
self._matrix_status.update("No default rooms were saved.") self._matrix_status.update("No default rooms were saved.")
@@ -963,7 +967,7 @@ class ConfigModal(ModalScreen):
try: try:
self.query_one("#matrix-inline-save", Button).disabled = True self.query_one("#matrix-inline-save", Button).disabled = True
except Exception: except Exception:
pass logger.exception("Failed to disable matrix inline save button")
self.refresh_view() self.refresh_view()
@@ -1096,7 +1100,7 @@ class ConfigModal(ModalScreen):
if key: if key:
new_config[key] = field_def.get("default", "") new_config[key] = field_def.get("default", "")
except Exception: except Exception:
pass logger.exception("Failed to load config_schema for tool '%s'", tname)
self.config_data["tool"][tname] = new_config self.config_data["tool"][tname] = new_config
self.editing_item_type = "tool" self.editing_item_type = "tool"
@@ -1247,7 +1251,7 @@ class ConfigModal(ModalScreen):
self._matrix_status.update("Matrix test skipped: please set both 'homeserver' and 'access_token' before testing.") self._matrix_status.update("Matrix test skipped: please set both 'homeserver' and 'access_token' before testing.")
return return
except Exception: except Exception:
pass logger.exception("Failed to check matrix configuration before testing")
if self._matrix_status: if self._matrix_status:
self._matrix_status.update("Saving configuration before testing…") self._matrix_status.update("Saving configuration before testing…")
@@ -1280,7 +1284,7 @@ class ConfigModal(ModalScreen):
try: try:
debug(f"[matrix] Test connection failed: {exc}\n{tb}") debug(f"[matrix] Test connection failed: {exc}\n{tb}")
except Exception: except Exception:
pass logger.exception("Failed to debug matrix test failure")
msg = str(exc) or "Matrix test failed" msg = str(exc) or "Matrix test failed"
m_lower = msg.lower() m_lower = msg.lower()
@@ -1328,7 +1332,7 @@ class ConfigModal(ModalScreen):
if isinstance(parsed, (list, tuple, dict)): if isinstance(parsed, (list, tuple, dict)):
return self._normalize_cached_raw(parsed if isinstance(parsed, (list, tuple)) else [parsed]) return self._normalize_cached_raw(parsed if isinstance(parsed, (list, tuple)) else [parsed])
except Exception: except Exception:
pass logger.exception("Failed to parse cached_rooms JSON for provider matrix")
# Try Python literal eval (accepts single quotes, repr-style lists) # Try Python literal eval (accepts single quotes, repr-style lists)
try: try:
@@ -1338,7 +1342,7 @@ class ConfigModal(ModalScreen):
if isinstance(parsed, (list, tuple, dict)): if isinstance(parsed, (list, tuple, dict)):
return self._normalize_cached_raw(parsed if isinstance(parsed, (list, tuple)) else [parsed]) return self._normalize_cached_raw(parsed if isinstance(parsed, (list, tuple)) else [parsed])
except Exception: except Exception:
pass logger.exception("Failed to parse cached_rooms as Python literal for provider matrix")
# Try to extract dict-like pairs for room_id/name when the string looks like # Try to extract dict-like pairs for room_id/name when the string looks like
# a Python repr or partial dict fragment (e.g., "'room_id': '!r1', 'name': 'Room'" # a Python repr or partial dict fragment (e.g., "'room_id': '!r1', 'name': 'Room'"
@@ -1362,11 +1366,11 @@ class ConfigModal(ModalScreen):
if ids: if ids:
return [{"room_id": rid, "name": ""} for rid in ids] return [{"room_id": rid, "name": ""} for rid in ids]
except Exception: except Exception:
pass logger.exception("Failed to extract cached_rooms pairs or ids for provider matrix")
return [] return []
except Exception: except Exception:
pass logger.exception("Failed to parse cached_rooms for provider matrix")
return [] return []
def _normalize_cached_raw(self, parsed: List[Any]) -> List[Dict[str, Any]]: def _normalize_cached_raw(self, parsed: List[Any]) -> List[Dict[str, Any]]:
@@ -1383,6 +1387,7 @@ class ConfigModal(ModalScreen):
if s: if s:
out.append({"room_id": s, "name": ""}) out.append({"room_id": s, "name": ""})
except Exception: except Exception:
logger.exception("Failed to normalize cached_rooms entry: %r", it)
continue continue
return out return out
@@ -1406,7 +1411,7 @@ class ConfigModal(ModalScreen):
self._matrix_status.update("Load skipped: please set both 'homeserver' and 'access_token' before loading rooms.") self._matrix_status.update("Load skipped: please set both 'homeserver' and 'access_token' before loading rooms.")
return return
except Exception: except Exception:
pass logger.exception("Failed to check matrix configuration before load")
if self._matrix_status: if self._matrix_status:
self._matrix_status.update("Saving configuration before loading rooms…") self._matrix_status.update("Saving configuration before loading rooms…")
@@ -1437,7 +1442,7 @@ class ConfigModal(ModalScreen):
try: try:
debug(f"[matrix] Load rooms failed: {exc}\n{tb}") debug(f"[matrix] Load rooms failed: {exc}\n{tb}")
except Exception: except Exception:
pass logger.exception("Failed to debug matrix load failure")
msg = str(exc) or "Matrix load failed" msg = str(exc) or "Matrix load failed"
if "auth" in msg.lower(): if "auth" in msg.lower():
msg = msg + ". Please verify your access token and try again." msg = msg + ". Please verify your access token and try again."
@@ -1453,14 +1458,14 @@ class ConfigModal(ModalScreen):
try: try:
self.notify(full_msg, severity="error", timeout=8) self.notify(full_msg, severity="error", timeout=8)
except Exception: except Exception:
pass logger.exception("Failed to show Matrix load failure notification")
return return
# Populate inline list # Populate inline list
try: try:
self._render_matrix_rooms_inline(rooms) self._render_matrix_rooms_inline(rooms)
except Exception: except Exception:
pass logger.exception("Failed to render inline matrix rooms")
# Persist cached rooms so they are available on next editor open # Persist cached rooms so they are available on next editor open
try: try:
@@ -1474,15 +1479,15 @@ class ConfigModal(ModalScreen):
try: try:
save_config(self.config_data) save_config(self.config_data)
except Exception: except Exception:
pass logger.exception("Failed to persist cached matrix rooms via save_config() fallback")
if self._matrix_status: if self._matrix_status:
self._matrix_status.update(f"Loaded and cached {len(rooms)} room(s).") self._matrix_status.update(f"Loaded and cached {len(rooms)} room(s).")
try: try:
self.notify(f"Loaded {len(rooms)} rooms and cached the results", timeout=5) self.notify(f"Loaded {len(rooms)} rooms and cached the results", timeout=5)
except Exception: except Exception:
pass logger.exception("Failed to notify loaded-and-cached message for Matrix rooms")
except Exception: except Exception:
pass logger.exception("Failed to cache Matrix rooms after load")
def _open_matrix_room_picker( def _open_matrix_room_picker(
self, self,
@@ -1522,15 +1527,15 @@ class ConfigModal(ModalScreen):
try: try:
save_config(self.config_data) save_config(self.config_data)
except Exception: except Exception:
pass logger.exception("Failed to persist cached matrix rooms via save_config() fallback")
if self._matrix_status: if self._matrix_status:
self._matrix_status.update(f"Loaded {len(rooms)} rooms (cached)") self._matrix_status.update(f"Loaded {len(rooms)} rooms (cached)")
try: try:
self.notify(f"Loaded {len(rooms)} rooms and cached the results", timeout=5) self.notify(f"Loaded {len(rooms)} rooms and cached the results", timeout=5)
except Exception: except Exception:
pass logger.exception("Failed to notify loaded-and-cached message for Matrix rooms")
except Exception: except Exception:
pass logger.exception("Failed to cache Matrix rooms when inline view unavailable")
return return
# Clear current entries # Clear current entries
@@ -1549,7 +1554,7 @@ class ConfigModal(ModalScreen):
save_btn = self.query_one("#matrix-inline-save", Button) save_btn = self.query_one("#matrix-inline-save", Button)
save_btn.disabled = True save_btn.disabled = True
except Exception: except Exception:
pass logger.exception("Failed to disable matrix inline save button when no rooms returned")
return return
any_selected = False any_selected = False
@@ -1597,7 +1602,7 @@ class ConfigModal(ModalScreen):
save_btn = self.query_one("#matrix-inline-save", Button) save_btn = self.query_one("#matrix-inline-save", Button)
save_btn.disabled = not any_selected save_btn.disabled = not any_selected
except Exception: except Exception:
pass logger.exception("Failed to set matrix inline save button disabled state")
def _resolve_matrix_rooms_by_ids(self, ids: Iterable[str]) -> List[Dict[str, Any]]: def _resolve_matrix_rooms_by_ids(self, ids: Iterable[str]) -> List[Dict[str, Any]]:
""" """
@@ -1628,7 +1633,7 @@ class ConfigModal(ModalScreen):
try: try:
debug(f"[config] failed to resolve matrix room names: {exc}") debug(f"[config] failed to resolve matrix room names: {exc}")
except Exception: except Exception:
pass logger.exception("Failed to debug matrix name resolution")
return [] return []
def on_matrix_rooms_selected(self, result: Any = None) -> None: def on_matrix_rooms_selected(self, result: Any = None) -> None:
@@ -1683,7 +1688,7 @@ class ConfigModal(ModalScreen):
try: try:
self.query_one("#matrix-inline-save", Button).disabled = not any_selected self.query_one("#matrix-inline-save", Button).disabled = not any_selected
except Exception: except Exception:
pass logger.exception("Failed to update matrix inline save button")
def on_input_changed(self, event: Input.Changed) -> None: def on_input_changed(self, event: Input.Changed) -> None:
if event.input.id: if event.input.id:
@@ -1722,11 +1727,11 @@ class ConfigModal(ModalScreen):
try: try:
self.query_one("#config-db-path", Static).update(self._db_path) self.query_one("#config-db-path", Static).update(self._db_path)
except Exception: except Exception:
pass logger.exception("Failed to update config db path label")
try: try:
self.query_one("#config-last-save", Static).update("Last saved: (saving...)") self.query_one("#config-last-save", Static).update("Last saved: (saving...)")
except Exception: except Exception:
pass logger.exception("Failed to update config last-save label")
log(f"ConfigModal scheduled save (changed={changed})") log(f"ConfigModal scheduled save (changed={changed})")
return changed return changed
@@ -1775,7 +1780,7 @@ class ConfigModal(ModalScreen):
try: try:
self.config_data = reload_config() self.config_data = reload_config()
except Exception: except Exception:
pass logger.exception("Failed to reload config after save completion")
# Update last-saved label with file timestamp for visibility # Update last-saved label with file timestamp for visibility
db_mtime = None db_mtime = None
@@ -1794,19 +1799,19 @@ class ConfigModal(ModalScreen):
try: try:
self.query_one("#config-last-save", Static).update(label_text) self.query_one("#config-last-save", Static).update(label_text)
except Exception: except Exception:
pass logger.exception("Failed to update last-save label with timestamp")
except Exception: except Exception:
pass logger.exception("Failed to compute last-save label text")
try: try:
self.refresh_view() self.refresh_view()
except Exception: except Exception:
pass logger.exception("Failed to refresh config editor view after save completion")
try: try:
self.notify(f"Configuration saved ({changed} change(s)) to {db.db_path.name}", timeout=5) self.notify(f"Configuration saved ({changed} change(s)) to {db.db_path.name}", timeout=5)
except Exception: except Exception:
pass logger.exception("Failed to show configuration saved notification")
else: else:
# No TUI available; log instead of updating UI # No TUI available; log instead of updating UI
log(f"Configuration saved ({changed} change(s)) to {db.db_path.name}") log(f"Configuration saved ({changed} change(s)) to {db.db_path.name}")
@@ -1818,17 +1823,17 @@ class ConfigModal(ModalScreen):
try: try:
self.notify(f"Save failed: {error}", severity="error", timeout=10) self.notify(f"Save failed: {error}", severity="error", timeout=10)
except Exception: except Exception:
pass logger.exception("Failed to show save failed notification")
try: try:
self.config_data = reload_config() self.config_data = reload_config()
except Exception: except Exception:
pass logger.exception("Failed to reload config after save failure")
try: try:
self.refresh_view() self.refresh_view()
except Exception: except Exception:
pass logger.exception("Failed to refresh view after save failure")
else: else:
log(f"Save failed: {error}") log(f"Save failed: {error}")
@@ -1867,7 +1872,7 @@ class ConfigModal(ModalScreen):
if rk not in required_keys: if rk not in required_keys:
required_keys.append(rk) required_keys.append(rk)
except Exception: except Exception:
pass logger.exception("Failed to inspect provider class '%s' for required keys", item_name)
section = self.config_data.get("provider", {}).get(item_name, {}) section = self.config_data.get("provider", {}).get(item_name, {})
elif item_type == "tool": elif item_type == "tool":
try: try:
@@ -1880,7 +1885,7 @@ class ConfigModal(ModalScreen):
if k and k not in required_keys: if k and k not in required_keys:
required_keys.append(k) required_keys.append(k)
except Exception: except Exception:
pass logger.exception("Failed to inspect tool module 'tool.%s' for required keys", item_name)
section = self.config_data.get("tool", {}).get(item_name, {}) section = self.config_data.get("tool", {}).get(item_name, {})
# Check required keys # Check required keys

View File

@@ -630,7 +630,7 @@ class DownloadModal(ModalScreen):
f"Download failed: {error_reason}", f"Download failed: {error_reason}",
) )
except Exception: except Exception:
pass logger.exception("Failed to finish worker during download failure handling")
# Also append detailed error info to worker stdout for visibility # Also append detailed error info to worker stdout for visibility
if worker: if worker:
@@ -799,7 +799,7 @@ class DownloadModal(ModalScreen):
f"Download error: {str(e)}", f"Download error: {str(e)}",
) )
except Exception: except Exception:
pass logger.exception("Failed to finish worker during download error handling")
self.app.call_from_thread(self._hide_progress) self.app.call_from_thread(self._hide_progress)
self.app.call_from_thread(self.dismiss) self.app.call_from_thread(self.dismiss)
return return
@@ -1091,7 +1091,7 @@ class DownloadModal(ModalScreen):
try: try:
worker.finish("error", f"Download failed: {str(e)}") worker.finish("error", f"Download failed: {str(e)}")
except Exception: except Exception:
pass logger.exception("Failed to finish worker on download submit error")
self.app.call_from_thread(self._hide_progress) self.app.call_from_thread(self._hide_progress)
self.app.call_from_thread( self.app.call_from_thread(
self.app.notify, self.app.notify,

View File

@@ -8,6 +8,8 @@ from textual.screen import ModalScreen
from textual.widgets import Static, Button, Checkbox, ListView, ListItem from textual.widgets import Static, Button, Checkbox, ListView, ListItem
from textual import work from textual import work
from rich.text import Text from rich.text import Text
import logging
logger = logging.getLogger(__name__)
class MatrixRoomPicker(ModalScreen[List[str]]): class MatrixRoomPicker(ModalScreen[List[str]]):
@@ -121,7 +123,7 @@ class MatrixRoomPicker(ModalScreen[List[str]]):
# Stop propagation so parent handlers (ConfigModal) don't react. # Stop propagation so parent handlers (ConfigModal) don't react.
event.stop() event.stop()
except Exception: except Exception:
pass logger.exception("Failed to stop ListView.Selected event propagation")
def _set_status(self, text: str) -> None: def _set_status(self, text: str) -> None:
if self._status_widget: if self._status_widget:
@@ -137,6 +139,7 @@ class MatrixRoomPicker(ModalScreen[List[str]]):
any_selected = True any_selected = True
break break
except Exception: except Exception:
logger.exception("Error querying checkbox in MatrixRoomPicker; skipping")
continue continue
if self._save_button: if self._save_button:
self._save_button.disabled = not any_selected self._save_button.disabled = not any_selected
@@ -214,7 +217,7 @@ class MatrixRoomPicker(ModalScreen[List[str]]):
cb = self.query_one(f"#{checkbox_id}", Checkbox) cb = self.query_one(f"#{checkbox_id}", Checkbox)
cb.value = True cb.value = True
except Exception: except Exception:
pass logger.exception("Failed to set checkbox value in MatrixRoomPicker")
if self._save_button: if self._save_button:
self._save_button.disabled = False self._save_button.disabled = False
elif event.button.id == "matrix-room-clear": elif event.button.id == "matrix-room-clear":
@@ -223,7 +226,7 @@ class MatrixRoomPicker(ModalScreen[List[str]]):
cb = self.query_one(f"#{checkbox_id}", Checkbox) cb = self.query_one(f"#{checkbox_id}", Checkbox)
cb.value = False cb.value = False
except Exception: except Exception:
pass logger.exception("Failed to set checkbox value to False in MatrixRoomPicker")
if self._save_button: if self._save_button:
self._save_button.disabled = True self._save_button.disabled = True
elif event.button.id == "matrix-room-save": elif event.button.id == "matrix-room-save":
@@ -234,5 +237,5 @@ class MatrixRoomPicker(ModalScreen[List[str]]):
if cb.value and room_id: if cb.value and room_id:
selected.append(room_id) selected.append(room_id)
except Exception: except Exception:
pass logger.exception("Failed to read checkbox state for '%s' while saving MatrixRoomPicker selection", checkbox_id)
self.dismiss(selected) self.dismiss(selected)

View File

@@ -501,7 +501,7 @@ class WorkersModal(ModalScreen):
try: try:
self.stdout_display.cursor_location = (len(combined_text) - 1, 0) self.stdout_display.cursor_location = (len(combined_text) - 1, 0)
except Exception: except Exception:
pass logger.exception("Failed to set stdout_display cursor location")
logger.info("[workers-modal] Updated stdout display successfully") logger.info("[workers-modal] Updated stdout display successfully")
except Exception as e: except Exception as e:
logger.error( logger.error(

View File

@@ -10,6 +10,7 @@ import contextlib
import io import io
import shlex import shlex
import sys import sys
import traceback
from pathlib import Path from pathlib import Path
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import Any, Callable, Dict, List, Optional, Sequence from typing import Any, Callable, Dict, List, Optional, Sequence
@@ -25,8 +26,9 @@ from SYS import pipeline as ctx
from CLI import ConfigLoader from CLI import ConfigLoader
from SYS.pipeline import PipelineExecutor from SYS.pipeline import PipelineExecutor
from SYS.worker import WorkerManagerRegistry from SYS.worker import WorkerManagerRegistry
from SYS.logger import set_debug from SYS.logger import set_debug, debug
from SYS.rich_display import capture_rich_output from SYS.rich_display import capture_rich_output
import traceback
from SYS.result_table import Table from SYS.result_table import Table
@@ -120,7 +122,7 @@ class PipelineRunner:
result.stderr = syntax_error.message result.stderr = syntax_error.message
return result return result
except Exception: except Exception:
pass debug(traceback.format_exc())
try: try:
tokens = shlex.split(normalized) tokens = shlex.split(normalized)
@@ -137,11 +139,12 @@ class PipelineRunner:
try: try:
set_debug(bool(config.get("debug", False))) set_debug(bool(config.get("debug", False)))
except Exception: except Exception:
pass debug(traceback.format_exc())
try: try:
self._worker_manager = WorkerManagerRegistry.ensure(config) self._worker_manager = WorkerManagerRegistry.ensure(config)
except Exception: except Exception:
debug(traceback.format_exc())
self._worker_manager = None self._worker_manager = None
ctx.reset() ctx.reset()
@@ -153,7 +156,7 @@ class PipelineRunner:
seeds = [seeds] seeds = [seeds]
ctx.set_last_result_items_only(list(seeds)) ctx.set_last_result_items_only(list(seeds))
except Exception: except Exception:
pass debug(traceback.format_exc())
stdout_buffer = io.StringIO() stdout_buffer = io.StringIO()
stderr_buffer = io.StringIO() stderr_buffer = io.StringIO()
@@ -173,7 +176,7 @@ class PipelineRunner:
try: try:
ctx.clear_current_command_text() ctx.clear_current_command_text()
except Exception: except Exception:
pass debug(traceback.format_exc())
result.stdout = stdout_buffer.getvalue() result.stdout = stdout_buffer.getvalue()
result.stderr = stderr_buffer.getvalue() result.stderr = stderr_buffer.getvalue()
@@ -268,7 +271,7 @@ class PipelineRunner:
items_copy = items.copy() if isinstance(items, list) else list(items) if items else [] items_copy = items.copy() if isinstance(items, list) else list(items) if items else []
out.append((t, items_copy, subj)) out.append((t, items_copy, subj))
except Exception: except Exception:
pass debug(traceback.format_exc())
return out return out
snap["result_table_history"] = _copy_history(state.result_table_history) snap["result_table_history"] = _copy_history(state.result_table_history)
@@ -306,7 +309,7 @@ class PipelineRunner:
out.append((t, items_copy, subj)) out.append((t, items_copy, subj))
setattr(state, key, out) setattr(state, key, out)
except Exception: except Exception:
pass debug(traceback.format_exc())
try: try:
if "live_progress" in snapshot: if "live_progress" in snapshot:

View File

@@ -203,7 +203,7 @@ def _run_cli(clean_args: List[str]) -> int:
error_msg += f" - detected repo root: {repo}\n" error_msg += f" - detected repo root: {repo}\n"
cli_path = repo / "CLI.py" cli_path = repo / "CLI.py"
error_msg += f" - CLI.py exists at {cli_path}: {cli_path.exists()}\n" error_msg += f" - CLI.py exists at {cli_path}: {cli_path.exists()}\n"
except: except Exception:
pass pass
error_msg += ( error_msg += (
"\nRemedy: Run 'pip install -e scripts' from the project root or re-run the bootstrap script.\n" "\nRemedy: Run 'pip install -e scripts' from the project root or re-run the bootstrap script.\n"

View File

@@ -0,0 +1,109 @@
"""Migration utility: convert Python literal config values in the DB into canonical JSON.
Usage:
python scripts/migrate_config_literals.py [--apply] [--backup=path] [--quiet]
By default the script runs in dry-run mode and prints candidate rows it would change.
Use --apply to persist changes. --backup writes a JSON file listing changed rows before applying.
"""
from __future__ import annotations
import argparse
import sqlite3
import json
import ast
from pathlib import Path
from typing import Any, Dict, List, Tuple
DB = Path("medios.db")
def _is_json_like(s: str) -> bool:
if not isinstance(s, str):
return False
s = s.strip()
if not s:
return False
return s[0] in '{["' or s.lower() in ("true", "false", "null") or s[0].isdigit() or s[0] == "'"
def find_candidates(conn: sqlite3.Connection) -> List[Tuple[int, str, str, str, str, str]]:
    """Scan the ``config`` table for values stored as Python literals.

    A row is a candidate when its value *looks* literal-like, is NOT already
    valid JSON, and parses via ``ast.literal_eval`` into something that
    ``json.dumps`` can serialize. Returns ``(rowid, category, subtype,
    item_name, key, value)`` tuples carrying the *original* value text;
    the actual rewrite happens in ``apply_migration``.
    """
    query = "SELECT rowid, category, subtype, item_name, key, value FROM config"
    candidates: List[Tuple[int, str, str, str, str, str]] = []
    for rowid, category, subtype, item_name, key, value in conn.execute(query).fetchall():
        if value is None:
            continue
        text = str(value)
        if not _is_json_like(text):
            continue
        try:
            json.loads(text)
        except Exception:
            # Not valid JSON — accept only if it is a Python literal that
            # round-trips cleanly through json.dumps.
            try:
                json.dumps(ast.literal_eval(text))
            except Exception:
                continue
            candidates.append((rowid, category, subtype, item_name, key, text))
        # else: already canonical JSON, nothing to migrate.
    return candidates
def apply_migration(conn: sqlite3.Connection, candidates: List[Tuple[int, str, str, str, str, str]]) -> List[Tuple[int, str, str, str, str, str]]:
    """Rewrite each candidate row's value as canonical JSON and commit.

    Rows whose value no longer parses as a Python literal are skipped
    (best-effort, mirroring the dry-run scan). Returns the rows that were
    actually updated, with the *new* JSON value text in place of the old.
    """
    cursor = conn.cursor()
    changed: List[Tuple[int, str, str, str, str, str]] = []
    for rowid, category, subtype, item_name, key, old_value in candidates:
        try:
            canonical = json.dumps(ast.literal_eval(old_value), ensure_ascii=False)
        except Exception:
            continue
        cursor.execute("UPDATE config SET value = ? WHERE rowid = ?", (canonical, rowid))
        changed.append((rowid, category, subtype, item_name, key, canonical))
    conn.commit()
    return changed
def main() -> int:
    """Entry point: scan the config DB for Python-literal values and
    optionally rewrite them as canonical JSON.

    Runs in dry-run mode by default; ``--apply`` persists changes and
    ``--backup`` writes the affected rows to a JSON file first.
    Returns a process exit code (always 0; errors propagate as exceptions).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--apply", action="store_true", help="Persist changes to DB")
    parser.add_argument("--backup", type=str, default=None, help="Path to write backup JSON of changed rows")
    parser.add_argument("--quiet", action="store_true", help="Minimize output")
    # New, backward-compatible: allow overriding the hard-coded DB location.
    parser.add_argument("--db", type=str, default=str(DB), help="Path to the SQLite config database")
    args = parser.parse_args()

    conn = sqlite3.connect(args.db)
    try:
        candidates = find_candidates(conn)
        if not args.quiet:
            print(f"Found {len(candidates)} candidate rows for migration")
            # Cap the preview so a large DB does not flood the terminal.
            for rowid, cat, sub, name, key, val in candidates[:50]:
                print(f"row {rowid}: {cat}.{sub}.{name} {key} -> {val[:200]!r}")
        if not candidates:
            return 0
        if args.backup:
            out_path = Path(args.backup)
            data = [
                dict(rowid=r[0], category=r[1], subtype=r[2], item_name=r[3], key=r[4], value=r[5])
                for r in candidates
            ]
            out_path.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8")
            if not args.quiet:
                print(f"Wrote backup to {out_path}")
        if args.apply:
            changed = apply_migration(conn, candidates)
            if not args.quiet:
                print(f"Applied migration to {len(changed)} rows")
            return 0
        if not args.quiet:
            print("Dry-run; re-run with --apply to persist changes")
        return 0
    finally:
        # Fix: the original never closed the connection; release the SQLite
        # handle on every exit path, including exceptions.
        conn.close()


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -428,7 +428,8 @@ class FlorenceVisionTool:
if self._model is not None and not hasattr(self._model, "_supports_sdpa"): if self._model is not None and not hasattr(self._model, "_supports_sdpa"):
setattr(self._model, "_supports_sdpa", False) setattr(self._model, "_supports_sdpa", False)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set model compatibility flag _supports_sdpa")
try: try:
self._model.to(device) # type: ignore[union-attr] self._model.to(device) # type: ignore[union-attr]
@@ -439,7 +440,8 @@ class FlorenceVisionTool:
try: try:
self._model.eval() # type: ignore[union-attr] self._model.eval() # type: ignore[union-attr]
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set Florence model to eval mode")
try: try:
md = getattr(self._model, "device", None) md = getattr(self._model, "device", None)
@@ -450,7 +452,8 @@ class FlorenceVisionTool:
dt = None dt = None
debug(f"[florencevision] Model loaded: device={md} param_dtype={dt}") debug(f"[florencevision] Model loaded: device={md} param_dtype={dt}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to inspect Florence model device/dtype")
def tags_for_image(self, media_path: Path) -> List[str]: def tags_for_image(self, media_path: Path) -> List[str]:
"""Return Florence-derived tags for an image. """Return Florence-derived tags for an image.
@@ -472,7 +475,8 @@ class FlorenceVisionTool:
try: try:
debug(f"[florencevision] Task prompt: {prompt}") debug(f"[florencevision] Task prompt: {prompt}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit debug Task prompt for FlorenceVision")
max_tags = max(0, int(self.defaults.max_tags or 0)) max_tags = max(0, int(self.defaults.max_tags or 0))
@@ -487,7 +491,8 @@ class FlorenceVisionTool:
try: try:
debug(f"[florencevision] Image loaded: mode={image.mode} size={image.width}x{image.height}") debug(f"[florencevision] Image loaded: mode={image.mode} size={image.width}x{image.height}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit debug for image load")
processor = self._processor processor = self._processor
model = self._model model = self._model
@@ -544,19 +549,22 @@ class FlorenceVisionTool:
) )
continue continue
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug tensor shape for processor key '%s'", k)
if isinstance(v, (list, tuple)): if isinstance(v, (list, tuple)):
has_none = any(x is None for x in v) has_none = any(x is None for x in v)
debug(f"[florencevision] {k}: {type(v).__name__} len={len(v)} has_none={has_none}") debug(f"[florencevision] {k}: {type(v).__name__} len={len(v)} has_none={has_none}")
continue continue
debug(f"[florencevision] {k}: type={type(v).__name__}") debug(f"[florencevision] {k}: type={type(v).__name__}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed while inspecting processor output keys")
try: try:
inputs = inputs.to(model.device) # type: ignore[attr-defined] inputs = inputs.to(model.device) # type: ignore[attr-defined]
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to move processor inputs to device %s", getattr(model, 'device', None))
# Align floating-point input tensors with the model's parameter dtype. # Align floating-point input tensors with the model's parameter dtype.
try: try:
@@ -575,7 +583,8 @@ class FlorenceVisionTool:
except Exception: except Exception:
continue continue
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to inspect/align model dtype for Florence inputs")
try: try:
gen_inputs_all = {k: v for k, v in dict(inputs).items() if v is not None} gen_inputs_all = {k: v for k, v in dict(inputs).items() if v is not None}
@@ -602,7 +611,8 @@ class FlorenceVisionTool:
): ):
gen_inputs["attention_mask"] = attention_mask gen_inputs["attention_mask"] = attention_mask
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to reconcile attention mask shape with input_ids for Florence processor")
try: try:
debug( debug(
@@ -612,18 +622,21 @@ class FlorenceVisionTool:
f"pixel_attention_mask={'pixel_attention_mask' in forward_params}" f"pixel_attention_mask={'pixel_attention_mask' in forward_params}"
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug model forward supports")
try: try:
gen_inputs.setdefault("use_cache", False) gen_inputs.setdefault("use_cache", False)
gen_inputs.setdefault("num_beams", 1) gen_inputs.setdefault("num_beams", 1)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set default gen_inputs values")
try: try:
debug(f"[florencevision] generate kwargs: {sorted(list(gen_inputs.keys()))}") debug(f"[florencevision] generate kwargs: {sorted(list(gen_inputs.keys()))}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug generate kwargs")
pv = gen_inputs.get("pixel_values") pv = gen_inputs.get("pixel_values")
if pv is None: if pv is None:
@@ -654,7 +667,8 @@ class FlorenceVisionTool:
if not hasattr(model, "_supports_sdpa"): if not hasattr(model, "_supports_sdpa"):
setattr(model, "_supports_sdpa", False) setattr(model, "_supports_sdpa", False)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to patch model _supports_sdpa flag in retry handler")
generated_ids = _do_generate(gen_inputs) generated_ids = _do_generate(gen_inputs)
elif "NoneType" in msg and "shape" in msg: elif "NoneType" in msg and "shape" in msg:
retry_inputs = dict(gen_inputs) retry_inputs = dict(gen_inputs)
@@ -676,7 +690,8 @@ class FlorenceVisionTool:
): ):
retry_inputs["attention_mask"] = am retry_inputs["attention_mask"] = am
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed while filling retry_inputs attention_mask in AttributeError handler")
try: try:
import torch import torch
@@ -692,14 +707,16 @@ class FlorenceVisionTool:
elif "pixel_attention_mask" in forward_params and "pixel_attention_mask" not in retry_inputs: elif "pixel_attention_mask" in forward_params and "pixel_attention_mask" not in retry_inputs:
retry_inputs["pixel_attention_mask"] = mask retry_inputs["pixel_attention_mask"] = mask
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to build mask or adjust retry_inputs in AttributeError handler")
try: try:
debug( debug(
f"[florencevision] generate retry kwargs: {sorted(list(retry_inputs.keys()))}" f"[florencevision] generate retry kwargs: {sorted(list(retry_inputs.keys()))}"
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug generate retry kwargs")
generated_ids = _do_generate(retry_inputs) generated_ids = _do_generate(retry_inputs)
else: else:
@@ -708,7 +725,8 @@ class FlorenceVisionTool:
try: try:
debug(f"[florencevision] generated_ids type={type(generated_ids).__name__}") debug(f"[florencevision] generated_ids type={type(generated_ids).__name__}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug generated_ids type")
seq = getattr(generated_ids, "sequences", generated_ids) seq = getattr(generated_ids, "sequences", generated_ids)
generated_text = processor.batch_decode(seq, skip_special_tokens=False)[0] generated_text = processor.batch_decode(seq, skip_special_tokens=False)[0]
@@ -719,7 +737,8 @@ class FlorenceVisionTool:
debug(f"[florencevision] prompt run failed: {type(exc).__name__}: {exc}") debug(f"[florencevision] prompt run failed: {type(exc).__name__}: {exc}")
debug("[florencevision] traceback:\n" + traceback.format_exc()) debug("[florencevision] traceback:\n" + traceback.format_exc())
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit debug for prompt run failure: %s", exc)
raise raise
parsed = None parsed = None
@@ -766,12 +785,14 @@ class FlorenceVisionTool:
debug(f"[florencevision] post_process[{k!r}] type={type(parsed.get(k)).__name__}") debug(f"[florencevision] post_process[{k!r}] type={type(parsed.get(k)).__name__}")
debug("[florencevision] post_process[key] repr:\n" + _debug_repr(parsed.get(k))) debug("[florencevision] post_process[key] repr:\n" + _debug_repr(parsed.get(k)))
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed while debugging parsed post_process output for prompt %s", task_prompt)
else: else:
debug(f"[florencevision] post_process_generation: type={type(parsed).__name__}") debug(f"[florencevision] post_process_generation: type={type(parsed).__name__}")
debug("[florencevision] post_process repr:\n" + _debug_repr(parsed)) debug("[florencevision] post_process repr:\n" + _debug_repr(parsed))
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to post-process generated output for prompt %s", task_prompt)
return generated_text, parsed, seq return generated_text, parsed, seq
@@ -800,7 +821,8 @@ class FlorenceVisionTool:
try: try:
debug(f"[florencevision] candidate label strings ({len(labels)}): {labels!r}") debug(f"[florencevision] candidate label strings ({len(labels)}): {labels!r}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit candidate label strings debug")
out: List[str] = [] out: List[str] = []
seen: set[str] = set() seen: set[str] = set()
@@ -848,7 +870,8 @@ class FlorenceVisionTool:
for raw_lab, cleaned, reason in dropped: for raw_lab, cleaned, reason in dropped:
debug(f"[florencevision] drop reason={reason} raw={raw_lab!r} cleaned={cleaned!r}") debug(f"[florencevision] drop reason={reason} raw={raw_lab!r} cleaned={cleaned!r}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit cleaned/dropped tags debug info")
return labels, caption_candidates, out, dropped return labels, caption_candidates, out, dropped
@@ -871,7 +894,12 @@ class FlorenceVisionTool:
try: try:
return max(cleaned, key=lambda s: len(str(s)), default=None) return max(cleaned, key=lambda s: len(str(s)), default=None)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to choose best caption from cleaned candidates")
try:
return max(raw, key=lambda s: len(str(s)), default=None)
except Exception:
return None
try: try:
return max(raw, key=lambda s: len(str(s)), default=None) return max(raw, key=lambda s: len(str(s)), default=None)
except Exception: except Exception:
@@ -936,7 +964,8 @@ class FlorenceVisionTool:
try: try:
debug(f"[florencevision] grounding prompt: {grounding_prompt}") debug(f"[florencevision] grounding prompt: {grounding_prompt}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit grounding prompt debug")
grd_text, grd_parsed, _grd_seq = _run_prompt(grounding_prompt) grd_text, grd_parsed, _grd_seq = _run_prompt(grounding_prompt)
_grd_labels, grd_captions, grd_cleaned, _grd_dropped = _extract_labels_and_captions(grounding_prompt, grd_text, grd_parsed) _grd_labels, grd_captions, grd_cleaned, _grd_dropped = _extract_labels_and_captions(grounding_prompt, grd_text, grd_parsed)
@@ -962,6 +991,8 @@ class FlorenceVisionTool:
is_combo = "<|detailed_caption|>" in prompt and "<|grounding|>" in prompt is_combo = "<|detailed_caption|>" in prompt and "<|grounding|>" in prompt
only_task_tokens = not final_tags or all(t in {"object_detection", "grounding", "tag"} for t in final_tags) only_task_tokens = not final_tags or all(t in {"object_detection", "grounding", "tag"} for t in final_tags)
except Exception: except Exception:
from SYS.logger import logger
logger.exception("Failed to compute is_combo/only_task_tokens for prompt '%s'", prompt)
is_combo = False is_combo = False
only_task_tokens = False only_task_tokens = False
@@ -973,13 +1004,15 @@ class FlorenceVisionTool:
try: try:
self.defaults.task = "<|detailed_caption|>" self.defaults.task = "<|detailed_caption|>"
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set self.defaults.task to '<|detailed_caption|>' during od retry")
final_tags = self.tags_for_image(media_path) final_tags = self.tags_for_image(media_path)
finally: finally:
try: try:
self.defaults.task = original_task self.defaults.task = original_task
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to restore self.defaults.task after od retry")
self._od_tag_retrying = False self._od_tag_retrying = False
self._last_caption = caption_text if caption_text else None self._last_caption = caption_text if caption_text else None

View File

@@ -48,7 +48,8 @@ def _resolve_out_dir(arg_outdir: Optional[Union[str, Path]]) -> Path:
try: try:
p.mkdir(parents=True, exist_ok=True) p.mkdir(parents=True, exist_ok=True)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to create resolved output dir %s", p)
return p return p
except Exception: except Exception:
return Path(tempfile.mkdtemp(prefix="pwdl_")) return Path(tempfile.mkdtemp(prefix="pwdl_"))
@@ -425,17 +426,20 @@ def config_schema() -> List[Dict[str, Any]]:
if context is not None: if context is not None:
context.close() context.close()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to close Playwright context")
try: try:
if browser is not None: if browser is not None:
browser.close() browser.close()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to close Playwright browser")
try: try:
if pw is not None: if pw is not None:
pw.stop() pw.stop()
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to stop Playwright engine")
def goto(self, page: Any, url: str) -> None: def goto(self, page: Any, url: str) -> None:
"""Navigate with configured timeout.""" """Navigate with configured timeout."""
@@ -564,7 +568,8 @@ def config_schema() -> List[Dict[str, Any]]:
page.mouse.move(box['x'] + box['width'] / 2, box['y'] + box['height'] / 2) page.mouse.move(box['x'] + box['width'] / 2, box['y'] + box['height'] / 2)
page.mouse.click(box['x'] + box['width'] / 2, box['y'] + box['height'] / 2) page.mouse.click(box['x'] + box['width'] / 2, box['y'] + box['height'] / 2)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to perform mouse click for selector '%s'", selector)
resp = page.wait_for_response( resp = page.wait_for_response(
lambda r: r.status == 200 and any(k.lower() == 'content-disposition' for k in r.headers.keys()), lambda r: r.status == 200 and any(k.lower() == 'content-disposition' for k in r.headers.keys()),
@@ -583,7 +588,8 @@ def config_schema() -> List[Dict[str, Any]]:
try: try:
debug(f"[playwright] attempt failed (headless={mode}): {traceback.format_exc()}") debug(f"[playwright] attempt failed (headless={mode}): {traceback.format_exc()}")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to emit debug info for Playwright attempt failure")
continue continue
return PlaywrightDownloadResult(ok=False, error=last_error or "no download captured") return PlaywrightDownloadResult(ok=False, error=last_error or "no download captured")
@@ -596,7 +602,8 @@ def config_schema() -> List[Dict[str, Any]]:
f"nav_timeout_ms={self.defaults.navigation_timeout_ms}" f"nav_timeout_ms={self.defaults.navigation_timeout_ms}"
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug_dump Playwright defaults")
def _wait_for_block_clear(self, page: Any, timeout_ms: int = 8000) -> bool: def _wait_for_block_clear(self, page: Any, timeout_ms: int = 8000) -> bool:
try: try:

View File

@@ -131,7 +131,8 @@ def _build_supported_domains() -> set[str]:
domains = extract_domains(regex) domains = extract_domains(regex)
_SUPPORTED_DOMAINS.update(domains) _SUPPORTED_DOMAINS.update(domains)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to build supported domains from yt-dlp extractors")
return _SUPPORTED_DOMAINS return _SUPPORTED_DOMAINS
@@ -299,7 +300,8 @@ def _add_browser_cookies_if_available(options: Dict[str, Any], preferred_browser
log(f"Requested browser cookie DB '{preferred_browser}' not found; falling back to autodetect.") log(f"Requested browser cookie DB '{preferred_browser}' not found; falling back to autodetect.")
_BROWSER_COOKIE_WARNING_EMITTED = True _BROWSER_COOKIE_WARNING_EMITTED = True
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to check browser cookie path for preferred browser '%s'", preferred_browser)
# Auto-detect in common order (chrome/chromium/brave) # Auto-detect in common order (chrome/chromium/brave)
for candidate in ("chrome", "chromium", "brave"): for candidate in ("chrome", "chromium", "brave"):
@@ -308,6 +310,8 @@ def _add_browser_cookies_if_available(options: Dict[str, Any], preferred_browser
options["cookiesfrombrowser"] = [candidate] options["cookiesfrombrowser"] = [candidate]
return return
except Exception: except Exception:
from SYS.logger import logger
logger.exception("Error while checking cookie path for candidate browser '%s'", candidate)
continue continue
if not _BROWSER_COOKIE_WARNING_EMITTED: if not _BROWSER_COOKIE_WARNING_EMITTED:
@@ -650,7 +654,8 @@ def format_for_table_selection(
if vcodec != "none" and acodec == "none" and format_id: if vcodec != "none" and acodec == "none" and format_id:
selection_format_id = f"{format_id}+ba" selection_format_id = f"{format_id}+ba"
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to compute selection_format_id for format: %s", fmt)
# Format file size # Format file size
size_str = "" size_str = ""
@@ -661,7 +666,8 @@ def format_for_table_selection(
size_mb = float(size_bytes) / (1024 * 1024) size_mb = float(size_bytes) / (1024 * 1024)
size_str = f"{size_prefix}{size_mb:.1f}MB" size_str = f"{size_prefix}{size_mb:.1f}MB"
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to compute size string for format: %s", fmt)
# Build description # Build description
desc_parts: List[str] = [] desc_parts: List[str] = []
@@ -755,7 +761,8 @@ class YtDlpTool:
if resolved is not None and resolved.is_file(): if resolved is not None and resolved.is_file():
return resolved return resolved
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to initialize cookiefile using resolve_cookies_path")
return None return None
def resolve_height_selector(self, format_str: Optional[str]) -> Optional[str]: def resolve_height_selector(self, format_str: Optional[str]) -> Optional[str]:
@@ -908,13 +915,15 @@ class YtDlpTool:
if bundled_ffmpeg_dir.exists(): if bundled_ffmpeg_dir.exists():
base_options.setdefault("ffmpeg_location", str(bundled_ffmpeg_dir)) base_options.setdefault("ffmpeg_location", str(bundled_ffmpeg_dir))
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to inspect bundled ffmpeg directory")
try: try:
if os.name == "nt": if os.name == "nt":
base_options.setdefault("file_access_retries", 40) base_options.setdefault("file_access_retries", 40)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set Windows-specific yt-dlp options")
if opts.cookies_path and opts.cookies_path.is_file(): if opts.cookies_path and opts.cookies_path.is_file():
base_options["cookiefile"] = str(opts.cookies_path) base_options["cookiefile"] = str(opts.cookies_path)
@@ -948,7 +957,8 @@ class YtDlpTool:
opts = _dc.replace(opts, mode="audio", ytdl_format=None) opts = _dc.replace(opts, mode="audio", ytdl_format=None)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set opts mode to audio via dataclasses.replace")
elif opts.ytdl_format == "video": elif opts.ytdl_format == "video":
try: try:
opts = opts._replace(mode="video", ytdl_format=None) opts = opts._replace(mode="video", ytdl_format=None)
@@ -958,7 +968,8 @@ class YtDlpTool:
opts = _dc.replace(opts, mode="video", ytdl_format=None) opts = _dc.replace(opts, mode="video", ytdl_format=None)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set opts mode to video via dataclasses.replace")
if opts.no_playlist: if opts.no_playlist:
base_options["noplaylist"] = True base_options["noplaylist"] = True
@@ -978,7 +989,8 @@ class YtDlpTool:
opts = _dc.replace(opts, mode="audio") opts = _dc.replace(opts, mode="audio")
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set opts mode to audio via dataclasses.replace (configured default)")
ytdl_format = None ytdl_format = None
else: else:
# Leave ytdl_format None so that default_format(opts.mode) # Leave ytdl_format None so that default_format(opts.mode)
@@ -1130,7 +1142,8 @@ class YtDlpTool:
try: try:
debug("yt-dlp argv: " + " ".join(str(a) for a in argv)) debug("yt-dlp argv: " + " ".join(str(a) for a in argv))
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to debug-print yt-dlp CLI arguments")
def config_schema() -> List[Dict[str, Any]]: def config_schema() -> List[Dict[str, Any]]:
@@ -1150,6 +1163,8 @@ def config_schema() -> List[Dict[str, Any]]:
if _browser_cookie_path_for(b) is not None: if _browser_cookie_path_for(b) is not None:
browser_choices.append(b) browser_choices.append(b)
except Exception: except Exception:
from SYS.logger import logger
logger.exception("Error while checking cookie path for browser '%s'", b)
continue continue
return [ return [
@@ -1410,7 +1425,8 @@ def _download_with_sections_via_cli(
try: try:
_set_pipe_percent(current) _set_pipe_percent(current)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set pipeline percent to %d", current)
def start(self) -> None: def start(self) -> None:
if self._thread is not None or self._start_pct >= self._max_pct: if self._thread is not None or self._start_pct >= self._max_pct:
@@ -1426,7 +1442,8 @@ def _download_with_sections_via_cli(
try: try:
_set_pipe_percent(self._max_pct) _set_pipe_percent(self._max_pct)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set pipeline percent to max %d", self._max_pct)
session_id = hashlib.md5((url + str(time.time()) + "".join(random.choices(string.ascii_letters, k=10))).encode()).hexdigest()[:12] session_id = hashlib.md5((url + str(time.time()) + "".join(random.choices(string.ascii_letters, k=10))).encode()).hexdigest()[:12]
first_section_info = None first_section_info = None
@@ -1440,7 +1457,8 @@ def _download_with_sections_via_cli(
try: try:
_set_pipe_percent(display_pct) _set_pipe_percent(display_pct)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set pipeline percent to display_pct %d for section %d", display_pct, section_idx)
pipeline.set_status(f"Downloading & clipping clip section {section_idx}/{total_sections}") pipeline.set_status(f"Downloading & clipping clip section {section_idx}/{total_sections}")
@@ -1484,7 +1502,8 @@ def _download_with_sections_via_cli(
try: try:
cmd.extend(["--ffmpeg-location", str(ytdl_options["ffmpeg_location"])]) cmd.extend(["--ffmpeg-location", str(ytdl_options["ffmpeg_location"])])
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to append ffmpeg_location CLI option")
if ytdl_options.get("format"): if ytdl_options.get("format"):
cmd.extend(["-f", ytdl_options["format"]]) cmd.extend(["-f", ytdl_options["format"]])
if ytdl_options.get("merge_output_format"): if ytdl_options.get("merge_output_format"):
@@ -1547,7 +1566,8 @@ def _download_with_sections_via_cli(
try: try:
_set_pipe_percent(99) _set_pipe_percent(99)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to set pipeline percent to 99 at end of multi-section job")
return session_id, first_section_info or {} return session_id, first_section_info or {}
@@ -1654,7 +1674,8 @@ def _progress_callback(status: Dict[str, Any]) -> None:
if isinstance(value, (int, float)) and value > 0: if isinstance(value, (int, float)) and value > 0:
return int(value) return int(value)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to interpret total bytes value: %r", value)
return None return None
if event == "downloading": if event == "downloading":
@@ -1669,7 +1690,8 @@ def _progress_callback(status: Dict[str, Any]) -> None:
total=_total_bytes(total), total=_total_bytes(total),
) )
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to update pipeline transfer for label '%s'", label)
else: else:
_YTDLP_PROGRESS_BAR.update( _YTDLP_PROGRESS_BAR.update(
downloaded=int(downloaded) if downloaded is not None else None, downloaded=int(downloaded) if downloaded is not None else None,
@@ -1683,7 +1705,8 @@ def _progress_callback(status: Dict[str, Any]) -> None:
if _YTDLP_TRANSFER_STATE.get(label, {}).get("started"): if _YTDLP_TRANSFER_STATE.get(label, {}).get("started"):
pipeline.finish_transfer(label=label) pipeline.finish_transfer(label=label)
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to finish pipeline transfer for label '%s'", label)
_YTDLP_TRANSFER_STATE.pop(label, None) _YTDLP_TRANSFER_STATE.pop(label, None)
else: else:
_YTDLP_PROGRESS_BAR.finish() _YTDLP_PROGRESS_BAR.finish()
@@ -1848,7 +1871,8 @@ def download_media(opts: DownloadOptions, *, config: Optional[Dict[str, Any]] =
if cand.suffix.lower() in {".json", ".info.json"}: if cand.suffix.lower() in {".json", ".info.json"}:
continue continue
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to inspect candidate suffix for %s", cand)
media_file = cand media_file = cand
break break
if media_file is None and media_candidates: if media_file is None and media_candidates:
@@ -1870,10 +1894,13 @@ def download_media(opts: DownloadOptions, *, config: Optional[Dict[str, Any]] =
if name.startswith(prefix): if name.startswith(prefix):
return name[len(prefix):] return name[len(prefix):]
except Exception: except Exception:
pass from SYS.logger import logger
logger.exception("Failed to check name prefix for '%s'", name)
try: try:
return Path(name).suffix return Path(name).suffix
except Exception: except Exception:
from SYS.logger import logger
logger.exception("Failed to obtain suffix for name '%s'", name)
return "" return ""
try: try:
@@ -1884,7 +1911,8 @@ def download_media(opts: DownloadOptions, *, config: Optional[Dict[str, Any]] =
try: try:
media_file.unlink() media_file.unlink()
except OSError: except OSError:
pass from SYS.logger import logger
logger.exception("Failed to unlink duplicate media file %s", media_file)
else: else:
media_file.rename(new_media_path) media_file.rename(new_media_path)
debug(f"Renamed section file: {media_file.name} -> {new_media_name}") debug(f"Renamed section file: {media_file.name} -> {new_media_name}")