style: apply ruff auto-fixes

This commit is contained in:
2026-01-19 03:14:30 -08:00
parent 3ab122a55d
commit a961ac3ce7
72 changed files with 2477 additions and 2871 deletions

View File

@@ -499,6 +499,9 @@ class Add_File(Cmdlet):
pending_url_associations: Dict[str,
List[tuple[str,
List[str]]]] = {}
pending_tag_associations: Dict[str,
List[tuple[str,
List[str]]]] = {}
successes = 0
failures = 0
@@ -612,6 +615,8 @@ class Add_File(Cmdlet):
collect_relationship_pairs=pending_relationship_pairs,
defer_url_association=defer_url_association,
pending_url_associations=pending_url_associations,
defer_tag_association=defer_tag_association,
pending_tag_associations=pending_tag_associations,
suppress_last_stage_overlay=want_final_search_file,
auto_search_file=auto_search_file_after_add,
store_instance=storage_registry,
@@ -664,6 +669,17 @@ class Add_File(Cmdlet):
except Exception:
pass
# Apply deferred tag associations (bulk) if collected
if pending_tag_associations:
try:
Add_File._apply_pending_tag_associations(
pending_tag_associations,
config,
store_instance=storage_registry
)
except Exception:
pass
# Always end add-file -store (when last stage) by showing item detail panels.
# Legacy search-file refresh is no longer used for final display.
if want_final_search_file and collected_payloads:
@@ -1854,6 +1870,10 @@ class Add_File(Cmdlet):
pending_url_associations: Optional[Dict[str,
List[tuple[str,
List[str]]]]] = None,
defer_tag_association: bool = False,
pending_tag_associations: Optional[Dict[str,
List[tuple[str,
List[str]]]]] = None,
suppress_last_stage_overlay: bool = False,
auto_search_file: bool = True,
store_instance: Optional[Store] = None,
@@ -2072,15 +2092,22 @@ class Add_File(Cmdlet):
resolved_hash = chosen_hash
if hydrus_like_backend and tags:
try:
adder = getattr(backend, "add_tag", None)
if callable(adder):
debug(
f"[add-file] Applying {len(tags)} tag(s) post-upload to Hydrus"
)
adder(resolved_hash, list(tags))
except Exception as exc:
log(f"[add-file] Hydrus post-upload tagging failed: {exc}", file=sys.stderr)
# Support deferring tag application for batching bulk operations
if defer_tag_association and pending_tag_associations is not None:
try:
pending_tag_associations.setdefault(str(backend_name), []).append((str(resolved_hash), list(tags)))
except Exception:
pass
else:
try:
adder = getattr(backend, "add_tag", None)
if callable(adder):
debug(
f"[add-file] Applying {len(tags)} tag(s) post-upload to Hydrus"
)
adder(resolved_hash, list(tags))
except Exception as exc:
log(f"[add-file] Hydrus post-upload tagging failed: {exc}", file=sys.stderr)
# If we have url(s), ensure they get associated with the destination file.
# This mirrors `add-url` behavior but avoids emitting extra pipeline noise.
@@ -2322,6 +2349,47 @@ class Add_File(Cmdlet):
except Exception:
continue
@staticmethod
def _apply_pending_tag_associations(
pending: Dict[str,
List[tuple[str,
List[str]]]],
config: Dict[str,
Any],
store_instance: Optional[Store] = None,
) -> None:
"""Apply deferred tag associations in bulk, grouped per backend."""
try:
store = store_instance if store_instance is not None else Store(config)
except Exception:
return
for backend_name, pairs in (pending or {}).items():
if not pairs:
continue
try:
backend = store[backend_name]
except Exception:
continue
# Try bulk variant first
bulk = getattr(backend, "add_tags_bulk", None)
if callable(bulk):
try:
bulk([(h, t) for h, t in pairs])
continue
except Exception:
pass
single = getattr(backend, "add_tag", None)
if callable(single):
for h, t in pairs:
try:
single(h, t)
except Exception:
continue
@staticmethod
def _load_sidecar_bundle(
media_path: Path,

View File

@@ -1097,7 +1097,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
]
if not relationship_tags:
log(f"No relationship tags found in sidecar", file=sys.stderr)
log("No relationship tags found in sidecar", file=sys.stderr)
return 0 # Not an error, just nothing to do
# Get the file hash from result (should have been set by add-file)
@@ -1166,7 +1166,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
)
return 0
elif error_count == 0:
log(f"No relationships to set", file=sys.stderr)
log("No relationships to set", file=sys.stderr)
return 0 # Success with nothing to do
else:
log(f"Failed with {error_count} error(s)", file=sys.stderr)

View File

@@ -1,6 +1,6 @@
from __future__ import annotations
from typing import Any, Dict, List, Optional, Sequence, Tuple
from typing import Any, Dict, List, Sequence, Tuple
import sys
from SYS import pipeline as ctx

View File

@@ -7,7 +7,6 @@ import sys
from pathlib import Path
from SYS.logger import debug, log
from SYS.utils import format_bytes
from Store.Folder import Folder
from Store import Store
from . import _shared as sh

View File

@@ -2,10 +2,8 @@ from __future__ import annotations
from typing import Any, Dict, Sequence
from pathlib import Path
import json
import sys
from SYS import models
from SYS import pipeline as ctx
from . import _shared as sh

View File

@@ -1,6 +1,6 @@
from __future__ import annotations
from typing import Any, Dict, List, Optional, Sequence, Tuple
from typing import Any, Dict, List, Sequence, Tuple
import sys
from SYS import pipeline as ctx

View File

@@ -15,7 +15,6 @@ from typing import Any, Dict, List, Optional, Sequence
from urllib.parse import urlparse
from contextlib import AbstractContextManager, nullcontext
import requests
from API.HTTP import _download_direct_file
from SYS.models import DownloadError, DownloadOptions, DownloadMediaResult
@@ -26,7 +25,6 @@ from SYS.rich_display import stderr_console as get_stderr_console
from SYS import pipeline as pipeline_context
from SYS.utils import sha256_file
from SYS.metadata import normalize_urls as normalize_url_list
from rich.prompt import Confirm
from tool.ytdlp import (
YtDlpTool,
@@ -948,7 +946,7 @@ class Download_File(Cmdlet):
from Store import Store
from API.HydrusNetwork import is_hydrus_available
debug(f"[download-file] Initializing storage interface...")
debug("[download-file] Initializing storage interface...")
storage = Store(config=config or {}, suppress_debug=True)
hydrus_available = bool(is_hydrus_available(config or {}))
@@ -1338,7 +1336,7 @@ class Download_File(Cmdlet):
table.set_source_command("download-file", [url])
debug(f"[ytdlp.formatlist] Displaying format selection table for {url}")
debug(f"[ytdlp.formatlist] Provider: ytdlp (routing to download-file via TABLE_AUTO_STAGES)")
debug("[ytdlp.formatlist] Provider: ytdlp (routing to download-file via TABLE_AUTO_STAGES)")
results_list: List[Dict[str, Any]] = []
for idx, fmt in enumerate(filtered_formats, 1):
@@ -1420,7 +1418,7 @@ class Download_File(Cmdlet):
f"[ytdlp.formatlist] When user selects @N, will invoke: download-file {url} -query 'format:<format_id>'"
)
log(f"", file=sys.stderr)
log("", file=sys.stderr)
return 0
return None
@@ -2054,7 +2052,7 @@ class Download_File(Cmdlet):
forced_single_format_id = None
forced_single_format_for_batch = False
debug(f"[download-file] Checking if format table should be shown...")
debug("[download-file] Checking if format table should be shown...")
early_ret = self._maybe_show_format_table_for_single_url(
mode=mode,
clip_spec=clip_spec,
@@ -2763,7 +2761,7 @@ class Download_File(Cmdlet):
debug(f"[download-file] Processing {total_selection} selected item(s) from table...")
for idx, run_args in enumerate(selection_runs, 1):
debug(f"[download-file] Item {idx}/{total_selection}: {run_args}")
debug(f"[download-file] Re-invoking download-file for selected item...")
debug("[download-file] Re-invoking download-file for selected item...")
exit_code = self._run_impl(None, run_args, config)
if exit_code == 0:
successes += 1

View File

@@ -92,7 +92,7 @@ class Get_File(sh.Cmdlet):
debug(f"[get-file] Backend retrieved: {type(backend).__name__}")
# Get file metadata to determine name and extension
debug(f"[get-file] Getting metadata for hash...")
debug("[get-file] Getting metadata for hash...")
metadata = backend.get_metadata(file_hash)
if not metadata:
log(f"Error: File metadata not found for hash {file_hash}")
@@ -228,7 +228,7 @@ class Get_File(sh.Cmdlet):
}
)
debug(f"[get-file] Completed successfully")
debug("[get-file] Completed successfully")
return 0
def _open_file_default(self, path: Path) -> None:

View File

@@ -5,7 +5,6 @@ import json
import sys
from SYS.logger import log
from pathlib import Path
from . import _shared as sh

View File

@@ -7,7 +7,6 @@ import sys
from SYS.logger import log
from SYS import pipeline as ctx
from SYS.result_table import Table
from . import _shared as sh
Cmdlet = sh.Cmdlet

View File

@@ -1,13 +1,11 @@
from __future__ import annotations
from typing import Any, Dict, Sequence, List, Optional
import json
from typing import Any, Dict, Sequence, Optional
import sys
from pathlib import Path
from SYS.logger import log
from SYS import models
from SYS import pipeline as ctx
from API import HydrusNetwork as hydrus_wrapper
from . import _shared as sh
@@ -22,8 +20,6 @@ fetch_hydrus_metadata = sh.fetch_hydrus_metadata
should_show_help = sh.should_show_help
get_field = sh.get_field
from API.folder import API_folder_store
from SYS.config import get_local_storage_path
from SYS.result_table import Table
from Store import Store
CMDLET = Cmdlet(
@@ -512,7 +508,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if source_title and source_title != "Unknown":
metadata["Title"] = source_title
table = ItemDetailView(f"Relationships", item_metadata=metadata
table = ItemDetailView("Relationships", item_metadata=metadata
).init_command("get-relationship",
[])

View File

@@ -25,8 +25,7 @@ from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple
from SYS import pipeline as ctx
from API import HydrusNetwork
from API.folder import read_sidecar, write_sidecar, find_sidecar, API_folder_store
from API.folder import read_sidecar, write_sidecar
from . import _shared as sh
normalize_hash = sh.normalize_hash
@@ -36,7 +35,6 @@ CmdletArg = sh.CmdletArg
SharedArgs = sh.SharedArgs
parse_cmdlet_args = sh.parse_cmdlet_args
get_field = sh.get_field
from SYS.config import get_local_storage_path
try:
from SYS.metadata import extract_title
@@ -944,7 +942,7 @@ def _scrape_url_metadata(
)
except json_module.JSONDecodeError:
pass
except Exception as e:
except Exception:
pass # Silently ignore if we can't get playlist entries
# Fallback: if still no tags detected, get from first item

View File

@@ -320,7 +320,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
f"Mixed file types detected: {', '.join(sorted(file_types))}",
file=sys.stderr
)
log(f"Can only merge files of the same type", file=sys.stderr)
log("Can only merge files of the same type", file=sys.stderr)
return 1
file_kind = list(file_types)[0] if file_types else "other"
@@ -524,7 +524,7 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
current_time_ms = 0
log(f"Analyzing {len(files)} files for chapter information...", file=sys.stderr)
logger.info(f"[merge-file] Analyzing files for chapters")
logger.info("[merge-file] Analyzing files for chapters")
for file_path in files:
# Get duration using ffprobe
@@ -767,14 +767,14 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
logger.exception(f"[merge-file] ffmpeg process error: {e}")
raise
log(f"Merge successful, adding chapters metadata...", file=sys.stderr)
log("Merge successful, adding chapters metadata...", file=sys.stderr)
# Step 5: Embed chapters into container (MKA, MP4/M4A, or note limitation)
if output_format == "mka" or output.suffix.lower() == ".mka":
# MKA/MKV format has native chapter support via FFMetadata
# Re-mux the file with chapters embedded (copy streams, no re-encode)
log(f"Embedding chapters into Matroska container...", file=sys.stderr)
logger.info(f"[merge-file] Adding chapters to MKA file via FFMetadata")
log("Embedding chapters into Matroska container...", file=sys.stderr)
logger.info("[merge-file] Adding chapters to MKA file via FFMetadata")
temp_output = output.parent / f".temp_{output.stem}.mka"
@@ -783,7 +783,7 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
if mkvmerge_path:
# mkvmerge is the best tool for embedding chapters in Matroska files
log(f"Using mkvmerge for optimal chapter embedding...", file=sys.stderr)
log("Using mkvmerge for optimal chapter embedding...", file=sys.stderr)
cmd2 = [
mkvmerge_path,
"-o",
@@ -795,7 +795,7 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
else:
# Fallback to ffmpeg with proper chapter embedding for Matroska
log(
f"Using ffmpeg for chapter embedding (install mkvtoolnix for better quality)...",
"Using ffmpeg for chapter embedding (install mkvtoolnix for better quality)...",
file=sys.stderr,
)
# For Matroska files, the metadata must be provided via -f ffmetadata input
@@ -838,12 +838,12 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
if output.exists():
output.unlink()
shutil.move(str(temp_output), str(output))
log(f"✓ Chapters successfully embedded!", file=sys.stderr)
logger.info(f"[merge-file] Chapters embedded successfully")
log("✓ Chapters successfully embedded!", file=sys.stderr)
logger.info("[merge-file] Chapters embedded successfully")
except Exception as e:
logger.warning(f"[merge-file] Could not replace file: {e}")
log(
f"Warning: Could not embed chapters, using merge without chapters",
"Warning: Could not embed chapters, using merge without chapters",
file=sys.stderr,
)
try:
@@ -852,12 +852,12 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
pass
else:
logger.warning(
f"[merge-file] Chapter embedding did not create output"
"[merge-file] Chapter embedding did not create output"
)
except Exception as e:
logger.exception(f"[merge-file] Chapter embedding failed: {e}")
log(
f"Warning: Chapter embedding failed, using merge without chapters",
"Warning: Chapter embedding failed, using merge without chapters",
file=sys.stderr,
)
elif output_format in {"m4a",
@@ -865,15 +865,15 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
".m4b",
".mp4"]:
# MP4/M4A format has native chapter support via iTunes metadata atoms
log(f"Embedding chapters into MP4 container...", file=sys.stderr)
log("Embedding chapters into MP4 container...", file=sys.stderr)
logger.info(
f"[merge-file] Adding chapters to M4A/MP4 file via iTunes metadata"
"[merge-file] Adding chapters to M4A/MP4 file via iTunes metadata"
)
temp_output = output.parent / f".temp_{output.stem}{output.suffix}"
# ffmpeg embeds chapters in MP4 using -map_metadata and -map_chapters
log(f"Using ffmpeg for MP4 chapter embedding...", file=sys.stderr)
log("Using ffmpeg for MP4 chapter embedding...", file=sys.stderr)
cmd2 = [
ffmpeg_path,
"-y",
@@ -916,14 +916,14 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
output.unlink()
shutil.move(str(temp_output), str(output))
log(
f"✓ Chapters successfully embedded in MP4!",
"✓ Chapters successfully embedded in MP4!",
file=sys.stderr
)
logger.info(f"[merge-file] MP4 chapters embedded successfully")
logger.info("[merge-file] MP4 chapters embedded successfully")
except Exception as e:
logger.warning(f"[merge-file] Could not replace file: {e}")
log(
f"Warning: Could not embed chapters, using merge without chapters",
"Warning: Could not embed chapters, using merge without chapters",
file=sys.stderr,
)
try:
@@ -932,12 +932,12 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
pass
else:
logger.warning(
f"[merge-file] MP4 chapter embedding did not create output"
"[merge-file] MP4 chapter embedding did not create output"
)
except Exception as e:
logger.exception(f"[merge-file] MP4 chapter embedding failed: {e}")
log(
f"Warning: MP4 chapter embedding failed, using merge without chapters",
"Warning: MP4 chapter embedding failed, using merge without chapters",
file=sys.stderr,
)
else:
@@ -945,7 +945,7 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
logger.info(
f"[merge-file] Format {output_format} does not have native chapter support"
)
log(f"Note: For chapter support, use MKA or M4A format", file=sys.stderr)
log("Note: For chapter support, use MKA or M4A format", file=sys.stderr)
# Clean up temp files
try:

View File

@@ -4,7 +4,7 @@ import sys
from typing import Any, Dict, Iterable, Sequence
from . import _shared as sh
from SYS.logger import log, debug
from SYS.logger import log
from SYS import pipeline as ctx
from SYS.result_table_adapters import get_provider
@@ -43,7 +43,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
provider = get_provider(provider_name)
except Exception as exc:
except Exception:
log(f"Unknown provider: {provider_name}", file=sys.stderr)
return 1

View File

@@ -656,7 +656,7 @@ def _capture(
# Attempt platform-specific target capture if requested (and not PDF)
element_captured = False
if options.prefer_platform_target and format_name != "pdf":
debug(f"[_capture] Target capture enabled")
debug("[_capture] Target capture enabled")
debug("Attempting platform-specific content capture...")
progress.step("capturing locating target")
try:
@@ -913,7 +913,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
url_to_process.append((str(url), item))
if not url_to_process:
log(f"No url to process for screen-shot cmdlet", file=sys.stderr)
log("No url to process for screen-shot cmdlet", file=sys.stderr)
return 1
debug(f"[_run] url to process: {[u for u, _ in url_to_process]}")
@@ -1157,7 +1157,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
progress.close_local_ui(force_complete=True)
if not all_emitted:
log(f"No screenshots were successfully captured", file=sys.stderr)
log("No screenshots were successfully captured", file=sys.stderr)
return 1
# Log completion message (keep this as normal output)

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
from typing import Any, Dict, Sequence, List, Optional
import importlib
import uuid
from pathlib import Path
import re