Migrate imports to SYS package (pipeline/result_table) and update related imports
CI: some checks failed (smoke-mm / "Install & smoke test mm --help" on push was cancelled).
commit ef01ca03a0 (parent 30d3bf480b)
Date: 2025-12-29 23:28:15 -08:00
60 changed files with 162 additions and 149 deletions
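
The change itself is mechanical: top-level modules such as pipeline, models, result_table, rich_display, and metadata now resolve under the SYS package, and every import site is rewritten to the package-qualified form while keeping the same aliases. A small illustrative sketch of the two recurring shapes (the surrounding function is invented for illustration; the module names, ResultTable("Result"), add_result(), and ctx.emit() all appear in the hunks below):

from SYS.result_table import ResultTable   # was: from result_table import ResultTable


def emit_rows(rows):
    # Function-local import, as used throughout CLI.py and the cmdlets;
    # the "ctx" alias is kept so call sites stay unchanged.
    from SYS import pipeline as ctx        # was: import pipeline as ctx

    table = ResultTable("Result")
    for row in rows:
        table.add_result(row)
        ctx.emit(row)
    return table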

.gitignore (vendored, 6 changed lines)

@@ -228,4 +228,8 @@ Log/medeia_macina/telegram.session
example.py
test*
MPV/portable_config/watch_later*
hydrusnetwork
hydrusnetwork
.style.yapf
.yapfignore


@@ -166,7 +166,7 @@ class HydrusNetwork:
)
# Stream upload body with a stderr progress bar (pipeline-safe).
from models import ProgressBar
from SYS.models import ProgressBar
bar = ProgressBar()
# Keep the PipelineLiveProgress transfer line clean: show the file name.
@@ -1087,7 +1087,7 @@ def hydrus_request(args, parser) -> int:
def hydrus_export(args, _parser) -> int:
from metadata import apply_mutagen_metadata, build_ffmpeg_command, prepare_ffmpeg_metadata
from SYS.metadata import apply_mutagen_metadata, build_ffmpeg_command, prepare_ffmpeg_metadata
output_path: Path = args.output
original_suffix = output_path.suffix


@@ -1077,7 +1077,7 @@ def _register_unlock_link():
Any]
) -> int:
"""Wrapper to make unlock_link_cmdlet available as cmdlet."""
import pipeline as ctx
from SYS import pipeline as ctx
ret_code = unlock_link_cmdlet(result, args, config)

CLI.py (30 changed lines)

@@ -32,7 +32,7 @@ from rich.panel import Panel
from rich.markdown import Markdown
from rich.bar import Bar
from rich.table import Table
from rich_display import stderr_console, stdout_console
from SYS.rich_display import stderr_console, stdout_console
def _install_rich_traceback(*, show_locals: bool = False) -> None:
@@ -66,7 +66,7 @@ from SYS.cmdlet_catalog import (
list_cmdlet_names,
)
from SYS.config import get_local_storage_path, load_config
from result_table import ResultTable
from SYS.result_table import ResultTable
class SelectionSyntax:
@@ -1134,7 +1134,7 @@ class CmdletExecutor:
return "Results"
def execute(self, cmd_name: str, args: List[str]) -> None:
import pipeline as ctx
from SYS import pipeline as ctx
from cmdlet import REGISTRY
ensure_registry_loaded()
@@ -1241,7 +1241,7 @@ class CmdletExecutor:
return
try:
from models import PipelineLiveProgress
from SYS.models import PipelineLiveProgress
progress_ui = PipelineLiveProgress([cmd_name_norm], enabled=True)
progress_ui.start()
@@ -2436,7 +2436,7 @@ class PipelineExecutor:
if (not quiet_mode) and bool(getattr(_sys.stderr,
"isatty", lambda: False)()):
from models import PipelineLiveProgress
from SYS.models import PipelineLiveProgress
pipe_stage_indices: List[int] = []
pipe_labels: List[str] = []
@@ -2502,7 +2502,7 @@ class PipelineExecutor:
progress_ui = PipelineLiveProgress(pipe_labels, enabled=True)
progress_ui.start()
try:
import pipeline as _pipeline_ctx
from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(progress_ui)
@@ -2520,7 +2520,7 @@ class PipelineExecutor:
def execute_tokens(self, tokens: List[str]) -> None:
from cmdlet import REGISTRY
import pipeline as ctx
from SYS import pipeline as ctx
try:
self._try_clear_pipeline_stop(ctx)
@@ -2999,7 +2999,7 @@ class PipelineExecutor:
except Exception:
pass
try:
import pipeline as _pipeline_ctx
from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
@@ -3142,7 +3142,7 @@ class PipelineExecutor:
except Exception:
pass
try:
import pipeline as _pipeline_ctx
from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx,
"set_live_progress"):
@@ -3199,7 +3199,7 @@ class PipelineExecutor:
except Exception:
pass
try:
import pipeline as _pipeline_ctx
from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
@@ -3354,7 +3354,7 @@ class PipelineExecutor:
except Exception:
pass
try:
import pipeline as _pipeline_ctx
from SYS import pipeline as _pipeline_ctx
if hasattr(_pipeline_ctx, "set_live_progress"):
_pipeline_ctx.set_live_progress(None)
@@ -3485,7 +3485,7 @@ class MedeiaCLI:
help="JSON string of seed items"
),
) -> None:
import pipeline as ctx
from SYS import pipeline as ctx
config = self._config_loader.load()
debug_enabled = bool(config.get("debug", False))
@@ -4229,7 +4229,7 @@ Come to love it when others take what you share, as there is no greater joy
pipeline_ctx_ref = None
try:
import pipeline as ctx
from SYS import pipeline as ctx
ctx.set_current_command_text(user_input)
pipeline_ctx_ref = ctx
@@ -4257,7 +4257,7 @@ Come to love it when others take what you share, as there is no greater joy
if len(tokens) == 1 and tokens[0] == "@,,":
try:
import pipeline as ctx
from SYS import pipeline as ctx
if ctx.restore_next_result_table():
last_table = (
@@ -4286,7 +4286,7 @@ Come to love it when others take what you share, as there is no greater joy
if len(tokens) == 1 and tokens[0] == "@..":
try:
import pipeline as ctx
from SYS import pipeline as ctx
if ctx.restore_previous_result_table():
last_table = (


@@ -206,8 +206,8 @@ class Bandcamp(Provider):
# Build a new table from artist discography.
try:
from result_table import ResultTable
from rich_display import stdout_console
from SYS.result_table import ResultTable
from SYS.rich_display import stdout_console
except Exception:
return False


@@ -70,7 +70,7 @@ class FileIO(Provider):
def upload(self, file_path: str, **kwargs: Any) -> str:
from API.HTTP import HTTPClient
from models import ProgressFileReader
from SYS.models import ProgressFileReader
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")


@@ -13,7 +13,7 @@ from urllib.parse import urljoin, urlparse, unquote
from ProviderCore.base import Provider, SearchResult
from ProviderCore.download import sanitize_filename
from SYS.logger import log
from models import ProgressBar
from SYS.models import ProgressBar
# Optional dependency for HTML scraping fallbacks
try:


@@ -359,7 +359,7 @@ class Matrix(Provider):
def upload_to_room(self, file_path: str, room_id: str, **kwargs: Any) -> str:
"""Upload a file and send it to a specific room."""
from models import ProgressFileReader
from SYS.models import ProgressFileReader
path = Path(file_path)
if not path.exists():


@@ -13,7 +13,7 @@ from typing import Any, Dict, List, Optional
from ProviderCore.base import Provider, SearchResult
from SYS.logger import log, debug
from models import ProgressBar
from SYS.models import ProgressBar
_SOULSEEK_NOISE_SUBSTRINGS = (
"search reply ticket does not match any search request",


@@ -1058,7 +1058,7 @@ class Telegram(Provider):
except Exception:
pass
from models import ProgressBar
from SYS.models import ProgressBar
progress_bar = ProgressBar()
last_print = {


@@ -13,7 +13,7 @@ class ZeroXZero(Provider):
def upload(self, file_path: str, **kwargs: Any) -> str:
from API.HTTP import HTTPClient
from models import ProgressFileReader
from SYS.models import ProgressFileReader
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")


@@ -6,7 +6,7 @@ import sys
import requests
from models import ProgressBar
from SYS.models import ProgressBar
def sanitize_filename(name: str, *, max_len: int = 150) -> str:


@@ -28,7 +28,7 @@ import httpx
from SYS.logger import log, debug
from SYS.utils import ensure_directory, sha256_file
from API.HTTP import HTTPClient
from models import DownloadError, DownloadOptions, DownloadMediaResult, DebugLogger, ProgressBar
from SYS.models import DownloadError, DownloadOptions, DownloadMediaResult, DebugLogger, ProgressBar
try:
import yt_dlp # type: ignore
@@ -40,7 +40,7 @@ else:
YTDLP_IMPORT_ERROR = None
try:
from metadata import extract_ytdlp_tags
from SYS.metadata import extract_ytdlp_tags
except ImportError:
extract_ytdlp_tags = None


@@ -5,7 +5,7 @@ import inspect
import threading
from pathlib import Path
from rich_display import console_for
from SYS.rich_display import console_for
_DEBUG_ENABLED = False
_thread_local = threading.local()


@@ -8,7 +8,7 @@ from urllib.parse import urlsplit, urlunsplit, unquote
from collections import deque
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple
from models import FileRelationshipTracker
from SYS.models import FileRelationshipTracker
try:
import musicbrainzngs # type: ignore
@@ -3874,7 +3874,7 @@ def scrape_url_metadata(
import json as json_module
try:
from metadata import extract_ytdlp_tags
from SYS.metadata import extract_ytdlp_tags
except ImportError:
extract_ytdlp_tags = None


@@ -114,7 +114,7 @@ class PipeObject:
cmdlet_name = "PipeObject"
try:
import pipeline as ctx
from SYS import pipeline as ctx
current = (
ctx.get_current_cmdlet_name("")
@@ -482,7 +482,7 @@ class ProgressBar:
# Prefer integrating with the pipeline Live UI to avoid nested Rich Live instances.
try:
import pipeline as pipeline_context
from SYS import pipeline as pipeline_context
ui = pipeline_context.get_live_progress()
if ui is not None and hasattr(ui,
@@ -510,7 +510,7 @@ class ProgressBar:
# Use shared stderr console when rendering to stderr (cooperates with PipelineLiveProgress).
if stream is sys.stderr:
try:
from rich_display import stderr_console
from SYS.rich_display import stderr_console
console = stderr_console()
except Exception:
@@ -878,7 +878,7 @@ class PipelineLiveProgress:
# tables/prompts in download-media) cooperate with Rich Live rendering.
# If we create a separate Console(file=sys.stderr), output will fight for
# terminal cursor control and appear "blocked"/truncated.
from rich_display import stderr_console
from SYS.rich_display import stderr_console
self._console = stderr_console()
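
The repeated rich_display imports above all route through one shared stderr console. SYS/rich_display.py itself is not part of this diff; the sketch below is only an assumption about its shape, motivated by the comment in PipelineLiveProgress about not creating a separate Console(file=sys.stderr):

import sys
from rich.console import Console

_STDERR_CONSOLE = None

def stderr_console() -> Console:
    # One Console bound to stderr, reused by ProgressBar, PipelineLiveProgress,
    # and ad-hoc prints so they do not fight Rich Live for cursor control.
    global _STDERR_CONSOLE
    if _STDERR_CONSOLE is None:
        _STDERR_CONSOLE = Console(file=sys.stderr)
    return _STDERR_CONSOLE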


@@ -23,7 +23,7 @@ import shlex
from contextlib import contextmanager
from typing import Any, Dict, List, Optional, Sequence
from models import PipelineStageContext
from SYS.models import PipelineStageContext
from SYS.logger import log
# Live progress UI instance (optional). Set by the pipeline runner.


@@ -184,7 +184,7 @@ class PipelineProgress:
return False
try:
from models import PipelineLiveProgress
from SYS.models import PipelineLiveProgress
ui = PipelineLiveProgress([str(label or "pipeline")], enabled=True)
ui.start()


@@ -8,7 +8,7 @@ from __future__ import annotations
import sys
from models import ProgressBar
from SYS.models import ProgressBar
_BAR = ProgressBar()


@@ -441,7 +441,7 @@ class ResultTable:
"""
self.title = title
try:
import pipeline as ctx
from SYS import pipeline as ctx
cmdlet_name = ""
try:
@@ -1277,14 +1277,14 @@ class ResultTable:
If accept_args=True: Dict with "indices" and "args" keys, or None if cancelled
"""
if self.no_choice:
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(self)
stdout_console().print(Panel(Text("Selection is disabled for this table.")))
return None
# Display the table
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(self)


@@ -419,7 +419,7 @@ class Folder(Store):
# Move or copy file (with progress bar on actual byte transfer).
# Note: a same-volume move may be a fast rename and won't show progress.
def _copy_with_progress(src: Path, dst: Path, *, label: str) -> None:
from models import ProgressFileReader
from SYS.models import ProgressFileReader
total_bytes = None
try:
@@ -1421,7 +1421,7 @@ class Folder(Store):
if isinstance(t, str) and t.strip()
]
from metadata import compute_namespaced_tag_overwrite
from SYS.metadata import compute_namespaced_tag_overwrite
_to_remove, _to_add, merged = compute_namespaced_tag_overwrite(
existing_tags, tag or []
@@ -1488,7 +1488,7 @@ class Folder(Store):
file_hash = file_identifier
if self._location:
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
with API_folder_store(Path(self._location)) as db:
meta = db.get_metadata(file_hash) or {}
@@ -1509,7 +1509,7 @@ class Folder(Store):
file_hash = file_identifier
if self._location:
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
with API_folder_store(Path(self._location)) as db:
meta = db.get_metadata(file_hash) or {}
@@ -1550,7 +1550,7 @@ class Folder(Store):
# Normalize + coalesce duplicates per hash.
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
except Exception:
normalize_urls = None # type: ignore
@@ -1689,7 +1689,7 @@ class Folder(Store):
file_hash = file_identifier
if self._location:
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
with API_folder_store(Path(self._location)) as db:
meta = db.get_metadata(file_hash) or {}
@@ -1726,7 +1726,7 @@ class Folder(Store):
return False
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
except Exception:
normalize_urls = None # type: ignore
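
_copy_with_progress above relies on SYS.models.ProgressFileReader, whose interface is not shown in this diff. As a rough sketch of the idea (a hypothetical standalone version, not the Folder store's actual code): copy in chunks and report bytes transferred, remembering that a same-volume move may be a fast rename that never enters this path.

import shutil
from pathlib import Path

def copy_with_progress(src: Path, dst: Path, on_bytes=None, chunk_size=1024 * 1024) -> None:
    # Chunked copy that reports (copied, total) after every chunk.
    total = src.stat().st_size
    copied = 0
    with src.open("rb") as fin, dst.open("wb") as fout:
        while True:
            chunk = fin.read(chunk_size)
            if not chunk:
                break
            fout.write(chunk)
            copied += len(chunk)
            if on_bytes is not None:
                on_bytes(copied, total)
    shutil.copystat(src, dst)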


@@ -15,7 +15,7 @@ import asyncio
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from SYS.config import load_config, resolve_output_dir
from result_table import ResultTable
from SYS.result_table import ResultTable
from ProviderCore.registry import get_search_provider
logger = logging.getLogger(__name__)


@@ -21,11 +21,11 @@ for path in (ROOT_DIR, BASE_DIR):
if str_path not in sys.path:
sys.path.insert(0, str_path)
import pipeline as ctx
from SYS import pipeline as ctx
from CLI import ConfigLoader, PipelineExecutor as CLIPipelineExecutor, WorkerManagerRegistry
from SYS.logger import set_debug
from rich_display import capture_rich_output
from result_table import ResultTable
from SYS.rich_display import capture_rich_output
from SYS.result_table import ResultTable
@dataclass(slots=True)


@@ -36,7 +36,7 @@ for path in (BASE_DIR, ROOT_DIR):
sys.path.insert(0, str_path)
from pipeline_runner import PipelineRunResult # type: ignore # noqa: E402
from result_table import ResultTable # type: ignore # noqa: E402
from SYS.result_table import ResultTable # type: ignore # noqa: E402
from SYS.config import load_config # type: ignore # noqa: E402
from Store.registry import Store as StoreRegistry # type: ignore # noqa: E402


@@ -11,7 +11,7 @@ from SYS.logger import log
from pathlib import Path
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Set
from dataclasses import dataclass, field
import models
from SYS import models
@dataclass
@@ -1519,13 +1519,13 @@ def _unique_destination_path(dest: Path) -> Path:
def _print_live_safe_stderr(message: str) -> None:
"""Print to stderr without breaking Rich Live progress output."""
try:
from rich_display import stderr_console # type: ignore
from SYS.rich_display import stderr_console # type: ignore
except Exception:
return
cm = None
try:
import pipeline as _pipeline_ctx # type: ignore
from SYS import pipeline as _pipeline_ctx # type: ignore
suspend = getattr(_pipeline_ctx, "suspend_live_progress", None)
cm = suspend() if callable(suspend) else None
@@ -1714,14 +1714,14 @@ def _print_saved_output_panel(item: Any, final_path: Path) -> None:
try:
from rich.panel import Panel # type: ignore
from rich.table import Table # type: ignore
from rich_display import stderr_console # type: ignore
from SYS.rich_display import stderr_console # type: ignore
except Exception:
return
# If Rich Live progress is active, pause it while printing so the panel
# doesn't get overwritten/truncated by Live's cursor control.
try:
import pipeline as _pipeline_ctx # type: ignore
from SYS import pipeline as _pipeline_ctx # type: ignore
suspend = getattr(_pipeline_ctx, "suspend_live_progress", None)
cm = suspend() if callable(suspend) else None
@@ -2255,7 +2255,7 @@ def coerce_to_pipe_object(
}
# Extract URL: prefer direct url field, then url list
from metadata import normalize_urls
from SYS.metadata import normalize_urls
url_list = normalize_urls(value.get("url"))
url_val = url_list[0] if url_list else None
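
_print_live_safe_stderr and _print_saved_output_panel both follow the same pattern: look up suspend_live_progress() on SYS.pipeline and, if present, pause the Rich Live display while printing so the output is not overwritten by Live's cursor control. A condensed sketch of that pattern (the helper name is invented; suspend_live_progress is assumed to be a context-manager factory, as the getattr/callable guard above suggests):

def print_live_safe(message: str) -> None:
    try:
        from SYS.rich_display import stderr_console
        from SYS import pipeline as _pipeline_ctx
    except Exception:
        return
    suspend = getattr(_pipeline_ctx, "suspend_live_progress", None)
    cm = suspend() if callable(suspend) else None
    if cm is not None:
        with cm:
            stderr_console().print(message)
    else:
        stderr_console().print(message)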


@@ -8,15 +8,15 @@ import tempfile
import re
from urllib.parse import urlsplit, parse_qs
import models
import pipeline as ctx
from SYS import models
from SYS import pipeline as ctx
from API import HydrusNetwork as hydrus_wrapper
from SYS.logger import log, debug
from SYS.pipeline_progress import PipelineProgress
from SYS.utils_constant import ALL_SUPPORTED_EXTENSIONS
from Store import Store
from . import _shared as sh
from result_table import ResultTable
from SYS.result_table import ResultTable
Cmdlet = sh.Cmdlet
CmdletArg = sh.CmdletArg
@@ -32,7 +32,7 @@ coerce_to_pipe_object = sh.coerce_to_pipe_object
collapse_namespace_tag = sh.collapse_namespace_tag
from API.folder import read_sidecar, find_sidecar, write_sidecar, API_folder_store
from SYS.utils import sha256_file, unique_path
from metadata import write_metadata
from SYS.metadata import write_metadata
# Canonical supported filetypes for all stores/cmdlets
SUPPORTED_MEDIA_EXTENSIONS = ALL_SUPPORTED_EXTENSIONS
@@ -266,7 +266,7 @@ class Add_File(Cmdlet):
# The user then runs @N (optionally piped), which replays add-file with selected paths.
if dir_scan_mode:
try:
from result_table import ResultTable
from SYS.result_table import ResultTable
from pathlib import Path as _Path
# Build base args to replay: keep everything except the directory -path.
@@ -1071,7 +1071,7 @@ class Add_File(Cmdlet):
)
if not refreshed_items:
# Fallback: at least show the add-file payloads as a display overlay
from result_table import ResultTable
from SYS.result_table import ResultTable
table = ResultTable("Result")
for payload in collected_payloads:
@@ -1578,7 +1578,7 @@ class Add_File(Cmdlet):
# If first item is a PipeObject object
try:
# models.PipeObject is an actual class; check attribute presence
import models as _models
from SYS import models as _models
if isinstance(first_item, _models.PipeObject):
path_candidate = getattr(first_item, "path", None)
@@ -1929,7 +1929,7 @@ class Add_File(Cmdlet):
@staticmethod
def _get_url(result: Any, pipe_obj: models.PipeObject) -> List[str]:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
# Prefer explicit PipeObject.url if present
urls: List[str] = []
@@ -2092,7 +2092,7 @@ class Add_File(Cmdlet):
@staticmethod
def _emit_pipe_object(pipe_obj: models.PipeObject) -> None:
from result_table import format_result
from SYS.result_table import format_result
log(format_result(pipe_obj, title="Result"), file=sys.stderr)
ctx.emit(pipe_obj.to_dict())
@@ -2125,7 +2125,7 @@ class Add_File(Cmdlet):
return
try:
from result_table import ResultTable
from SYS.result_table import ResultTable
table = ResultTable("Result")
table.add_result(payload)
@@ -2518,7 +2518,7 @@ class Add_File(Cmdlet):
_best_subtitle_sidecar,
_read_text_file,
)
from models import DownloadOptions
from SYS.models import DownloadOptions
from tool.ytdlp import YtDlpTool
except Exception:
return []
@@ -2681,7 +2681,7 @@ class Add_File(Cmdlet):
paths = getattr(result_obj, "paths", None)
if isinstance(paths, list) and paths:
# Section downloads: create one result per file.
from models import DownloadMediaResult
from SYS.models import DownloadMediaResult
results = []
for p in paths:
@@ -3012,7 +3012,7 @@ class Add_File(Cmdlet):
ctx.set_current_stage_table(table)
print()
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(table)
print(
@@ -3142,7 +3142,7 @@ class Add_File(Cmdlet):
# If we're moving/copying from one store to another, also copy the source store's
# existing associated URLs so they aren't lost.
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
source_store = None
source_hash = None


@@ -7,7 +7,7 @@ import re
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
Cmdlet = sh.Cmdlet


@@ -9,7 +9,7 @@ import sys
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
from API import HydrusNetwork as hydrus_wrapper
from . import _shared as sh


@@ -7,8 +7,8 @@ import re
from SYS.logger import log
import models
import pipeline as ctx
from SYS import models
from SYS import pipeline as ctx
from . import _shared as sh
normalize_result_input = sh.normalize_result_input


@@ -3,7 +3,7 @@ from __future__ import annotations
from typing import Any, Dict, List, Optional, Sequence, Tuple
import sys
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
from SYS.logger import log
from Store import Store


@@ -14,7 +14,7 @@ from urllib.parse import parse_qs, urlparse
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
from SYS.config import resolve_output_dir
from . import _shared as sh


@@ -12,9 +12,9 @@ from Store.Folder import Folder
from Store import Store
from . import _shared as sh
from API import HydrusNetwork as hydrus_wrapper
import pipeline as ctx
from result_table import ResultTable, _format_size
from rich_display import stdout_console
from SYS import pipeline as ctx
from SYS.result_table import ResultTable, _format_size
from SYS.rich_display import stdout_console
class Delete_File(sh.Cmdlet):


@@ -6,7 +6,7 @@ import sys
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
Cmdlet = sh.Cmdlet


@@ -9,7 +9,7 @@ import sys
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
Cmdlet = sh.Cmdlet


@@ -5,8 +5,8 @@ from pathlib import Path
import json
import sys
import models
import pipeline as ctx
from SYS import models
from SYS import pipeline as ctx
from . import _shared as sh
Cmdlet = sh.Cmdlet


@@ -3,7 +3,7 @@ from __future__ import annotations
from typing import Any, Dict, List, Optional, Sequence, Tuple
import sys
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field, normalize_hash = (
@@ -91,7 +91,7 @@ class Delete_Url(Cmdlet):
log("Error: Invalid hash format")
return 1
from metadata import normalize_urls
from SYS.metadata import normalize_urls
def _urls_from_arg(raw: Any) -> List[str]:
if raw is None:


@@ -17,7 +17,7 @@ from urllib.parse import urlparse
from SYS.download import DownloadError, _download_direct_file
from SYS.logger import log, debug
from SYS.pipeline_progress import PipelineProgress
import pipeline as pipeline_context
from SYS import pipeline as pipeline_context
from . import _shared as sh
@@ -243,7 +243,7 @@ class Download_File(Cmdlet):
)
try:
from result_table import ResultTable
from SYS.result_table import ResultTable
except Exception as exc:
log(f"download-file: ResultTable unavailable: {exc}", file=sys.stderr)
return 1


@@ -30,11 +30,11 @@ from urllib.parse import urlparse
from SYS.logger import log, debug
from SYS.pipeline_progress import PipelineProgress
from SYS.utils import sha256_file
from models import DownloadError, DownloadOptions, DownloadMediaResult, DebugLogger, ProgressBar
import pipeline as pipeline_context
from result_table import ResultTable
from SYS.models import DownloadError, DownloadOptions, DownloadMediaResult, DebugLogger, ProgressBar
from SYS import pipeline as pipeline_context
from SYS.result_table import ResultTable
from rich.prompt import Confirm
from rich_display import stderr_console as get_stderr_console
from SYS.rich_display import stderr_console as get_stderr_console
from . import _shared as sh
QueryArg = sh.QueryArg
@@ -150,7 +150,7 @@ else:
YTDLP_IMPORT_ERROR = None
try:
from metadata import extract_ytdlp_tags
from SYS.metadata import extract_ytdlp_tags
except ImportError:
extract_ytdlp_tags = None
@@ -1718,7 +1718,7 @@ class Download_Media(Cmdlet):
debug(f"Preflight URL check: candidate={candidate_url}")
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
except Exception:
normalize_urls = None # type: ignore[assignment]
@@ -1879,7 +1879,7 @@ class Download_Media(Cmdlet):
# Keep the full payload for history/inspection, but display a focused table.
# Use shared extractors so Ext/Size/Store/Hash remain consistent everywhere.
try:
from result_table import build_display_row
from SYS.result_table import build_display_row
except Exception:
build_display_row = None # type: ignore
@@ -2032,7 +2032,7 @@ class Download_Media(Cmdlet):
return True
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
except Exception:
normalize_urls = None # type: ignore[assignment]
@@ -2236,7 +2236,7 @@ class Download_Media(Cmdlet):
) or hit.get("sha256") or ""
try:
from result_table import build_display_row
from SYS.result_table import build_display_row
except Exception:
build_display_row = None # type: ignore


@@ -15,7 +15,7 @@ import webbrowser
from urllib.parse import urljoin
from urllib.request import pathname2url
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
from SYS.logger import log, debug
from Store import Store


@@ -14,8 +14,8 @@ CmdletArg = sh.CmdletArg
SharedArgs = sh.SharedArgs
parse_cmdlet_args = sh.parse_cmdlet_args
get_field = sh.get_field
import pipeline as ctx
from result_table import ResultTable
from SYS import pipeline as ctx
from SYS.result_table import ResultTable
class Get_Metadata(Cmdlet):


@@ -6,7 +6,7 @@ import sys
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
from . import _shared as sh
Cmdlet = sh.Cmdlet


@@ -7,8 +7,8 @@ from pathlib import Path
from SYS.logger import log
import models
import pipeline as ctx
from SYS import models
from SYS import pipeline as ctx
from API import HydrusNetwork as hydrus_wrapper
from . import _shared as sh
@@ -23,7 +23,7 @@ should_show_help = sh.should_show_help
get_field = sh.get_field
from API.folder import API_folder_store
from SYS.config import get_local_storage_path
from result_table import ResultTable
from SYS.result_table import ResultTable
from Store import Store
CMDLET = Cmdlet(
@@ -502,7 +502,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
if not found_relationships:
try:
from rich.panel import Panel
from rich_display import stdout_console
from SYS.rich_display import stdout_console
title = source_title or (hash_hex[:16] + "..." if hash_hex else "Item")
stdout_console().print(
@@ -569,7 +569,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
)
ctx.set_last_result_table(table, pipeline_results)
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(table)


@@ -27,7 +27,7 @@ import subprocess
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple
import pipeline as ctx
from SYS import pipeline as ctx
from API import HydrusNetwork
from API.folder import read_sidecar, write_sidecar, find_sidecar, API_folder_store
from . import _shared as sh
@@ -42,7 +42,7 @@ get_field = sh.get_field
from SYS.config import get_local_storage_path
try:
from metadata import extract_title
from SYS.metadata import extract_title
except ImportError:
extract_title = None
@@ -152,7 +152,7 @@ def _resolve_candidate_urls_for_item(
) -> List[str]:
"""Get candidate URLs from backend and/or piped result."""
try:
from metadata import normalize_urls
from SYS.metadata import normalize_urls
except Exception:
normalize_urls = None # type: ignore[assignment]
@@ -328,7 +328,7 @@ def _emit_tags_as_table(
This replaces _print_tag_list to make tags pipe-able.
Stores the table in ctx._LAST_RESULT_TABLE for downstream @ selection.
"""
from result_table import ResultTable
from SYS.result_table import ResultTable
# Create ResultTable with just tag column (no title)
# Keep the title stable and avoid including hash fragments.
@@ -776,7 +776,7 @@ def _scrape_url_metadata(
import json as json_module
try:
from metadata import extract_ytdlp_tags
from SYS.metadata import extract_ytdlp_tags
except ImportError:
extract_ytdlp_tags = None
@@ -956,7 +956,7 @@ def _scrape_url_metadata(
# Deduplicate tags by namespace to prevent duplicate title:, artist:, etc.
try:
from metadata import dedup_tags_by_namespace as _dedup
from SYS.metadata import dedup_tags_by_namespace as _dedup
if _dedup:
tags = _dedup(tags, keep_first=True)
@@ -1265,7 +1265,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return 1
try:
from metadata import extract_ytdlp_tags
from SYS.metadata import extract_ytdlp_tags
except Exception:
extract_ytdlp_tags = None # type: ignore[assignment]
@@ -1554,7 +1554,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
)
return 0
from result_table import ResultTable
from SYS.result_table import ResultTable
table = ResultTable(f"Metadata: {provider.name}")
table.set_source_command("get-tag", [])
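
The dedup step above calls SYS.metadata.dedup_tags_by_namespace(tags, keep_first=True), which is not shown in this diff. A plausible sketch of that behavior (an assumption, keeping the first tag seen for each namespace prefix such as title: or artist:):

def dedup_tags_by_namespace(tags, keep_first=True):
    # Keep one tag per "namespace:value" prefix; unnamespaced tags pass through.
    seen = set()
    out = []
    ordered = tags if keep_first else list(reversed(tags))
    for tag in ordered:
        namespace = tag.split(":", 1)[0] if ":" in tag else None
        if namespace is not None and namespace in seen:
            continue
        if namespace is not None:
            seen.add(namespace)
        out.append(tag)
    return out if keep_first else list(reversed(out))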


@@ -6,8 +6,6 @@ import sys
import re
from fnmatch import fnmatch
from urllib.parse import urlparse
import pipeline as ctx
from . import _shared as sh
Cmdlet, SharedArgs, parse_cmdlet_args, get_field, normalize_hash = (
@@ -19,6 +17,7 @@ Cmdlet, SharedArgs, parse_cmdlet_args, get_field, normalize_hash = (
)
from SYS.logger import log
from Store import Store
from SYS import pipeline as ctx
@dataclass
@@ -172,7 +171,7 @@ class Get_Url(Cmdlet):
return 1
# Create result table
from result_table import ResultTable
from SYS.result_table import ResultTable
table = (
ResultTable(
@@ -244,7 +243,7 @@ class Get_Url(Cmdlet):
urls = backend.get_url(file_hash)
from result_table import ResultTable
from SYS.result_table import ResultTable
title = str(get_field(result, "title") or "").strip()
table_title = "Title"


@@ -26,7 +26,7 @@ normalize_result_input = sh.normalize_result_input
parse_cmdlet_args = sh.parse_cmdlet_args
should_show_help = sh.should_show_help
import pipeline as ctx
from SYS import pipeline as ctx
try:
from pypdf import PdfWriter, PdfReader
@@ -38,7 +38,7 @@ except ImportError:
PdfReader = None
try:
from metadata import (
from SYS.metadata import (
read_tags_from_file,
merge_multiple_tag_lists,
)


@@ -30,7 +30,7 @@ normalize_result_input = sh.normalize_result_input
should_show_help = sh.should_show_help
get_field = sh.get_field
parse_cmdlet_args = sh.parse_cmdlet_args
import pipeline as pipeline_context
from SYS import pipeline as pipeline_context
# ============================================================================
# CMDLET Metadata Declaration


@@ -18,7 +18,7 @@ Cmdlet, CmdletArg, should_show_help = (
sh.CmdletArg,
sh.should_show_help,
)
import pipeline as ctx
from SYS import pipeline as ctx
# Optional dependencies
try:
@@ -226,7 +226,7 @@ class Search_Provider(Cmdlet):
import result_table
importlib.reload(result_table)
from result_table import ResultTable
from SYS.result_table import ResultTable
provider_text = str(provider_name or "").strip()
provider_lower = provider_text.lower()


@@ -32,7 +32,7 @@ from . import _shared as sh
sh.first_title_tag,
sh.parse_hash_query,
)
import pipeline as ctx
from SYS import pipeline as ctx
STORAGE_ORIGINS = {"local",
"hydrus",
@@ -257,7 +257,7 @@ class Search_Store(Cmdlet):
import importlib
importlib.reload(result_table)
from result_table import ResultTable
from SYS.result_table import ResultTable
table = ResultTable(command_title)
try:


@@ -23,7 +23,7 @@ normalize_result_input = sh.normalize_result_input
extract_title_from_result = sh.extract_title_from_result
extract_url_from_result = sh.extract_url_from_result
get_field = sh.get_field
import pipeline as ctx
from SYS import pipeline as ctx
CMDLET = Cmdlet(
name="trim-file",


@@ -4,8 +4,8 @@ import sys
from typing import List, Dict, Any, Optional, Sequence
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
from SYS.logger import log
from result_table import ResultTable
import pipeline as ctx
from SYS.result_table import ResultTable
from SYS import pipeline as ctx
ADJECTIVE_FILE = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
@@ -55,7 +55,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
table.set_source_command(".adjective")
ctx.set_last_result_table_overlay(table, list(data.keys()))
ctx.set_current_stage_table(table)
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(table)
return 0
@@ -138,7 +138,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
table.set_source_command(".adjective")
ctx.set_last_result_table_overlay(table, tags)
ctx.set_current_stage_table(table)
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(table)


@@ -118,7 +118,7 @@ def _run(piped_result: Any, args: List[str], config: Dict[str, Any]) -> int:
items.sort(key=lambda x: x["Key"])
# Emit items for ResultTable
import pipeline as ctx
from SYS import pipeline as ctx
for item in items:
ctx.emit(item)


@@ -6,8 +6,8 @@ import sys
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
from SYS.logger import log
from result_table import ResultTable
import pipeline as ctx
from SYS.result_table import ResultTable
from SYS import pipeline as ctx
def _normalize_choice_list(arg_names: Optional[List[str]]) -> List[str]:
@@ -72,7 +72,7 @@ def _render_list(
ctx.set_last_result_table(table, items)
ctx.set_current_stage_table(table)
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(table)
@@ -142,7 +142,7 @@ def _render_detail(meta: Dict[str, Any], args: Sequence[str]) -> None:
ctx.set_last_result_table_overlay(table, [meta])
ctx.set_current_stage_table(table)
from rich_display import stdout_console
from SYS.rich_display import stdout_console
stdout_console().print(table)


@@ -10,8 +10,8 @@ from urllib.parse import parse_qs, urlparse
from cmdlet._shared import Cmdlet, CmdletArg
from SYS.logger import log, debug
from result_table import ResultTable
import pipeline as ctx
from SYS.result_table import ResultTable
from SYS import pipeline as ctx
_MATRIX_PENDING_ITEMS_KEY = "matrix_pending_items"
_MATRIX_PENDING_TEXT_KEY = "matrix_pending_text"


@@ -8,7 +8,7 @@ from typing import Any, Dict, Sequence, Optional
from cmdlet._shared import Cmdlet, CmdletArg
from SYS.logger import log
import pipeline as ctx
from SYS import pipeline as ctx
CMDLET = Cmdlet(
name=".out-table",


@@ -9,10 +9,10 @@ from urllib.parse import urlparse, parse_qs
from pathlib import Path
from cmdlet._shared import Cmdlet, CmdletArg, parse_cmdlet_args
from SYS.logger import debug, get_thread_stream, is_debug_enabled, set_debug, set_thread_stream
from result_table import ResultTable
from SYS.result_table import ResultTable
from MPV.mpv_ipc import MPV
import pipeline as ctx
from models import PipeObject
from SYS import pipeline as ctx
from SYS.models import PipeObject
from API.folder import LocalLibrarySearchOptimizer
from SYS.config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url


@@ -6,8 +6,8 @@ from typing import Any, Dict, List, Optional, Sequence
from cmdlet._shared import Cmdlet, CmdletArg
from SYS.logger import log
from result_table import ResultTable
import pipeline as ctx
from SYS.result_table import ResultTable
from SYS import pipeline as ctx
_TELEGRAM_PENDING_ITEMS_KEY = "telegram_pending_items"


@@ -9,7 +9,7 @@ from typing import Any, Dict, Sequence, List
from cmdlet import register
from cmdlet._shared import Cmdlet, CmdletArg
import pipeline as ctx
from SYS import pipeline as ctx
from SYS.logger import log
from SYS.config import get_local_storage_path

scripts/check_imports.py (new file, 10 additions)

@@ -0,0 +1,10 @@
import importlib
import sys
import traceback
try:
    importlib.import_module("CLI")
    print("CLI imported OK")
except Exception as e:
    traceback.print_exc()
    sys.exit(1)
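
The new scripts/check_imports.py gives a quick smoke test that the CLI entry point (and, through it, the migrated SYS imports) still resolves. A possible extension, not part of this commit, would check the SYS modules individually and report every failure before exiting:

import importlib
import sys
import traceback

failed = False
for name in ("CLI", "SYS.pipeline", "SYS.result_table", "SYS.models", "SYS.metadata"):
    try:
        importlib.import_module(name)
        print(f"{name} imported OK")
    except Exception:
        traceback.print_exc()
        failed = True

if failed:
    sys.exit(1)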


@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional, Sequence
from SYS.logger import debug
from SYS.utils import ensure_directory
from models import DownloadOptions
from SYS.models import DownloadOptions
def _get_nested(config: Dict[str, Any], *path: str) -> Any: