This commit is contained in:
2026-01-19 06:24:09 -08:00
parent a961ac3ce7
commit 7ddf0065d1
45 changed files with 627 additions and 411 deletions

2
.gitignore vendored
View File

@@ -240,3 +240,5 @@ tmp_*
*.secret *.secret
# Ignore local ZeroTier auth tokens (project-local copy) # Ignore local ZeroTier auth tokens (project-local copy)
authtoken.secret authtoken.secret
mypy.ini

View File

@@ -452,7 +452,7 @@ class HTTPClient:
else: else:
kwargs["headers"] = self._get_headers() kwargs["headers"] = self._get_headers()
last_exception = None last_exception: Exception | None = None
for attempt in range(self.retries): for attempt in range(self.retries):
self._debug_panel( self._debug_panel(
@@ -875,7 +875,7 @@ def download_direct_file(
pass pass
tags: List[str] = [] tags: List[str] = []
if extract_ytdlp_tags: if extract_ytdlp_tags is not None:
try: try:
tags = extract_ytdlp_tags(info) tags = extract_ytdlp_tags(info)
except Exception as exc: except Exception as exc:
@@ -884,7 +884,7 @@ def download_direct_file(
if not any(str(t).startswith("title:") for t in tags): if not any(str(t).startswith("title:") for t in tags):
info["title"] = str(filename) info["title"] = str(filename)
tags = [] tags = []
if extract_ytdlp_tags: if extract_ytdlp_tags is not None:
try: try:
tags = extract_ytdlp_tags(info) tags = extract_ytdlp_tags(info)
except Exception as exc: except Exception as exc:
@@ -1135,7 +1135,7 @@ class AsyncHTTPClient:
else: else:
kwargs["headers"] = self._get_headers() kwargs["headers"] = self._get_headers()
last_exception = None last_exception: Exception | None = None
for attempt in range(self.retries): for attempt in range(self.retries):
try: try:

View File

@@ -2066,9 +2066,9 @@ def _derive_title(
"original_display_filename", "original_display_filename",
"original_filename", "original_filename",
): ):
value = entry.get(key) raw_val = entry.get(key)
if isinstance(value, str): if isinstance(raw_val, str):
cleaned = value.strip() cleaned = raw_val.strip()
if cleaned: if cleaned:
return cleaned return cleaned
return None return None
@@ -2444,7 +2444,7 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]:
matched_url = None matched_url = None
normalized_reported = None normalized_reported = None
seen: Set[str] = set() seen: Set[str] = set()
queue = deque() queue: deque[str] = deque()
for variant in _generate_hydrus_url_variants(url): for variant in _generate_hydrus_url_variants(url):
queue.append(variant) queue.append(variant)
if not queue: if not queue:
@@ -2486,11 +2486,11 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]:
if isinstance(raw_hashes, list): if isinstance(raw_hashes, list):
for item in raw_hashes: for item in raw_hashes:
try: try:
normalized = _normalize_hash(item) norm_hash = _normalize_hash(item)
except ValueError: except ValueError:
continue continue
if normalized: if norm_hash:
response_hashes_list.append(normalized) response_hashes_list.append(norm_hash)
raw_ids = response.get("file_ids") or response.get("file_id") raw_ids = response.get("file_ids") or response.get("file_id")
if isinstance(raw_ids, list): if isinstance(raw_ids, list):
for item in raw_ids: for item in raw_ids:
@@ -2510,12 +2510,13 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]:
continue continue
status_hash = entry.get("hash") or entry.get("file_hash") status_hash = entry.get("hash") or entry.get("file_hash")
if status_hash: if status_hash:
norm_status: Optional[str] = None
try: try:
normalized = _normalize_hash(status_hash) norm_status = _normalize_hash(status_hash)
except ValueError: except ValueError:
normalized = None pass
if normalized: if norm_status:
response_hashes_list.append(normalized) response_hashes_list.append(norm_status)
status_id = entry.get("file_id") or entry.get("fileid") status_id = entry.get("file_id") or entry.get("fileid")
if status_id is not None: if status_id is not None:
try: try:

3
API/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
"""Medeia API helpers that power external integrations."""
__all__ = []

View File

@@ -55,6 +55,7 @@ def _db_retry(max_attempts: int = 6, base_sleep: float = 0.1):
return _decorator return _decorator
# Try to import optional dependencies # Try to import optional dependencies
mutagen: Any
try: try:
import mutagen import mutagen
except ImportError: except ImportError:
@@ -72,12 +73,12 @@ try:
METADATA_AVAILABLE = True METADATA_AVAILABLE = True
except ImportError: except ImportError:
_read_sidecar_metadata = None _read_sidecar_metadata = None # type: ignore
_derive_sidecar_path = None _derive_sidecar_path = None # type: ignore
write_tags = None write_tags = None # type: ignore
write_tags_to_file = None write_tags_to_file = None # type: ignore
embed_metadata_in_file = None embed_metadata_in_file = None # type: ignore
read_tags_from_file = None read_tags_from_file = None # type: ignore
METADATA_AVAILABLE = False METADATA_AVAILABLE = False
# Media extensions to index # Media extensions to index
@@ -219,7 +220,7 @@ class API_folder_store:
""" """
self.library_root = expand_path(library_root).resolve() self.library_root = expand_path(library_root).resolve()
self.db_path = self.library_root / self.DB_NAME self.db_path = self.library_root / self.DB_NAME
self.connection: Optional[sqlite3.Connection] = None self.connection: sqlite3.Connection = None # type: ignore
# Use the shared lock # Use the shared lock
self._db_lock = self._shared_db_lock self._db_lock = self._shared_db_lock
mm_debug(f"[folder-db] init: root={self.library_root} db={self.db_path}") mm_debug(f"[folder-db] init: root={self.library_root} db={self.db_path}")
@@ -3818,7 +3819,7 @@ def migrate_all(library_root: Path,
db), db),
} }
finally: finally:
if should_close: if should_close and db is not None:
db.close() db.close()

117
CLI.py
View File

@@ -419,16 +419,16 @@ class CmdletCompleter(Completer):
return return
arg_names = CmdletIntrospection.cmdlet_args(cmd_name, config) arg_names = CmdletIntrospection.cmdlet_args(cmd_name, config)
logical_seen: Set[str] = set() seen_logicals: Set[str] = set()
for arg in arg_names: for arg in arg_names:
arg_low = arg.lower() arg_low = arg.lower()
if arg_low.startswith("--"): if arg_low.startswith("--"):
continue continue
logical = arg.lstrip("-").lower() logical = arg.lstrip("-").lower()
if logical in logical_seen: if logical in seen_logicals:
continue continue
yield Completion(arg, start_position=0) yield Completion(arg, start_position=0)
logical_seen.add(logical) seen_logicals.add(logical)
yield Completion("-help", start_position=0) yield Completion("-help", start_position=0)
return return
@@ -541,117 +541,6 @@ class CmdletCompleter(Completer):
yield Completion("-help", start_position=-len(current_token)) yield Completion("-help", start_position=-len(current_token))
# Lexer implementation removed; use `MedeiaLexer` from `SYS.cli_parsing` instead.
line = document.lines[lineno]
tokens: List[tuple[str, str]] = []
pattern = re.compile(
r"""
(\s+) | # 1. Whitespace
(\|) | # 2. Pipe
("(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*') | # 3. Quoted string
([^\s\|]+) # 4. Word
""",
re.VERBOSE,
)
is_cmdlet = True
def _emit_keyed_value(word: str) -> bool:
"""Emit `key:` prefixes (comma-separated) as argument tokens.
Designed for values like:
clip:3m4s-3m14s,1h22m-1h33m,item:2-3
Avoids special-casing URLs (://) and Windows drive paths (C:\\...).
Returns True if it handled the token.
"""
if not word or ":" not in word:
return False
# Avoid URLs and common scheme patterns.
if "://" in word:
return False
# Avoid Windows drive paths (e.g., C:\foo or D:/bar)
if re.match(r"^[A-Za-z]:[\\/]", word):
return False
key_prefix = re.compile(r"^([A-Za-z_][A-Za-z0-9_-]*:)(.*)$")
parts = word.split(",")
handled_any = False
for i, part in enumerate(parts):
if i > 0:
tokens.append(("class:value", ","))
if part == "":
continue
m = key_prefix.match(part)
if m:
tokens.append(("class:argument", m.group(1)))
if m.group(2):
tokens.append(("class:value", m.group(2)))
handled_any = True
else:
tokens.append(("class:value", part))
handled_any = True
return handled_any
for match in pattern.finditer(line):
ws, pipe, quote, word = match.groups()
if ws:
tokens.append(("", ws))
continue
if pipe:
tokens.append(("class:pipe", pipe))
is_cmdlet = True
continue
if quote:
# If the quoted token contains a keyed spec (clip:/item:/hash:),
# highlight the `key:` portion in argument-blue even inside quotes.
if len(quote) >= 2 and quote[0] == quote[-1] and quote[0] in ('"',
"'"):
q = quote[0]
inner = quote[1:-1]
start_index = len(tokens)
if _emit_keyed_value(inner):
# _emit_keyed_value already appended tokens for inner; insert opening quote
# before that chunk, then add the closing quote.
tokens.insert(start_index, ("class:string", q))
tokens.append(("class:string", q))
is_cmdlet = False
continue
tokens.append(("class:string", quote))
is_cmdlet = False
continue
if not word:
continue
if word.startswith("@"): # selection tokens
rest = word[1:]
if rest and re.fullmatch(r"[0-9\-\*,]+", rest):
tokens.append(("class:selection_at", "@"))
tokens.append(("class:selection_range", rest))
is_cmdlet = False
continue
if rest == "":
tokens.append(("class:selection_at", "@"))
is_cmdlet = False
continue
if is_cmdlet:
tokens.append(("class:cmdlet", word))
is_cmdlet = False
elif word.startswith("-"):
tokens.append(("class:argument", word))
else:
if not _emit_keyed_value(word):
tokens.append(("class:value", word))
return tokens
return get_line
class ConfigLoader: class ConfigLoader:

View File

@@ -1278,7 +1278,7 @@ class HIFI(Provider):
) )
return materialized return materialized
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]: def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path | Dict[str, Any]]]:
view, identifier = self._parse_tidal_url(url) view, identifier = self._parse_tidal_url(url)
if not view: if not view:
return False, None return False, None

View File

@@ -1265,7 +1265,7 @@ class Tidal(Provider):
) )
return materialized return materialized
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]: def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path | Dict[str, Any]]]:
view, identifier = self._parse_tidal_url(url) view, identifier = self._parse_tidal_url(url)
if not view: if not view:
return False, None return False, None

View File

@@ -585,7 +585,7 @@ class AllDebrid(TableProviderMixin, Provider):
URL_DOMAINS = () URL_DOMAINS = ()
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "api_key", "key": "api_key",
@@ -646,7 +646,7 @@ class AllDebrid(TableProviderMixin, Provider):
return spec return spec
return resolve_magnet_spec(str(target)) if isinstance(target, str) else None return resolve_magnet_spec(str(target)) if isinstance(target, str) else None
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]: def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path | Dict[str, Any]]]:
magnet_id = _parse_alldebrid_magnet_id(url) magnet_id = _parse_alldebrid_magnet_id(url)
if magnet_id is not None: if magnet_id is not None:
return True, { return True, {

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
import os import os
import sys import sys
from typing import Any, Dict, Optional from typing import Any, Dict, List, Optional
from ProviderCore.base import Provider from ProviderCore.base import Provider
from SYS.logger import log from SYS.logger import log
@@ -53,7 +53,7 @@ class FileIO(Provider):
PROVIDER_NAME = "file.io" PROVIDER_NAME = "file.io"
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "api_key", "key": "api_key",

View File

@@ -468,7 +468,7 @@ class InternetArchive(Provider):
URL = ("archive.org",) URL = ("archive.org",)
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "access_key", "key": "access_key",

View File

@@ -235,7 +235,7 @@ class Matrix(TableProviderMixin, Provider):
""" """
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "homeserver", "key": "homeserver",

View File

@@ -287,7 +287,7 @@ class OpenLibrary(Provider):
} }
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "email", "key": "email",

View File

@@ -245,7 +245,7 @@ class Soulseek(Provider):
return False return False
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "username", "key": "username",
@@ -325,6 +325,10 @@ class Soulseek(Provider):
) )
return None return None
# Cast to str for Mypy
username = str(username)
filename = str(filename)
# Use tempfile directory as default if generic path elements were passed or None. # Use tempfile directory as default if generic path elements were passed or None.
if output_dir is None: if output_dir is None:
import tempfile import tempfile
@@ -363,10 +367,13 @@ class Soulseek(Provider):
target_dir = Path(tempfile.gettempdir()) / "Medios" / "Soulseek" target_dir = Path(tempfile.gettempdir()) / "Medios" / "Soulseek"
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
# Cast to str for Mypy
username_str = str(username)
filename_str = str(filename)
return loop.run_until_complete( return loop.run_until_complete(
download_soulseek_file( download_soulseek_file(
username=username, username=username_str,
filename=filename, filename=filename_str,
output_dir=target_dir, output_dir=target_dir,
timeout=self.MAX_WAIT_TRANSFER, timeout=self.MAX_WAIT_TRANSFER,
) )

View File

@@ -7,7 +7,7 @@ import sys
import time import time
import threading import threading
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Optional, Sequence, Tuple from typing import Any, Dict, List, Optional, Sequence, Tuple
from urllib.parse import urlparse from urllib.parse import urlparse
from ProviderCore.base import Provider, SearchResult from ProviderCore.base import Provider, SearchResult
@@ -150,7 +150,7 @@ class Telegram(Provider):
URL = ("t.me", "telegram.me") URL = ("t.me", "telegram.me")
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "app_id", "key": "app_id",
@@ -1175,7 +1175,7 @@ class Telegram(Provider):
raise ValueError("Not a Telegram URL") raise ValueError("Not a Telegram URL")
return self._download_message_media_sync(url=url, output_dir=output_dir) return self._download_message_media_sync(url=url, output_dir=output_dir)
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]: def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path | Dict[str, Any]]]:
"""Optional provider override to parse and act on URLs.""" """Optional provider override to parse and act on URLs."""
if not _looks_like_telegram_message_url(url): if not _looks_like_telegram_message_url(url):
return False, None return False, None

View File

@@ -24,7 +24,7 @@ class SearchResult:
size_bytes: Optional[int] = None size_bytes: Optional[int] = None
tag: set[str] = field(default_factory=set) # Searchable tag values tag: set[str] = field(default_factory=set) # Searchable tag values
columns: List[Tuple[str, str]] = field(default_factory=list) # Display columns columns: List[Tuple[str, str]] = field(default_factory=list) # Display columns
selection_action: Optional[Dict[str, Any]] = None selection_action: Optional[List[str]] = None
selection_args: Optional[List[str]] = None selection_args: Optional[List[str]] = None
full_metadata: Dict[str, Any] = field(default_factory=dict) # Extra metadata full_metadata: Dict[str, Any] = field(default_factory=dict) # Extra metadata
@@ -150,7 +150,7 @@ class Provider(ABC):
).lower() ).lower()
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
"""Return configuration schema for this provider. """Return configuration schema for this provider.
Returns a list of dicts, each defining a field: Returns a list of dicts, each defining a field:
@@ -228,7 +228,7 @@ class Provider(ABC):
_ = config _ = config
return 0 return 0
def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path]]: def handle_url(self, url: str, *, output_dir: Optional[Path] = None) -> Tuple[bool, Optional[Path | Dict[str, Any]]]:
"""Optional provider override to parse and act on URLs.""" """Optional provider override to parse and act on URLs."""
_ = url _ = url

0
SYS/__init__.py Normal file
View File

View File

@@ -13,12 +13,14 @@ from typing import Any, Dict, List, Optional, Set, Tuple
# stubs if prompt_toolkit is not available so imports remain safe for testing. # stubs if prompt_toolkit is not available so imports remain safe for testing.
try: try:
from prompt_toolkit.document import Document from prompt_toolkit.document import Document
from prompt_toolkit.lexers import Lexer from prompt_toolkit.lexers import Lexer as _PTK_Lexer
except Exception: # pragma: no cover - optional dependency except Exception: # pragma: no cover - optional dependency
Document = object # type: ignore Document = object # type: ignore
# Fallback to a simple object when prompt_toolkit is not available
_PTK_Lexer = object # type: ignore
class Lexer: # simple fallback base # Expose a stable name used by the rest of the module
pass Lexer = _PTK_Lexer
class SelectionSyntax: class SelectionSyntax:
@@ -216,19 +218,19 @@ class SelectionFilterSyntax:
if ":" in s: if ":" in s:
parts = [p.strip() for p in s.split(":")] parts = [p.strip() for p in s.split(":")]
if len(parts) == 2 and all(p.isdigit() for p in parts): if len(parts) == 2 and all(p.isdigit() for p in parts):
m, sec = parts m_str, sec_str = parts
return max(0, int(m) * 60 + int(sec)) return max(0, int(m_str) * 60 + int(sec_str))
if len(parts) == 3 and all(p.isdigit() for p in parts): if len(parts) == 3 and all(p.isdigit() for p in parts):
h, m, sec = parts h_str, m_str, sec_str = parts
return max(0, int(h) * 3600 + int(m) * 60 + int(sec)) return max(0, int(h_str) * 3600 + int(m_str) * 60 + int(sec_str))
# token format: 1h2m3s (tokens can appear in any combination) # token format: 1h2m3s (tokens can appear in any combination)
total = 0 total = 0
found = False found = False
for m in SelectionFilterSyntax._DUR_TOKEN_RE.finditer(s): for match in SelectionFilterSyntax._DUR_TOKEN_RE.finditer(s):
found = True found = True
n = int(m.group(1)) n = int(match.group(1))
unit = m.group(2).lower() unit = match.group(2).lower()
if unit == "h": if unit == "h":
total += n * 3600 total += n * 3600
elif unit == "m": elif unit == "m":

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import re import re
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Optional from typing import Any, Dict, Optional, List
from SYS.logger import log from SYS.logger import log
from SYS.utils import expand_path from SYS.utils import expand_path
@@ -722,10 +722,6 @@ def reload_config(
return load_config(config_dir=config_dir, filename=filename) return load_config(config_dir=config_dir, filename=filename)
def clear_config_cache() -> None:
_CONFIG_CACHE.clear()
def _validate_config_safety(config: Dict[str, Any]) -> None: def _validate_config_safety(config: Dict[str, Any]) -> None:
"""Check for dangerous configurations, like folder stores in non-empty dirs.""" """Check for dangerous configurations, like folder stores in non-empty dirs."""
store = config.get("store") store = config.get("store")

View File

@@ -220,11 +220,11 @@ def extract_records(doc_or_html: Any, base_url: Optional[str] = None, xpaths: Op
records: List[Dict[str, str]] = [] records: List[Dict[str, str]] = []
for row in rows: for row in rows:
nr: Dict[str, str] = {} row_norm: Dict[str, str] = {}
for k, v in (row or {}).items(): for k, v in (row or {}).items():
nk = normalize_header(str(k or "")) nk = normalize_header(str(k or ""))
nr[nk] = (str(v).strip() if v is not None else "") row_norm[nk] = (str(v).strip() if v is not None else "")
records.append(nr) records.append(row_norm)
# Attempt to recover hrefs by matching anchor text -> href # Attempt to recover hrefs by matching anchor text -> href
try: try:
@@ -265,11 +265,11 @@ def extract_records(doc_or_html: Any, base_url: Optional[str] = None, xpaths: Op
# Normalize keys (map platform->system etc) # Normalize keys (map platform->system etc)
normed: List[Dict[str, str]] = [] normed: List[Dict[str, str]] = []
for r in records: for r in records:
nr: Dict[str, str] = {} norm_row: Dict[str, str] = {}
for k, v in (r or {}).items(): for k, v in (r or {}).items():
nk = normalize_header(k) nk = normalize_header(k)
nr[nk] = v norm_row[nk] = v
normed.append(nr) normed.append(norm_row)
return normed, chosen return normed, chosen

View File

@@ -24,16 +24,16 @@ def _coerce_value(value: Any) -> str:
if isinstance(value, bool): if isinstance(value, bool):
return "true" if value else "false" return "true" if value else "false"
if isinstance(value, (list, tuple, set)): if isinstance(value, (list, tuple, set)):
parts = [_coerce_value(v) for v in value] parts_list = [_coerce_value(v) for v in value]
cleaned = [part for part in parts if part] cleaned = [part for part in parts_list if part]
return ", ".join(cleaned) return ", ".join(cleaned)
if isinstance(value, dict): if isinstance(value, dict):
parts: List[str] = [] dict_parts: List[str] = []
for subkey, subvalue in value.items(): for subkey, subvalue in value.items():
part = _coerce_value(subvalue) part = _coerce_value(subvalue)
if part: if part:
parts.append(f"{subkey}:{part}") dict_parts.append(f"{subkey}:{part}")
return ", ".join(parts) return ", ".join(dict_parts)
try: try:
return str(value).strip() return str(value).strip()
except Exception: except Exception:

View File

@@ -140,7 +140,7 @@ def debug_inspect(
value=value, value=value,
max_string=100_000, max_string=100_000,
max_length=100_000, max_length=100_000,
) ) # type: ignore[call-arg]
except TypeError: except TypeError:
rich_inspect( rich_inspect(
obj, obj,
@@ -155,7 +155,6 @@ def debug_inspect(
value=value, value=value,
) )
def log(*args, **kwargs) -> None: def log(*args, **kwargs) -> None:
"""Print with automatic file.function prefix. """Print with automatic file.function prefix.

View File

@@ -17,6 +17,14 @@ try: # Optional; used for IMDb lookup without API key
from imdbinfo.services import search_title # type: ignore from imdbinfo.services import search_title # type: ignore
except Exception: # pragma: no cover - optional dependency except Exception: # pragma: no cover - optional dependency
search_title = None # type: ignore[assignment] search_title = None # type: ignore[assignment]
try:
import mutagen
except ImportError:
mutagen = None
try:
import musicbrainzngs
except ImportError:
musicbrainzngs = None
def value_normalize(value: Any) -> str: def value_normalize(value: Any) -> str:
@@ -93,6 +101,52 @@ def _sanitize_url(value: Optional[str]) -> Optional[str]:
return cleaned return cleaned
def sanitize_metadata_value(value: Any) -> str:
if value is None:
return ""
if isinstance(value, (list, tuple)):
value = ", ".join(str(v) for v in value if v)
return str(value).strip().replace("\n", " ").replace("\r", " ")
def unique_preserve_order(items: Iterable[Any]) -> list[Any]:
seen = set()
result = []
for item in items:
if item not in seen:
seen.add(item)
result.append(item)
return result
def fetch_musicbrainz_tags(mbid: str, entity: str = "release") -> Dict[str, Any]:
if not musicbrainzngs:
return {"tag": []}
musicbrainzngs.set_useragent("Medeia-Macina", "0.1")
tags: list[str] = []
try:
if entity == "release":
res = musicbrainzngs.get_release_by_id(mbid, includes=["tags"])
tags_list = res.get("release", {}).get("tag-list", [])
elif entity == "recording":
res = musicbrainzngs.get_recording_by_id(mbid, includes=["tags"])
tags_list = res.get("recording", {}).get("tag-list", [])
elif entity == "artist":
res = musicbrainzngs.get_artist_by_id(mbid, includes=["tags"])
tags_list = res.get("artist", {}).get("tag-list", [])
else:
return {"tag": []}
for t in tags_list:
if isinstance(t, dict) and "name" in t:
tags.append(t["name"])
except Exception as exc:
debug(f"MusicBrainz lookup failed: {exc}")
return {"tag": tags}
def _clean_existing_tags(existing: Any) -> List[str]: def _clean_existing_tags(existing: Any) -> List[str]:
tags: List[str] = [] tags: List[str] = []
seen: Set[str] = set() seen: Set[str] = set()
@@ -601,7 +655,7 @@ def write_tags(
# Write via consolidated function # Write via consolidated function
try: try:
lines = [] lines: List[str] = []
lines.extend(str(tag).strip().lower() for tag in tag_list if str(tag).strip()) lines.extend(str(tag).strip().lower() for tag in tag_list if str(tag).strip())
if lines: if lines:
@@ -2415,11 +2469,6 @@ def scrape_url_metadata(
try: try:
import json as json_module import json as json_module
try:
from SYS.metadata import extract_ytdlp_tags
except ImportError:
extract_ytdlp_tags = None
# Build yt-dlp command with playlist support # Build yt-dlp command with playlist support
# IMPORTANT: Do NOT use --flat-playlist! It strips metadata like artist, album, uploader, genre # IMPORTANT: Do NOT use --flat-playlist! It strips metadata like artist, album, uploader, genre
# Without it, yt-dlp gives us full metadata in an 'entries' array within a single JSON object # Without it, yt-dlp gives us full metadata in an 'entries' array within a single JSON object
@@ -2462,12 +2511,11 @@ def scrape_url_metadata(
# is_playlist = 'entries' in data and isinstance(data.get('entries'), list) # is_playlist = 'entries' in data and isinstance(data.get('entries'), list)
# Extract tags and playlist items # Extract tags and playlist items
tags = [] tags: List[str] = []
playlist_items = [] playlist_items: List[Dict[str, Any]] = []
# IMPORTANT: Extract album/playlist-level tags FIRST (before processing entries) # IMPORTANT: Extract album/playlist-level tags FIRST (before processing entries)
# This ensures we get metadata about the collection, not just individual tracks # This ensures we get metadata about the collection, not just individual tracks
if extract_ytdlp_tags:
album_tags = extract_ytdlp_tags(data) album_tags = extract_ytdlp_tags(data)
tags.extend(album_tags) tags.extend(album_tags)
@@ -2493,7 +2541,6 @@ def scrape_url_metadata(
# Extract tags from each entry and merge (but don't duplicate album-level tags) # Extract tags from each entry and merge (but don't duplicate album-level tags)
# Only merge entry tags that are multi-value prefixes (not single-value like title:, artist:, etc.) # Only merge entry tags that are multi-value prefixes (not single-value like title:, artist:, etc.)
if extract_ytdlp_tags:
entry_tags = extract_ytdlp_tags(entry) entry_tags = extract_ytdlp_tags(entry)
# Single-value namespaces that should not be duplicated from entries # Single-value namespaces that should not be duplicated from entries
@@ -2586,7 +2633,7 @@ def scrape_url_metadata(
pass # Silently ignore if we can't get playlist entries pass # Silently ignore if we can't get playlist entries
# Fallback: if still no tags detected, get from first item # Fallback: if still no tags detected, get from first item
if not tags and extract_ytdlp_tags: if not tags:
tags = extract_ytdlp_tags(data) tags = extract_ytdlp_tags(data)
# Extract formats from the main data object # Extract formats from the main data object
@@ -2595,11 +2642,7 @@ def scrape_url_metadata(
formats = extract_url_formats(data.get("formats", [])) formats = extract_url_formats(data.get("formats", []))
# Deduplicate tags by namespace to prevent duplicate title:, artist:, etc. # Deduplicate tags by namespace to prevent duplicate title:, artist:, etc.
try:
if dedup_tags_by_namespace:
tags = dedup_tags_by_namespace(tags, keep_first=True) tags = dedup_tags_by_namespace(tags, keep_first=True)
except Exception:
pass # If dedup fails, return tags as-is
return title, tags, formats, playlist_items return title, tags, formats, playlist_items
@@ -2617,8 +2660,8 @@ def extract_url_formats(formats: list) -> List[Tuple[str, str]]:
Returns list of (display_label, format_id) tuples. Returns list of (display_label, format_id) tuples.
""" """
try: try:
video_formats = {} # {resolution: format_data} video_formats: Dict[str, Dict[str, Any]] = {} # {resolution: format_data}
audio_formats = {} # {quality_label: format_data} audio_formats: Dict[str, Dict[str, Any]] = {} # {quality_label: format_data}
for fmt in formats: for fmt in formats:
vcodec = fmt.get("vcodec", "none") vcodec = fmt.get("vcodec", "none")
@@ -2655,7 +2698,7 @@ def extract_url_formats(formats: list) -> List[Tuple[str, str]]:
"abr": abr, "abr": abr,
} }
result = [] result: List[Tuple[str, str]] = []
# Add video formats in descending resolution order # Add video formats in descending resolution order
for res in sorted(video_formats.keys(), for res in sorted(video_formats.keys(),
@@ -2674,3 +2717,237 @@ def extract_url_formats(formats: list) -> List[Tuple[str, str]]:
except Exception as e: except Exception as e:
log(f"Error extracting formats: {e}", file=sys.stderr) log(f"Error extracting formats: {e}", file=sys.stderr)
return [] return []
def prepare_ffmpeg_metadata(payload: Optional[dict[str, Any]]) -> dict[str, str]:
if not isinstance(payload, dict):
return {}
metadata: dict[str, str] = {}
def set_field(key: str, raw: Any, limit: int = 2000) -> None:
sanitized = sanitize_metadata_value(raw)
if not sanitized:
return
if len(sanitized) > limit:
sanitized = sanitized[:limit]
metadata[key] = sanitized
set_field("title", payload.get("title"))
set_field("artist", payload.get("artist"), 512)
set_field("album", payload.get("album"), 512)
set_field("date", payload.get("year") or payload.get("date"), 20)
comment = payload.get("comment")
tags_value = payload.get("tags")
tag_strings: list[str] = []
artists_from_tags: list[str] = []
albums_from_tags: list[str] = []
genres_from_tags: list[str] = []
if isinstance(tags_value, list):
for raw_tag in tags_value:
if raw_tag is None:
continue
if not isinstance(raw_tag, str):
raw_tag = str(raw_tag)
tag = raw_tag.strip()
if not tag:
continue
tag_strings.append(tag)
namespace, sep, value = tag.partition(":")
if sep and value:
ns = namespace.strip().lower()
value = value.strip()
if ns in {"artist", "creator", "author", "performer"}:
artists_from_tags.append(value)
elif ns in {"album", "series", "collection", "group"}:
albums_from_tags.append(value)
elif ns in {"genre", "rating"}:
genres_from_tags.append(value)
elif ns in {"comment", "description"} and not comment:
comment = value
elif ns in {"year", "date"} and not (payload.get("year") or payload.get("date")):
set_field("date", value, 20)
else:
genres_from_tags.append(tag)
if "artist" not in metadata and artists_from_tags:
set_field("artist", ", ".join(unique_preserve_order(artists_from_tags)[:3]), 512)
if "album" not in metadata and albums_from_tags:
set_field("album", unique_preserve_order(albums_from_tags)[0], 512)
if genres_from_tags:
set_field("genre", ", ".join(unique_preserve_order(genres_from_tags)[:5]), 256)
if tag_strings:
joined_tags = ", ".join(tag_strings[:50])
set_field("keywords", joined_tags, 2000)
if not comment:
comment = joined_tags
if comment:
set_field("comment", str(comment), 2000)
set_field("description", str(comment), 2000)
return metadata
def apply_mutagen_metadata(path: Path, metadata: dict[str, str], fmt: str) -> None:
    """Best-effort write of *metadata* into an audio file via mutagen.

    Only the legacy "audio" format is handled here; other formats receive
    their metadata through ffmpeg's ``-metadata`` flags instead. Every
    failure is logged to stderr and swallowed so tagging never aborts a
    conversion.

    Args:
        path: Audio file to tag in place.
        metadata: Sanitized field values (title, artist, album, ...).
        fmt: Target format name; anything other than "audio" is a no-op.
    """
    # Nothing to do, or mutagen is not installed: bail out quietly.
    if fmt != "audio" or not metadata or mutagen is None:
        return

    try:
        audio = mutagen.File(path, easy=True)  # type: ignore[attr-defined]
    except Exception as exc:  # pragma: no cover - best effort only
        log(f"mutagen load failed: {exc}", file=sys.stderr)
        return
    if audio is None:
        return

    # Map our metadata keys onto mutagen "easy" tag names. Both "comment"
    # and "description" land in the same "comment" tag (description wins
    # when both are present, matching the mapping order below).
    wrote_any = False
    for source_key, target_key in (
        ("title", "title"),
        ("artist", "artist"),
        ("album", "album"),
        ("genre", "genre"),
        ("comment", "comment"),
        ("description", "comment"),
        ("date", "date"),
    ):
        text = metadata.get(source_key)
        if not text:
            continue
        try:
            audio[target_key] = [text]
            wrote_any = True
        except Exception:  # pragma: no cover - best effort only
            continue

    if not wrote_any:
        return
    try:
        audio.save()
    except Exception as exc:  # pragma: no cover - best effort only
        log(f"mutagen save failed: {exc}", file=sys.stderr)
def build_ffmpeg_command(
    ffmpeg_path: str,
    input_path: Path,
    output_path: Path,
    fmt: str,
    max_width: int,
    metadata: Optional[dict[str, str]] = None,
) -> list[str]:
    """Build the argument list for an ffmpeg transcode of *input_path*.

    Args:
        ffmpeg_path: Path to the ffmpeg executable.
        input_path: Source media file.
        output_path: Destination file to write.
        fmt: Target format name — "mp4" or "webm" for video, one of the
            audio formats (including the legacy alias "audio" for mp3), or
            "copy" to pass through without explicit codec flags.
        max_width: For video targets, cap the output width while keeping
            aspect ratio; ignored for audio targets and when <= 0.
        metadata: Optional key/value pairs embedded via ``-metadata``.

    Returns:
        The full command line as a list suitable for subprocess.

    Raises:
        ValueError: If *fmt* is not a recognised format name.
    """
    # Video targets: (encoder arguments, explicit container or None).
    video_profiles: dict[str, tuple[list[str], Optional[str]]] = {
        "mp4": (
            [
                "-c:v", "libx265",
                "-preset", "medium",
                "-crf", "26",
                "-tag:v", "hvc1",
                "-pix_fmt", "yuv420p",
                "-c:a", "aac",
                "-b:a", "192k",
                "-movflags", "+faststart",
            ],
            None,  # container inferred from the output extension
        ),
        "webm": (
            [
                "-c:v", "libvpx-vp9",
                "-b:v", "0",
                "-crf", "32",
                "-c:a", "libopus",
                "-b:a", "160k",
            ],
            "webm",
        ),
    }
    # Audio targets: (codec, bitrate or None for lossless/PCM, container).
    audio_profiles: dict[str, tuple[str, Optional[str], str]] = {
        "mp3": ("libmp3lame", "192k", "mp3"),
        "flac": ("flac", None, "flac"),
        "wav": ("pcm_s16le", None, "wav"),
        "aac": ("aac", "192k", "adts"),
        "m4a": ("aac", "192k", "ipod"),
        "ogg": ("libvorbis", "192k", "ogg"),
        "opus": ("libopus", "192k", "opus"),
        # Legacy format name for mp3.
        "audio": ("libmp3lame", "192k", "mp3"),
    }

    command: list[str] = [ffmpeg_path, "-y", "-i", str(input_path)]

    # Downscale only video outputs, never upscale (min(max_width, iw)).
    if fmt in video_profiles and max_width and max_width > 0:
        command.extend(["-vf", f"scale='min({max_width},iw)':-2"])

    if metadata:
        for key, value in metadata.items():
            command.extend(["-metadata", f"{key}={value}"])

    if fmt in video_profiles:
        encoder_args, container = video_profiles[fmt]
        command.extend(encoder_args)
        if container:
            command.extend(["-f", container])
    elif fmt in audio_profiles:
        codec, bitrate, container = audio_profiles[fmt]
        command.extend(["-vn", "-c:a", codec])
        if bitrate:
            command.extend(["-b:a", bitrate])
        command.extend(["-f", container])
    elif fmt != "copy":
        raise ValueError(f"Unsupported format: {fmt}")
    # NOTE(review): fmt == "copy" adds no codec flags, so ffmpeg re-encodes
    # with its defaults rather than stream-copying ("-c copy") — confirm
    # that is the intended behaviour.

    command.append(str(output_path))
    return command

View File

@@ -633,7 +633,13 @@ class ProgressFileReader:
min_interval_s: float = 0.25, min_interval_s: float = 0.25,
): ):
self._f = fileobj self._f = fileobj
self._total = int(total_bytes) if total_bytes not in (None, 0, "") else 0 if total_bytes is None:
self._total = 0
else:
try:
self._total = int(total_bytes)
except Exception:
self._total = 0
self._label = str(label or "upload") self._label = str(label or "upload")
self._min_interval_s = max(0.05, float(min_interval_s)) self._min_interval_s = max(0.05, float(min_interval_s))
self._bar = ProgressBar() self._bar = ProgressBar()

View File

@@ -7,7 +7,7 @@ import sys
from contextlib import contextmanager from contextlib import contextmanager
from dataclasses import dataclass, field from dataclasses import dataclass, field
from contextvars import ContextVar from contextvars import ContextVar
from typing import Any, Dict, List, Optional, Sequence from typing import Any, Dict, List, Optional, Sequence, Callable
from SYS.models import PipelineStageContext from SYS.models import PipelineStageContext
from SYS.logger import log, debug, is_debug_enabled from SYS.logger import log, debug, is_debug_enabled
from SYS.worker import WorkerManagerRegistry, WorkerStages from SYS.worker import WorkerManagerRegistry, WorkerStages
@@ -15,6 +15,9 @@ from SYS.cli_parsing import SelectionSyntax, SelectionFilterSyntax
from SYS.rich_display import stdout_console from SYS.rich_display import stdout_console
from SYS.background_notifier import ensure_background_notifier from SYS.background_notifier import ensure_background_notifier
from SYS.result_table import Table from SYS.result_table import Table
import re
from datetime import datetime
from SYS.cmdlet_catalog import import_cmd_module
HELP_EXAMPLE_SOURCE_COMMANDS = { HELP_EXAMPLE_SOURCE_COMMANDS = {
".help-example", ".help-example",
@@ -946,8 +949,9 @@ def get_last_result_table_source_command() -> Optional[str]:
Command name (e.g., 'download-file') or None if not set Command name (e.g., 'download-file') or None if not set
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.last_result_table) and hasattr(state.last_result_table, "source_command"): table = state.last_result_table
return state.last_result_table.source_command if table is not None and _is_selectable_table(table) and hasattr(table, "source_command"):
return getattr(table, "source_command")
return None return None
@@ -958,8 +962,9 @@ def get_last_result_table_source_args() -> List[str]:
List of arguments (e.g., ['https://example.com']) or empty list List of arguments (e.g., ['https://example.com']) or empty list
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.last_result_table) and hasattr(state.last_result_table, "source_args"): table = state.last_result_table
return state.last_result_table.source_args or [] if table is not None and _is_selectable_table(table) and hasattr(table, "source_args"):
return getattr(table, "source_args") or []
return [] return []
@@ -973,22 +978,26 @@ def get_last_result_table_row_selection_args(row_index: int) -> Optional[List[st
Selection arguments (e.g., ['-item', '3']) or None Selection arguments (e.g., ['-item', '3']) or None
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.last_result_table) and hasattr(state.last_result_table, "rows"): table = state.last_result_table
if 0 <= row_index < len(state.last_result_table.rows): if table is not None and _is_selectable_table(table) and hasattr(table, "rows"):
row = state.last_result_table.rows[row_index] rows = table.rows
if 0 <= row_index < len(rows):
row = rows[row_index]
if hasattr(row, "selection_args"): if hasattr(row, "selection_args"):
return row.selection_args return getattr(row, "selection_args")
return None return None
def get_last_result_table_row_selection_action(row_index: int) -> Optional[List[str]]: def get_last_result_table_row_selection_action(row_index: int) -> Optional[List[str]]:
"""Get the expanded stage tokens for a row in the last result table.""" """Get the expanded stage tokens for a row in the last result table."""
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.last_result_table) and hasattr(state.last_result_table, "rows"): table = state.last_result_table
if 0 <= row_index < len(state.last_result_table.rows): if table is not None and _is_selectable_table(table) and hasattr(table, "rows"):
row = state.last_result_table.rows[row_index] rows = table.rows
if 0 <= row_index < len(rows):
row = rows[row_index]
if hasattr(row, "selection_action"): if hasattr(row, "selection_action"):
return row.selection_action return getattr(row, "selection_action")
return None return None
def set_current_stage_table(result_table: Optional[Any]) -> None: def set_current_stage_table(result_table: Optional[Any]) -> None:
@@ -1019,8 +1028,9 @@ def get_current_stage_table_source_command() -> Optional[str]:
Command name (e.g., 'download-file') or None Command name (e.g., 'download-file') or None
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.current_stage_table) and hasattr(state.current_stage_table, "source_command"): table = state.current_stage_table
return state.current_stage_table.source_command if table is not None and _is_selectable_table(table) and hasattr(table, "source_command"):
return getattr(table, "source_command")
return None return None
@@ -1031,8 +1041,9 @@ def get_current_stage_table_source_args() -> List[str]:
List of arguments or empty list List of arguments or empty list
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.current_stage_table) and hasattr(state.current_stage_table, "source_args"): table = state.current_stage_table
return state.current_stage_table.source_args or [] if table is not None and _is_selectable_table(table) and hasattr(table, "source_args"):
return getattr(table, "source_args") or []
return [] return []
@@ -1046,22 +1057,26 @@ def get_current_stage_table_row_selection_args(row_index: int) -> Optional[List[
Selection arguments or None Selection arguments or None
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.current_stage_table) and hasattr(state.current_stage_table, "rows"): table = state.current_stage_table
if 0 <= row_index < len(state.current_stage_table.rows): if table is not None and _is_selectable_table(table) and hasattr(table, "rows"):
row = state.current_stage_table.rows[row_index] rows = table.rows
if 0 <= row_index < len(rows):
row = rows[row_index]
if hasattr(row, "selection_args"): if hasattr(row, "selection_args"):
return row.selection_args return getattr(row, "selection_args")
return None return None
def get_current_stage_table_row_selection_action(row_index: int) -> Optional[List[str]]: def get_current_stage_table_row_selection_action(row_index: int) -> Optional[List[str]]:
"""Get the expanded stage tokens for a row in the current stage table.""" """Get the expanded stage tokens for a row in the current stage table."""
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.current_stage_table) and hasattr(state.current_stage_table, "rows"): table = state.current_stage_table
if 0 <= row_index < len(state.current_stage_table.rows): if table is not None and _is_selectable_table(table) and hasattr(table, "rows"):
row = state.current_stage_table.rows[row_index] rows = table.rows
if 0 <= row_index < len(rows):
row = rows[row_index]
if hasattr(row, "selection_action"): if hasattr(row, "selection_action"):
return row.selection_action return getattr(row, "selection_action")
return None return None
@@ -1072,9 +1087,11 @@ def get_current_stage_table_row_source_index(row_index: int) -> Optional[int]:
back to the original item order (e.g., playlist or provider order). back to the original item order (e.g., playlist or provider order).
""" """
state = _get_pipeline_state() state = _get_pipeline_state()
if _is_selectable_table(state.current_stage_table) and hasattr(state.current_stage_table, "rows"): table = state.current_stage_table
if 0 <= row_index < len(state.current_stage_table.rows): if table is not None and _is_selectable_table(table) and hasattr(table, "rows"):
row = state.current_stage_table.rows[row_index] rows = table.rows
if 0 <= row_index < len(rows):
row = rows[row_index]
return getattr(row, "source_index", None) return getattr(row, "source_index", None)
return None return None

View File

@@ -33,12 +33,15 @@ except ImportError:
TEXTUAL_AVAILABLE = False TEXTUAL_AVAILABLE = False
# Import ResultModel from the API for unification # Import ResultModel from the API for typing; avoid runtime redefinition issues
try: from typing import TYPE_CHECKING
from SYS.result_table_api import ResultModel if TYPE_CHECKING:
except ImportError: from SYS.result_table_api import ResultModel # type: ignore
# Fallback if not available yet in directory structure (unlikely) else:
ResultModel = None ResultModel = None # type: ignore[assignment]
# Reuse the existing format_bytes helper under a clearer alias
from SYS.utils import format_bytes as format_mb
def _sanitize_cell_text(value: Any) -> str: def _sanitize_cell_text(value: Any) -> str:
@@ -158,6 +161,8 @@ def extract_hash_value(item: Any) -> str:
def extract_title_value(item: Any) -> str: def extract_title_value(item: Any) -> str:
data = _as_dict(item) or {} data = _as_dict(item) or {}
if not isinstance(data, dict):
data = {}
title = _get_first_dict_value(data, ["title", "name", "filename"]) title = _get_first_dict_value(data, ["title", "name", "filename"])
if not title: if not title:
title = _get_first_dict_value( title = _get_first_dict_value(
@@ -171,9 +176,11 @@ def extract_title_value(item: Any) -> str:
def extract_ext_value(item: Any) -> str: def extract_ext_value(item: Any) -> str:
data = _as_dict(item) or {} data = _as_dict(item) or {}
if not isinstance(data, dict):
data = {}
meta = data.get("metadata") if isinstance(data.get("metadata"), _md = data.get("metadata")
dict) else {} meta: Dict[str, Any] = _md if isinstance(_md, dict) else {}
raw_path = data.get("path") or data.get("target") or data.get( raw_path = data.get("path") or data.get("target") or data.get(
"filename" "filename"
) or data.get("title") ) or data.get("title")
@@ -206,8 +213,10 @@ def extract_ext_value(item: Any) -> str:
def extract_size_bytes_value(item: Any) -> Optional[int]: def extract_size_bytes_value(item: Any) -> Optional[int]:
data = _as_dict(item) or {} data = _as_dict(item) or {}
meta = data.get("metadata") if isinstance(data.get("metadata"), if not isinstance(data, dict):
dict) else {} data = {}
_md = data.get("metadata")
meta: Dict[str, Any] = _md if isinstance(_md, dict) else {}
size_val = _get_first_dict_value( size_val = _get_first_dict_value(
data, data,
@@ -749,7 +758,7 @@ class Table:
row.payload = result row.payload = result
# Handle ResultModel from the new strict API (SYS/result_table_api.py) # Handle ResultModel from the new strict API (SYS/result_table_api.py)
if ResultModel and isinstance(result, ResultModel): if ResultModel is not None and isinstance(result, ResultModel):
self._add_result_model(row, result) self._add_result_model(row, result)
# Handle TagItem from get_tag.py (tag display with index) # Handle TagItem from get_tag.py (tag display with index)
elif hasattr(result, "__class__") and result.__class__.__name__ == "TagItem": elif hasattr(result, "__class__") and result.__class__.__name__ == "TagItem":
@@ -1573,7 +1582,7 @@ class Table:
return None return None
# Remaining parts are cmdlet arguments # Remaining parts are cmdlet arguments
cmdlet_args = {} cmdlet_args: dict[str, Any] = {}
i = 1 i = 1
while i < len(parts): while i < len(parts):
part = parts[i] part = parts[i]
@@ -1906,7 +1915,7 @@ def extract_item_metadata(item: Any) -> Dict[str, Any]:
out = {} out = {}
# Handle ResultModel specifically for better detail display # Handle ResultModel specifically for better detail display
if ResultModel and isinstance(item, ResultModel): if ResultModel is not None and isinstance(item, ResultModel):
if item.title: out["Title"] = item.title if item.title: out["Title"] = item.title
if item.path: out["Path"] = item.path if item.path: out["Path"] = item.path
if item.ext: out["Ext"] = item.ext if item.ext: out["Ext"] = item.ext
@@ -1964,11 +1973,12 @@ def extract_item_metadata(item: Any) -> Dict[str, Any]:
if e: out["Ext"] = e if e: out["Ext"] = e
size = extract_size_bytes_value(item) size = extract_size_bytes_value(item)
if size: if size is not None:
out["Size"] = size out["Size"] = format_mb(size)
else: else:
s = data.get("size") or data.get("size_bytes") s = data.get("size") or data.get("size_bytes")
if s: out["Size"] = s if s is not None:
out["Size"] = str(s)
# Duration # Duration
dur = _get_first_dict_value(data, ["duration_seconds", "duration"]) dur = _get_first_dict_value(data, ["duration_seconds", "duration"])
@@ -1977,21 +1987,16 @@ def extract_item_metadata(item: Any) -> Dict[str, Any]:
# URL # URL
url = _get_first_dict_value(data, ["url", "URL"]) url = _get_first_dict_value(data, ["url", "URL"])
if url: out["Url"] = str(url) if url else ""
out["Url"] = url
else:
out["Url"] = None # Explicitly None for <null> display
# Relationships # Relationships
rels = _get_first_dict_value(data, ["relationships", "rel"]) rels = _get_first_dict_value(data, ["relationships", "rel"])
if rels: out["Relations"] = str(rels) if rels else ""
out["Relations"] = rels
else:
out["Relations"] = None
# Tags Summary # Tags Summary
tags = _get_first_dict_value(data, ["tags", "tag"]) tags = _get_first_dict_value(data, ["tags", "tag"])
if tags: out["Tags"] = tags if tags:
out["Tags"] = ", ".join([str(t) for t in (tags if isinstance(tags, (list, tuple)) else [tags])])
return out return out

View File

@@ -11,7 +11,7 @@ from __future__ import annotations
import contextlib import contextlib
import sys import sys
from typing import Any, Iterator, TextIO from typing import Any, Iterator, TextIO, List, Dict, Optional, Tuple, cast
from rich.console import Console from rich.console import Console
from rich.panel import Panel from rich.panel import Panel
@@ -200,8 +200,8 @@ def render_image_to_console(image_path: str | Path, max_width: int | None = None
if not path.exists() or not path.is_file(): if not path.exists() or not path.is_file():
return return
with Image.open(path) as img: with Image.open(path) as opened_img:
img = img.convert("RGB") img = opened_img.convert("RGB")
orig_w, orig_h = img.size orig_w, orig_h = img.size
# Determine target dimensions # Determine target dimensions
@@ -235,14 +235,21 @@ def render_image_to_console(image_path: str | Path, max_width: int | None = None
img = img.resize((target_w, target_h), Image.Resampling.BILINEAR) img = img.resize((target_w, target_h), Image.Resampling.BILINEAR)
pixels = img.load() pixels = img.load()
if pixels is None:
return
# Render using upper half block (U+2580) # Render using upper half block (U+2580)
# Each character row in terminal represents 2 pixel rows in image. # Each character row in terminal represents 2 pixel rows in image.
for y in range(0, target_h - 1, 2): for y in range(0, target_h - 1, 2):
line = Text() line = Text()
for x in range(target_w): for x in range(target_w):
r1, g1, b1 = pixels[x, y] rgb1 = cast(tuple, pixels[x, y])
r2, g2, b2 = pixels[x, y + 1] rgb2 = cast(tuple, pixels[x, y + 1])
try:
r1, g1, b1 = int(rgb1[0]), int(rgb1[1]), int(rgb1[2])
r2, g2, b2 = int(rgb2[0]), int(rgb2[1]), int(rgb2[2])
except Exception:
r1 = g1 = b1 = r2 = g2 = b2 = 0
# Foreground is top pixel, background is bottom pixel # Foreground is top pixel, background is bottom pixel
line.append( line.append(
"", "",

View File

@@ -21,7 +21,7 @@ from dataclasses import dataclass, field
from fnmatch import fnmatch from fnmatch import fnmatch
from urllib.parse import urlparse from urllib.parse import urlparse
import SYS.utils_constant from SYS.utils_constant import mime_maps
try: try:
import cbor2 import cbor2
@@ -140,7 +140,7 @@ def create_metadata_sidecar(file_path: Path, metadata: dict) -> None:
metadata["hash"] = sha256_file(file_path) metadata["hash"] = sha256_file(file_path)
metadata["size"] = Path(file_path).stat().st_size metadata["size"] = Path(file_path).stat().st_size
format_found = False format_found = False
for mime_type, ext_map in SYS.utils_constant.mime_maps.items(): for mime_type, ext_map in mime_maps.items():
for key, info in ext_map.items(): for key, info in ext_map.items():
if info.get("ext") == file_ext: if info.get("ext") == file_ext:
metadata["type"] = mime_type metadata["type"] = mime_type
@@ -516,7 +516,7 @@ def get_api_key(config: dict[str, Any], service: str, key_path: str) -> str | No
""" """
try: try:
parts = key_path.split(".") parts = key_path.split(".")
value = config value: Any = config
for part in parts: for part in parts:
if isinstance(value, dict): if isinstance(value, dict):
value = value.get(part) value = value.get(part)

View File

@@ -1,4 +1,6 @@
mime_maps = { from typing import Any, Dict
mime_maps: Dict[str, Dict[str, Dict[str, Any]]] = {
"image": { "image": {
"jpg": { "jpg": {
"ext": ".jpg", "ext": ".jpg",

View File

@@ -5,7 +5,7 @@ import io
import sys import sys
import uuid import uuid
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Optional, Set, TextIO from typing import Any, Dict, Optional, Set, TextIO, Sequence
from SYS.config import get_local_storage_path from SYS.config import get_local_storage_path
from SYS.worker_manager import WorkerManager from SYS.worker_manager import WorkerManager
@@ -189,9 +189,7 @@ class WorkerManagerRegistry:
manager.expire_running_workers( manager.expire_running_workers(
older_than_seconds=120, older_than_seconds=120,
worker_id_prefix="cli_%", worker_id_prefix="cli_%",
reason=( reason="CLI session ended unexpectedly; marking worker as failed",
"CLI session ended unexpectedly; marking worker as failed",
),
) )
except Exception: except Exception:
pass pass

View File

@@ -47,8 +47,8 @@ class Worker:
self.details = "" self.details = ""
self.error_message = "" self.error_message = ""
self.result = "pending" self.result = "pending"
self._stdout_buffer = [] self._stdout_buffer: list[str] = []
self._steps_buffer = [] self._steps_buffer: list[str] = []
def log_step(self, step_text: str) -> None: def log_step(self, step_text: str) -> None:
"""Log a step for this worker. """Log a step for this worker.
@@ -108,18 +108,26 @@ class Worker:
logger.error(f"Error getting steps for worker {self.id}: {e}") logger.error(f"Error getting steps for worker {self.id}: {e}")
return "" return ""
def update_progress(self, progress: str = "", details: str = "") -> None: def update_progress(self, progress: float | str = 0.0, details: str = "") -> None:
"""Update worker progress. """Update worker progress.
Args: Args:
progress: Progress string (e.g., "50%") progress: Progress value (float) or textual like "50%"; will be coerced to float
details: Additional details details: Additional details
""" """
self.progress = progress self.progress = str(progress)
self.details = details self.details = details
try: try:
if self.manager: if self.manager:
self.manager.update_worker(self.id, progress, details) # Normalize to a float value for the manager API (0-100)
try:
if isinstance(progress, str) and progress.endswith('%'):
progress_value = float(progress.rstrip('%'))
else:
progress_value = float(progress)
except Exception:
progress_value = 0.0
self.manager.update_worker(self.id, progress_value, details)
except Exception as e: except Exception as e:
logger.error(f"Error updating worker {self.id}: {e}") logger.error(f"Error updating worker {self.id}: {e}")
@@ -165,7 +173,7 @@ class WorkerLoggingHandler(logging.StreamHandler):
self.db = db self.db = db
self.manager = manager self.manager = manager
self.buffer_size = buffer_size self.buffer_size = buffer_size
self.buffer = [] self.buffer: list[str] = []
self._lock = Lock() self._lock = Lock()
# Set a format that includes timestamp and level # Set a format that includes timestamp and level
@@ -278,14 +286,6 @@ class WorkerManager:
self._stdout_flush_bytes = 4096 self._stdout_flush_bytes = 4096
self._stdout_flush_interval = 0.75 self._stdout_flush_interval = 0.75
def close(self) -> None:
"""Close the database connection."""
if self.db:
try:
with self._db_lock:
self.db.close()
except Exception:
pass
def __enter__(self): def __enter__(self):
"""Context manager entry.""" """Context manager entry."""
@@ -478,7 +478,7 @@ class WorkerManager:
True if update was successful True if update was successful
""" """
try: try:
kwargs = {} kwargs: dict[str, Any] = {}
if progress > 0: if progress > 0:
kwargs["progress"] = progress kwargs["progress"] = progress
if current_step: if current_step:

View File

@@ -10,6 +10,7 @@ from typing import Any, Dict, List, Optional, Tuple
from SYS.logger import debug, log from SYS.logger import debug, log
from SYS.utils import sha256_file, expand_path from SYS.utils import sha256_file, expand_path
from SYS.config import get_local_storage_path
from Store._base import Store from Store._base import Store
@@ -56,7 +57,7 @@ class Folder(Store):
"""""" """"""
# Track which locations have already been migrated to avoid repeated migrations # Track which locations have already been migrated to avoid repeated migrations
_migrated_locations = set() _migrated_locations: set[str] = set()
# Cache scan results to avoid repeated full scans across repeated instantiations # Cache scan results to avoid repeated full scans across repeated instantiations
_scan_cache: Dict[str, _scan_cache: Dict[str,
Tuple[bool, Tuple[bool,
@@ -65,7 +66,7 @@ class Folder(Store):
int]]] = {} int]]] = {}
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "NAME", "key": "NAME",
@@ -1498,11 +1499,12 @@ class Folder(Store):
debug(f"Failed to get file for hash {file_hash}: {exc}") debug(f"Failed to get file for hash {file_hash}: {exc}")
return None return None
def get_metadata(self, file_hash: str) -> Optional[Dict[str, Any]]: def get_metadata(self, file_hash: str, **kwargs: Any) -> Optional[Dict[str, Any]]:
"""Get metadata for a file from the database by hash. """Get metadata for a file from the database by hash.
Args: Args:
file_hash: SHA256 hash of the file (64-char hex string) file_hash: SHA256 hash of the file (64-char hex string)
**kwargs: Additional options
Returns: Returns:
Dict with metadata fields (ext, size, hash, duration, etc.) or None if not found Dict with metadata fields (ext, size, hash, duration, etc.) or None if not found
@@ -1613,7 +1615,7 @@ class Folder(Store):
debug(f"get_tags failed for local file: {exc}") debug(f"get_tags failed for local file: {exc}")
return [], "unknown" return [], "unknown"
def add_tag(self, hash: str, tag: List[str], **kwargs: Any) -> bool: def add_tag(self, file_identifier: str, tags: List[str], **kwargs: Any) -> bool:
"""Add tags to a local file by hash (via API_folder_store). """Add tags to a local file by hash (via API_folder_store).
Handles namespace collapsing: when adding namespace:value, removes existing namespace:* tags. Handles namespace collapsing: when adding namespace:value, removes existing namespace:* tags.
@@ -1628,14 +1630,14 @@ class Folder(Store):
try: try:
with API_folder_store(Path(self._location)) as db: with API_folder_store(Path(self._location)) as db:
existing_tags = [ existing_tags = [
t for t in (db.get_tags(hash) or []) t for t in (db.get_tags(file_identifier) or [])
if isinstance(t, str) and t.strip() if isinstance(t, str) and t.strip()
] ]
from SYS.metadata import compute_namespaced_tag_overwrite from SYS.metadata import compute_namespaced_tag_overwrite
_to_remove, _to_add, merged = compute_namespaced_tag_overwrite( _to_remove, _to_add, merged = compute_namespaced_tag_overwrite(
existing_tags, tag or [] existing_tags, tags or []
) )
if not _to_remove and not _to_add: if not _to_remove and not _to_add:
return True return True
@@ -1644,7 +1646,7 @@ class Folder(Store):
# To enforce lowercase-only tags and namespace overwrites, rewrite the full tag set. # To enforce lowercase-only tags and namespace overwrites, rewrite the full tag set.
cursor = db.connection.cursor() cursor = db.connection.cursor()
cursor.execute("DELETE FROM tag WHERE hash = ?", cursor.execute("DELETE FROM tag WHERE hash = ?",
(hash, (file_identifier,
)) ))
for t in merged: for t in merged:
t = str(t).strip().lower() t = str(t).strip().lower()

View File

@@ -30,7 +30,7 @@ class HydrusNetwork(Store):
""" """
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{ {
"key": "NAME", "key": "NAME",
@@ -723,6 +723,10 @@ class HydrusNetwork(Store):
if text: if text:
pattern_hints.append(text) pattern_hints.append(text)
pattern_hint = pattern_hints[0] if pattern_hints else "" pattern_hint = pattern_hints[0] if pattern_hints else ""
hashes: list[str] = []
file_ids: list[int] = []
if ":" in query_lower and not query_lower.startswith(":"): if ":" in query_lower and not query_lower.startswith(":"):
namespace, pattern = query_lower.split(":", 1) namespace, pattern = query_lower.split(":", 1)
namespace = namespace.strip().lower() namespace = namespace.strip().lower()
@@ -765,8 +769,8 @@ class HydrusNetwork(Store):
response = client._perform_request( response = client._perform_request(
spec spec
) # type: ignore[attr-defined] ) # type: ignore[attr-defined]
hashes: list[str] = [] hashes = []
file_ids: list[int] = [] file_ids = []
if isinstance(response, dict): if isinstance(response, dict):
raw_hashes = response.get("hashes") or response.get( raw_hashes = response.get("hashes") or response.get(
"file_hashes" "file_hashes"
@@ -870,11 +874,11 @@ class HydrusNetwork(Store):
freeform_predicates = [f"{query_lower}*"] freeform_predicates = [f"{query_lower}*"]
# Search files with the tags (unless url: search already produced metadata) # Search files with the tags (unless url: search already produced metadata)
results = [] results: list[dict[str, Any]] = []
if metadata_list is None: if metadata_list is None:
file_ids: list[int] = [] file_ids = []
hashes: list[str] = [] hashes = []
if freeform_union_search: if freeform_union_search:
if not title_predicates and not freeform_predicates: if not title_predicates and not freeform_predicates:
@@ -929,7 +933,7 @@ class HydrusNetwork(Store):
# Fast path: ext-only search. Avoid fetching metadata for an unbounded # Fast path: ext-only search. Avoid fetching metadata for an unbounded
# system:everything result set; fetch in chunks until we have enough. # system:everything result set; fetch in chunks until we have enough.
if ext_only and ext_filter: if ext_only and ext_filter:
results: list[dict[str, Any]] = [] results = []
if not file_ids and not hashes: if not file_ids and not hashes:
debug(f"{prefix} 0 result(s)") debug(f"{prefix} 0 result(s)")
return [] return []
@@ -1930,7 +1934,7 @@ class HydrusNetwork(Store):
try: try:
if service_key: if service_key:
# Mutate tags for many hashes in a single request # Mutate tags for many hashes in a single request
client.mutate_tags_by_key(hashes=hashes, service_key=service_key, add_tags=list(tag_tuple)) client.mutate_tags_by_key(hash=hashes, service_key=service_key, add_tags=list(tag_tuple))
any_success = True any_success = True
continue continue
except Exception as exc: except Exception as exc:

View File

@@ -30,7 +30,7 @@ from Store._base import Store
class ZeroTier(Store): class ZeroTier(Store):
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
return [ return [
{"key": "NAME", "label": "Store Name", "default": "", "required": True}, {"key": "NAME", "label": "Store Name", "default": "", "required": True},
{"key": "NETWORK_ID", "label": "ZeroTier Network ID", "default": "", "required": True}, {"key": "NETWORK_ID", "label": "ZeroTier Network ID", "default": "", "required": True},

View File

@@ -13,7 +13,7 @@ from typing import Any, Dict, List, Optional, Tuple
class Store(ABC): class Store(ABC):
@classmethod @classmethod
def config(cls) -> List[Dict[str, Any]]: def config_schema(cls) -> List[Dict[str, Any]]:
"""Return configuration schema for this store. """Return configuration schema for this store.
Returns a list of dicts: Returns a list of dicts:

View File

@@ -91,10 +91,10 @@ def _discover_store_classes() -> Dict[str, Type[BaseStore]]:
def _required_keys_for(store_cls: Type[BaseStore]) -> list[str]: def _required_keys_for(store_cls: Type[BaseStore]) -> list[str]:
# Support new config() schema # Support new config_schema() schema
if hasattr(store_cls, "config") and callable(store_cls.config): if hasattr(store_cls, "config_schema") and callable(store_cls.config_schema):
try: try:
schema = store_cls.config() schema = store_cls.config_schema()
keys = [] keys = []
if isinstance(schema, list): if isinstance(schema, list):
for field in schema: for field in schema:

View File

@@ -380,7 +380,7 @@ class ConfigModal(ModalScreen):
if stype in classes: if stype in classes:
cls = classes[stype] cls = classes[stype]
if hasattr(cls, "config") and callable(cls.config): if hasattr(cls, "config") and callable(cls.config):
for field_def in cls.config(): for field_def in cls.config_schema():
k = field_def.get("key") k = field_def.get("key")
if k: if k:
provider_schema_map[k.upper()] = field_def provider_schema_map[k.upper()] = field_def
@@ -395,7 +395,7 @@ class ConfigModal(ModalScreen):
try: try:
pcls = get_provider_class(item_name) pcls = get_provider_class(item_name)
if pcls and hasattr(pcls, "config") and callable(pcls.config): if pcls and hasattr(pcls, "config") and callable(pcls.config):
for field_def in pcls.config(): for field_def in pcls.config_schema():
k = field_def.get("key") k = field_def.get("key")
if k: if k:
provider_schema_map[k.upper()] = field_def provider_schema_map[k.upper()] = field_def
@@ -667,7 +667,7 @@ class ConfigModal(ModalScreen):
for stype, cls in all_classes.items(): for stype, cls in all_classes.items():
if hasattr(cls, "config") and callable(cls.config): if hasattr(cls, "config") and callable(cls.config):
try: try:
if cls.config(): if cls.config_schema():
options.append(stype) options.append(stype)
except Exception: except Exception:
pass pass
@@ -680,7 +680,7 @@ class ConfigModal(ModalScreen):
pcls = get_provider_class(ptype) pcls = get_provider_class(ptype)
if pcls and hasattr(pcls, "config") and callable(pcls.config): if pcls and hasattr(pcls, "config") and callable(pcls.config):
try: try:
if pcls.config(): if pcls.config_schema():
options.append(ptype) options.append(ptype)
except Exception: except Exception:
pass pass
@@ -856,7 +856,7 @@ class ConfigModal(ModalScreen):
cls = classes[stype] cls = classes[stype]
# Use schema for defaults if present # Use schema for defaults if present
if hasattr(cls, "config") and callable(cls.config): if hasattr(cls, "config") and callable(cls.config):
for field_def in cls.config(): for field_def in cls.config_schema():
key = field_def.get("key") key = field_def.get("key")
if key: if key:
val = field_def.get("default", "") val = field_def.get("default", "")
@@ -890,7 +890,7 @@ class ConfigModal(ModalScreen):
if pcls: if pcls:
# Use schema for defaults # Use schema for defaults
if hasattr(pcls, "config") and callable(pcls.config): if hasattr(pcls, "config") and callable(pcls.config):
for field_def in pcls.config(): for field_def in pcls.config_schema():
key = field_def.get("key") key = field_def.get("key")
if key: if key:
new_config[key] = field_def.get("default", "") new_config[key] = field_def.get("default", "")
@@ -988,7 +988,7 @@ class ConfigModal(ModalScreen):
if pcls: if pcls:
# Collect required keys from schema # Collect required keys from schema
if hasattr(pcls, "config") and callable(pcls.config): if hasattr(pcls, "config") and callable(pcls.config):
for field_def in pcls.config(): for field_def in pcls.config_schema():
if field_def.get("required"): if field_def.get("required"):
k = field_def.get("key") k = field_def.get("key")
if k and k not in required_keys: if k and k not in required_keys:

View File

@@ -153,6 +153,9 @@ class SearchModal(ModalScreen):
return return
source = self.source_select.value source = self.source_select.value
if not source or not isinstance(source, str):
logger.warning("[search-modal] No source selected")
return
# Clear existing results # Clear existing results
self.results_table.clear(columns=True) self.results_table.clear(columns=True)

View File

@@ -4,15 +4,13 @@ from typing import Any, Dict, List, Sequence, Tuple
import sys import sys
from SYS import pipeline as ctx from SYS import pipeline as ctx
from . import _shared as sh from ._shared import (
Cmdlet,
Cmdlet, CmdletArg, SharedArgs, parse_cmdlet_args, get_field, normalize_hash = ( CmdletArg,
sh.Cmdlet, SharedArgs,
sh.CmdletArg, parse_cmdlet_args,
sh.SharedArgs, get_field,
sh.parse_cmdlet_args, normalize_hash,
sh.get_field,
sh.normalize_hash,
) )
from SYS.logger import log from SYS.logger import log
from Store import Store from Store import Store

View File

@@ -8,14 +8,12 @@ import sys
import re import re
from fnmatch import fnmatch from fnmatch import fnmatch
from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse
from . import _shared as sh from ._shared import (
Cmdlet,
Cmdlet, SharedArgs, parse_cmdlet_args, get_field, normalize_hash = ( SharedArgs,
sh.Cmdlet, parse_cmdlet_args,
sh.SharedArgs, get_field,
sh.parse_cmdlet_args, normalize_hash,
sh.get_field,
sh.normalize_hash,
) )
from SYS.logger import log from SYS.logger import log
from SYS.result_table import Table from SYS.result_table import Table

View File

@@ -18,9 +18,7 @@ from SYS.rich_display import (
show_available_providers_panel, show_available_providers_panel,
) )
from . import _shared as sh from ._shared import (
(
Cmdlet, Cmdlet,
CmdletArg, CmdletArg,
SharedArgs, SharedArgs,
@@ -29,15 +27,6 @@ from . import _shared as sh
normalize_hash, normalize_hash,
first_title_tag, first_title_tag,
parse_hash_query, parse_hash_query,
) = (
sh.Cmdlet,
sh.CmdletArg,
sh.SharedArgs,
sh.get_field,
sh.should_show_help,
sh.normalize_hash,
sh.first_title_tag,
sh.parse_hash_query,
) )
from SYS import pipeline as ctx from SYS import pipeline as ctx

View File

@@ -16,7 +16,7 @@ def _register_cmdlet_object(cmdlet_obj, registry: Dict[str, CmdletFn]) -> None:
registry[cmdlet_obj.name.replace("_", "-").lower()] = run_fn registry[cmdlet_obj.name.replace("_", "-").lower()] = run_fn
# Cmdlet uses 'alias' (List[str]). Some older objects may use 'aliases'. # Cmdlet uses 'alias' (List[str]). Some older objects may use 'aliases'.
aliases = [] aliases: list[str] = []
if hasattr(cmdlet_obj, "alias") and getattr(cmdlet_obj, "alias"): if hasattr(cmdlet_obj, "alias") and getattr(cmdlet_obj, "alias"):
aliases.extend(getattr(cmdlet_obj, "alias") or []) aliases.extend(getattr(cmdlet_obj, "alias") or [])
if hasattr(cmdlet_obj, "aliases") and getattr(cmdlet_obj, "aliases"): if hasattr(cmdlet_obj, "aliases") and getattr(cmdlet_obj, "aliases"):

View File

@@ -55,11 +55,13 @@ from __future__ import annotations
import argparse import argparse
import os import os
import platform import platform
import re
from pathlib import Path from pathlib import Path
import shutil import shutil
import subprocess import subprocess
import sys import sys
import time import time
from typing import Optional
def run(cmd: list[str], quiet: bool = False, debug: bool = False, cwd: Optional[Path] = None) -> None: def run(cmd: list[str], quiet: bool = False, debug: bool = False, cwd: Optional[Path] = None) -> None:
@@ -1088,7 +1090,7 @@ def main() -> int:
# 7. CLI Verification # 7. CLI Verification
pb.update("Verifying CLI configuration...") pb.update("Verifying CLI configuration...")
try: try:
rc = subprocess.run( cli_verify_result = subprocess.run(
[ [
str(venv_python), str(venv_python),
"-c", "-c",
@@ -1098,7 +1100,7 @@ def main() -> int:
stderr=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
check=False, check=False,
) )
if rc.returncode != 0: if cli_verify_result.returncode != 0:
cmd = [ cmd = [
str(venv_python), str(venv_python),
"-c", "-c",
@@ -1335,7 +1337,7 @@ if (Test-Path (Join-Path $repo 'CLI.py')) {
else: else:
# POSIX # POSIX
# If running as root (id 0), prefer /usr/bin or /usr/local/bin which are standard on PATH # If running as root (id 0), prefer /usr/bin or /usr/local/bin which are standard on PATH
if os.getuid() == 0: if hasattr(os, "getuid") and os.getuid() == 0:
user_bin = Path("/usr/local/bin") user_bin = Path("/usr/local/bin")
if not os.access(user_bin, os.W_OK): if not os.access(user_bin, os.W_OK):
user_bin = Path("/usr/bin") user_bin = Path("/usr/bin")

View File

@@ -369,7 +369,11 @@ def is_elevated() -> bool:
return False return False
else: else:
try: try:
return os.geteuid() == 0 # Use getattr for platform-specific os methods to satisfy Mypy
geteuid = getattr(os, "geteuid", None)
if geteuid:
return bool(geteuid() == 0)
return False
except Exception: except Exception:
return False return False
except Exception: except Exception:
@@ -476,9 +480,9 @@ def fix_permissions_unix(
user = getpass.getuser() user = getpass.getuser()
try: try:
pw = pwd.getpwnam(user) pw = pwd.getpwnam(user) # type: ignore[attr-defined]
uid = pw.pw_uid uid = pw.pw_uid
gid = pw.pw_gid if not group else grp.getgrnam(group).gr_gid gid = pw.pw_gid if not group else grp.getgrnam(group).gr_gid # type: ignore[attr-defined]
except Exception: except Exception:
logging.warning("Could not resolve user/group to uid/gid; skipping chown.") logging.warning("Could not resolve user/group to uid/gid; skipping chown.")
return False return False
@@ -500,12 +504,14 @@ def fix_permissions_unix(
except Exception: except Exception:
# Best-effort fallback: chown/chmod individual entries # Best-effort fallback: chown/chmod individual entries
for root_dir, dirs, files in os.walk(path): for root_dir, dirs, files in os.walk(path):
if hasattr(os, "chown"):
try: try:
os.chown(root_dir, uid, gid) os.chown(root_dir, uid, gid)
except Exception: except Exception:
pass pass
for fn in files: for fn in files:
fpath = os.path.join(root_dir, fn) fpath = os.path.join(root_dir, fn)
if hasattr(os, "chown"):
try: try:
os.chown(fpath, uid, gid) os.chown(fpath, uid, gid)
except Exception: except Exception:
@@ -1454,11 +1460,12 @@ def main(argv: Optional[list[str]] = None) -> int:
if p.exists(): if p.exists():
client_found = p client_found = p
break break
run_client_script = None
if client_found: if client_found:
# Prefer run_client helper located in the cloned repo; if missing, fall back to top-level scripts folder helper. # Prefer run_client helper located in the cloned repo; if missing, fall back to top-level scripts folder helper.
script_dir = Path(__file__).resolve().parent script_dir = Path(__file__).resolve().parent
helper_candidates = [dest / "run_client.py", script_dir / "run_client.py"] helper_candidates = [dest / "run_client.py", script_dir / "run_client.py"]
run_client_script = None
for cand in helper_candidates: for cand in helper_candidates:
if cand.exists(): if cand.exists():
run_client_script = cand run_client_script = cand
@@ -1477,7 +1484,7 @@ def main(argv: Optional[list[str]] = None) -> int:
) )
else: else:
if getattr(args, "install_service", False): if getattr(args, "install_service", False):
if run_client_script.exists(): if run_client_script and run_client_script.exists():
cmd = [ cmd = [
str(venv_py), str(venv_py),
str(run_client_script), str(run_client_script),
@@ -1513,7 +1520,7 @@ def main(argv: Optional[list[str]] = None) -> int:
dest / "run_client.py", dest / "run_client.py",
) )
if getattr(args, "uninstall_service", False): if getattr(args, "uninstall_service", False):
if run_client_script.exists(): if run_client_script and run_client_script.exists():
cmd = [ cmd = [
str(venv_py), str(venv_py),
str(run_client_script), str(run_client_script),

View File

@@ -27,6 +27,7 @@ from SYS.models import (
) )
from SYS.pipeline_progress import PipelineProgress from SYS.pipeline_progress import PipelineProgress
from SYS.utils import ensure_directory, sha256_file from SYS.utils import ensure_directory, sha256_file
from SYS.metadata import extract_ytdlp_tags
_YTDLP_TRANSFER_STATE: Dict[str, Dict[str, Any]] = {} _YTDLP_TRANSFER_STATE: Dict[str, Dict[str, Any]] = {}
@@ -37,7 +38,7 @@ try:
except Exception as exc: # pragma: no cover - handled at runtime except Exception as exc: # pragma: no cover - handled at runtime
yt_dlp = None # type: ignore yt_dlp = None # type: ignore
gen_extractors = None # type: ignore gen_extractors = None # type: ignore
YTDLP_IMPORT_ERROR = exc YTDLP_IMPORT_ERROR: Optional[Exception] = exc
else: else:
YTDLP_IMPORT_ERROR = None YTDLP_IMPORT_ERROR = None
@@ -739,16 +740,16 @@ class YtDlpTool:
# Progress + utility helpers for yt-dlp driven downloads (previously in cmdlet/download_media). # Progress + utility helpers for yt-dlp driven downloads (previously in cmdlet/download_media).
_YTDLP_PROGRESS_BAR = ProgressBar() _YTDLP_PROGRESS_BAR = ProgressBar()
_YTDLP_TRANSFER_STATE: Dict[str, Dict[str, Any]] = {}
_YTDLP_PROGRESS_ACTIVITY_LOCK = threading.Lock()
_YTDLP_PROGRESS_LAST_ACTIVITY = 0.0
_YTDLP_PROGRESS_ACTIVITY_LOCK = threading.Lock() _YTDLP_PROGRESS_ACTIVITY_LOCK = threading.Lock()
_YTDLP_PROGRESS_LAST_ACTIVITY = 0.0 _YTDLP_PROGRESS_LAST_ACTIVITY = 0.0
_SUBTITLE_EXTS = (".vtt", ".srt", ".ass", ".ssa", ".lrc") _SUBTITLE_EXTS = (".vtt", ".srt", ".ass", ".ssa", ".lrc")
def _progress_label(status: Dict[str, Any]) -> str: def _progress_label(status: Optional[Dict[str, Any]]) -> str:
info_dict = status.get("info_dict") if isinstance(status.get("info_dict"), dict) else {} if not status:
return "unknown"
raw_info = status.get("info_dict")
info_dict = raw_info if isinstance(raw_info, dict) else {}
candidates = [ candidates = [
status.get("filename"), status.get("filename"),
@@ -1244,7 +1245,7 @@ def download_media(opts: DownloadOptions, *, debug_logger: Optional[DebugLogger]
debug( debug(
f"Skipping probe for playlist (item selection: {opts.playlist_items}), proceeding with download" f"Skipping probe for playlist (item selection: {opts.playlist_items}), proceeding with download"
) )
probe_result = {"url": opts.url} probe_result: Optional[Dict[str, Any]] = {"url": opts.url}
else: else:
probe_cookiefile = None probe_cookiefile = None
try: try:
@@ -1286,7 +1287,7 @@ def download_media(opts: DownloadOptions, *, debug_logger: Optional[DebugLogger]
debug(f"[yt-dlp] force_keyframes_at_cuts: {ytdl_options.get('force_keyframes_at_cuts', False)}") debug(f"[yt-dlp] force_keyframes_at_cuts: {ytdl_options.get('force_keyframes_at_cuts', False)}")
session_id = None session_id = None
first_section_info = {} first_section_info: Dict[str, Any] = {}
if ytdl_options.get("download_sections"): if ytdl_options.get("download_sections"):
live_ui, _ = PipelineProgress(pipeline_context).ui_and_pipe_index() live_ui, _ = PipelineProgress(pipeline_context).ui_and_pipe_index()
quiet_sections = bool(opts.quiet) or (live_ui is not None) quiet_sections = bool(opts.quiet) or (live_ui is not None)
@@ -1447,20 +1448,20 @@ def download_media(opts: DownloadOptions, *, debug_logger: Optional[DebugLogger]
raise DownloadError(str(exc)) from exc raise DownloadError(str(exc)) from exc
file_hash = sha256_file(media_path) file_hash = sha256_file(media_path)
tags = [] section_tags: List[str] = []
title = "" title = ""
if first_section_info: if first_section_info:
title = first_section_info.get("title", "") title = first_section_info.get("title", "")
if title: if title:
tags.append(f"title:{title}") section_tags.append(f"title:{title}")
debug(f"Added title tag for section download: {title}") debug(f"Added title tag for section download: {title}")
if first_section_info: if first_section_info:
info_dict = first_section_info info_dict_sec = first_section_info
else: else:
info_dict = {"id": media_path.stem, "title": title or media_path.stem, "ext": media_path.suffix.lstrip(".")} info_dict_sec = {"id": media_path.stem, "title": title or media_path.stem, "ext": media_path.suffix.lstrip(".")}
return DownloadMediaResult(path=media_path, info=info_dict, tag=tags, source_url=opts.url, hash_value=file_hash, paths=media_paths) return DownloadMediaResult(path=media_path, info=info_dict_sec, tag=section_tags, source_url=opts.url, hash_value=file_hash, paths=media_paths)
if not isinstance(info, dict): if not isinstance(info, dict):
log(f"Unexpected yt-dlp response: {type(info)}", file=sys.stderr) log(f"Unexpected yt-dlp response: {type(info)}", file=sys.stderr)
@@ -1483,7 +1484,7 @@ def download_media(opts: DownloadOptions, *, debug_logger: Optional[DebugLogger]
hash_value = None hash_value = None
tags: List[str] = [] tags: List[str] = []
if extract_ytdlp_tags: if extract_ytdlp_tags is not None:
try: try:
tags = extract_ytdlp_tags(entry) tags = extract_ytdlp_tags(entry)
except Exception as exc: except Exception as exc:
@@ -1524,10 +1525,10 @@ def download_media(opts: DownloadOptions, *, debug_logger: Optional[DebugLogger]
if debug_logger is not None: if debug_logger is not None:
debug_logger.write_record("hash-error", {"path": str(media_path), "error": str(exc)}) debug_logger.write_record("hash-error", {"path": str(media_path), "error": str(exc)})
tags = [] tags_res: List[str] = []
if extract_ytdlp_tags: if extract_ytdlp_tags is not None:
try: try:
tags = extract_ytdlp_tags(entry) tags_res = extract_ytdlp_tags(entry)
except Exception as exc: except Exception as exc:
log(f"Error extracting tags: {exc}", file=sys.stderr) log(f"Error extracting tags: {exc}", file=sys.stderr)
@@ -1546,7 +1547,7 @@ def download_media(opts: DownloadOptions, *, debug_logger: Optional[DebugLogger]
}, },
) )
return DownloadMediaResult(path=media_path, info=entry, tag=tags, source_url=source_url, hash_value=hash_value) return DownloadMediaResult(path=media_path, info=entry, tag=tags_res, source_url=source_url, hash_value=hash_value)
def _download_with_timeout(opts: DownloadOptions, timeout_seconds: int = 300) -> Any: def _download_with_timeout(opts: DownloadOptions, timeout_seconds: int = 300) -> Any: