fdf
This commit is contained in:
@@ -38,6 +38,9 @@ from API.httpx_shared import get_shared_httpx_client
|
||||
|
||||
# Default configuration
|
||||
DEFAULT_TIMEOUT = 30.0
|
||||
_CONTENT_DISPOSITION_FILENAME_RE = re.compile(
|
||||
r'filename\*?=(?:"([^"]*)"|([^;\s]*))'
|
||||
)
|
||||
DEFAULT_RETRIES = 3
|
||||
DEFAULT_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
|
||||
|
||||
@@ -661,7 +664,7 @@ def download_direct_file(
|
||||
content_type = ""
|
||||
|
||||
if content_disposition:
|
||||
match = re.search(r'filename\*?=(?:"([^"]*)"|([^;\s]*))', content_disposition)
|
||||
match = _CONTENT_DISPOSITION_FILENAME_RE.search(content_disposition)
|
||||
if match:
|
||||
extracted_name = match.group(1) or match.group(2)
|
||||
if extracted_name:
|
||||
|
||||
107
CLI.py
107
CLI.py
@@ -31,7 +31,6 @@ if not os.environ.get("MM_DEBUG"):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
import httpx
|
||||
import json
|
||||
import shlex
|
||||
import sys
|
||||
@@ -58,6 +57,16 @@ from SYS.rich_display import (
|
||||
stderr_console,
|
||||
stdout_console,
|
||||
)
|
||||
from cmdnat._status_shared import (
|
||||
add_startup_check as _shared_add_startup_check,
|
||||
default_provider_ping_targets as _default_provider_ping_targets,
|
||||
has_provider as _has_provider,
|
||||
has_store_subtype as _has_store_subtype,
|
||||
has_tool as _has_tool,
|
||||
ping_first as _ping_first,
|
||||
ping_url as _ping_url,
|
||||
provider_display_name as _provider_display_name,
|
||||
)
|
||||
|
||||
|
||||
def _install_rich_traceback(*, show_locals: bool = False) -> None:
|
||||
@@ -1858,10 +1867,6 @@ Come to love it when others take what you share, as there is no greater joy
|
||||
startup_table._interactive(True)._perseverance(True)
|
||||
startup_table.set_value_case("upper")
|
||||
|
||||
def _upper(value: Any) -> str:
|
||||
text = "" if value is None else str(value)
|
||||
return text.upper()
|
||||
|
||||
def _add_startup_check(
|
||||
status: str,
|
||||
name: str,
|
||||
@@ -1871,50 +1876,15 @@ Come to love it when others take what you share, as there is no greater joy
|
||||
files: int | str | None = None,
|
||||
detail: str = "",
|
||||
) -> None:
|
||||
row = startup_table.add_row()
|
||||
row.add_column("STATUS", _upper(status))
|
||||
row.add_column("NAME", _upper(name))
|
||||
row.add_column("PROVIDER", _upper(provider or ""))
|
||||
row.add_column("STORE", _upper(store or ""))
|
||||
row.add_column("FILES", "" if files is None else str(files))
|
||||
row.add_column("DETAIL", _upper(detail or ""))
|
||||
|
||||
def _has_store_subtype(cfg: dict, subtype: str) -> bool:
|
||||
store_cfg = cfg.get("store")
|
||||
if not isinstance(store_cfg, dict):
|
||||
return False
|
||||
bucket = store_cfg.get(subtype)
|
||||
if not isinstance(bucket, dict):
|
||||
return False
|
||||
return any(isinstance(v, dict) and bool(v) for v in bucket.values())
|
||||
|
||||
def _has_provider(cfg: dict, name: str) -> bool:
|
||||
provider_cfg = cfg.get("provider")
|
||||
if not isinstance(provider_cfg, dict):
|
||||
return False
|
||||
block = provider_cfg.get(str(name).strip().lower())
|
||||
return isinstance(block, dict) and bool(block)
|
||||
|
||||
def _has_tool(cfg: dict, name: str) -> bool:
|
||||
tool_cfg = cfg.get("tool")
|
||||
if not isinstance(tool_cfg, dict):
|
||||
return False
|
||||
block = tool_cfg.get(str(name).strip().lower())
|
||||
return isinstance(block, dict) and bool(block)
|
||||
|
||||
def _ping_url(url: str, timeout: float = 3.0) -> tuple[bool, str]:
    """Probe *url* with a single GET and report reachability.

    Returns ``(ok, detail)`` where *ok* is True for any HTTP status below
    500 (a 401/404 still proves the host is reachable) and *detail* is a
    human-readable summary for the startup table.
    """
    try:
        # Imported lazily so CLI startup does not always pay for the HTTP stack.
        from API.HTTP import HTTPClient

        with HTTPClient(timeout=timeout, retries=1) as client:
            resp = client.get(url, allow_redirects=True)
            code = int(getattr(resp, "status_code", 0) or 0)
            # 2xx-4xx all count as "reachable"; only 5xx/transport errors fail.
            ok = 200 <= code < 500
            return ok, f"{url} (HTTP {code})"
    except httpx.TimeoutException:
        # Timeout gets its own message; checked before the generic handler.
        return False, f"{url} (timeout)"
    except Exception as exc:
        return False, f"{url} ({type(exc).__name__})"
|
||||
_shared_add_startup_check(
|
||||
startup_table,
|
||||
status,
|
||||
name,
|
||||
provider=provider,
|
||||
store=store,
|
||||
files=files,
|
||||
detail=detail,
|
||||
)
|
||||
|
||||
config = self._config_loader.load()
|
||||
debug_enabled = bool(config.get("debug", False))
|
||||
@@ -2015,47 +1985,8 @@ Come to love it when others take what you share, as there is no greater joy
|
||||
file_availability = list_file_providers(config) or {}
|
||||
meta_availability = list_metadata_providers(config) or {}
|
||||
|
||||
def _provider_display_name(key: str) -> str:
|
||||
k = (key or "").strip()
|
||||
low = k.lower()
|
||||
if low == "openlibrary":
|
||||
return "OpenLibrary"
|
||||
if low == "alldebrid":
|
||||
return "AllDebrid"
|
||||
if low == "youtube":
|
||||
return "YouTube"
|
||||
return k[:1].upper() + k[1:] if k else "Provider"
|
||||
|
||||
already_checked = {"matrix"}
|
||||
|
||||
def _default_provider_ping_targets(provider_key: str) -> list[str]:
|
||||
prov = (provider_key or "").strip().lower()
|
||||
if prov == "openlibrary":
|
||||
return ["https://openlibrary.org"]
|
||||
if prov == "youtube":
|
||||
return ["https://www.youtube.com"]
|
||||
if prov == "bandcamp":
|
||||
return ["https://bandcamp.com"]
|
||||
if prov == "libgen":
|
||||
from Provider.libgen import MIRRORS
|
||||
|
||||
mirrors = [
|
||||
str(x).rstrip("/") for x in (MIRRORS or [])
|
||||
if str(x).strip()
|
||||
]
|
||||
return [m + "/json.php" for m in mirrors]
|
||||
return []
|
||||
|
||||
def _ping_first(urls: list[str]) -> tuple[bool, str]:
    """Ping each URL in order and return the first success.

    When every URL fails, the first URL's failure result is returned.
    Fix: the previous version re-pinged ``urls[0]`` after the loop had
    already probed it, issuing a redundant network request; the first
    attempt's result is now cached and reused instead.
    """
    first_failure: tuple[bool, str] | None = None
    for url in urls:
        ok, detail = _ping_url(url)
        if ok:
            return True, detail
        if first_failure is None:
            first_failure = (ok, detail)
    if first_failure is not None:
        return first_failure
    return False, "No ping target"
|
||||
|
||||
for provider_name in provider_cfg.keys():
|
||||
prov = str(provider_name or "").strip().lower()
|
||||
if not prov or prov in already_checked:
|
||||
|
||||
@@ -15,6 +15,7 @@ from API.HTTP import HTTPClient, _download_direct_file
|
||||
from API.alldebrid import AllDebridClient, parse_magnet_or_hash, is_torrent_file
|
||||
from ProviderCore.base import Provider, SearchResult
|
||||
from SYS.provider_helpers import TableProviderMixin
|
||||
from SYS.item_accessors import get_field as _extract_value
|
||||
from SYS.utils import sanitize_filename
|
||||
from SYS.logger import log, debug
|
||||
from SYS.models import DownloadError, PipeObject
|
||||
@@ -339,25 +340,6 @@ def _looks_like_torrent_source(candidate: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def _extract_value(source: Any, field: str) -> Any:
|
||||
if source is None:
|
||||
return None
|
||||
if isinstance(source, dict):
|
||||
if field in source:
|
||||
return source.get(field)
|
||||
else:
|
||||
try:
|
||||
value = getattr(source, field)
|
||||
except Exception:
|
||||
value = None
|
||||
if value is not None:
|
||||
return value
|
||||
extra = getattr(source, "extra", None)
|
||||
if isinstance(extra, dict) and field in extra:
|
||||
return extra.get(field)
|
||||
return None
|
||||
|
||||
|
||||
def _dispatch_alldebrid_magnet_search(
|
||||
magnet_id: int,
|
||||
config: Dict[str, Any],
|
||||
|
||||
@@ -11,7 +11,7 @@ import sys
|
||||
import tempfile
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple
|
||||
from SYS.logger import log
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -29,6 +29,7 @@ _CONFIG_CACHE: Dict[str, Any] = {}
|
||||
_LAST_SAVED_CONFIG: Dict[str, Any] = {}
|
||||
_CONFIG_SAVE_MAX_RETRIES = 5
|
||||
_CONFIG_SAVE_RETRY_DELAY = 0.15
|
||||
_CONFIG_MISSING = object()
|
||||
|
||||
|
||||
class ConfigSaveConflict(Exception):
|
||||
@@ -61,6 +62,94 @@ def clear_config_cache() -> None:
|
||||
_LAST_SAVED_CONFIG = {}
|
||||
|
||||
|
||||
def get_nested_config_value(config: Dict[str, Any], *path: str) -> Any:
    """Walk *path* through nested dicts, returning None as soon as a level is missing."""
    node: Any = config
    for segment in path:
        if not isinstance(node, dict):
            return None
        node = node.get(segment)
    return node
|
||||
|
||||
|
||||
def coerce_config_value(
    value: Any,
    existing_value: Any = _CONFIG_MISSING,
    *,
    on_error: Optional[Callable[[str], None]] = None,
) -> Any:
    """Coerce a string config *value* toward the type of *existing_value*.

    Without an existing value, only unambiguous "true"/"false" and pure
    digit strings are converted; everything else is kept as the original
    string.  When a conversion toward bool/int/float fails, *on_error*
    (if given) receives a warning message and the string is kept.
    Non-string values pass through untouched.
    """
    if not isinstance(value, str):
        return value

    text = value.strip()
    lowered = text.lower()

    def _warn(kind: str) -> None:
        # Mirror the historical warning wording exactly.
        if on_error is not None:
            on_error(f"Warning: Could not convert '{value}' to {kind}. Using string.")

    if existing_value is _CONFIG_MISSING:
        if lowered == "true":
            return True
        if lowered == "false":
            return False
        return int(text) if text.isdigit() else value

    if isinstance(existing_value, bool):
        if lowered in {"true", "yes", "1", "on"}:
            return True
        if lowered in {"false", "no", "0", "off"}:
            return False
        _warn("boolean")
        return value

    # bool is an int subclass, so exclude it explicitly.
    if isinstance(existing_value, int) and not isinstance(existing_value, bool):
        try:
            return int(text)
        except ValueError:
            _warn("int")
            return value

    if isinstance(existing_value, float):
        try:
            return float(text)
        except ValueError:
            _warn("float")
            return value

    return value
|
||||
|
||||
|
||||
def set_nested_config_value(
    config: Dict[str, Any],
    key_path: str | Sequence[str],
    value: Any,
    *,
    on_error: Optional[Callable[[str], None]] = None,
) -> bool:
    """Set a dotted or sequence *key_path* in *config*, coercing toward the old type.

    Intermediate non-dict nodes are replaced with fresh dicts.  Returns
    False when *config* is not a dict or the path is empty.
    """
    if not isinstance(config, dict):
        return False

    if isinstance(key_path, str):
        keys = [segment for segment in key_path.split(".") if segment]
    else:
        keys = [str(segment) for segment in (key_path or []) if str(segment)]
    if not keys:
        return False

    node = config
    for segment in keys[:-1]:
        child = node.get(segment)
        if not isinstance(child, dict):
            # Overwrite scalars/lists along the path with a new dict level.
            child = {}
            node[segment] = child
        node = child

    leaf = keys[-1]
    previous = node[leaf] if leaf in node else _CONFIG_MISSING
    node[leaf] = coerce_config_value(value, previous, on_error=on_error)
    return True
|
||||
|
||||
|
||||
def get_hydrus_instance(
|
||||
config: Dict[str, Any], instance_name: str = "home"
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
|
||||
104
SYS/detail_view_helpers.py
Normal file
104
SYS/detail_view_helpers.py
Normal file
@@ -0,0 +1,104 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Iterable, Optional, Sequence
|
||||
|
||||
|
||||
def _labelize_key(key: str) -> str:
|
||||
return str(key or "").replace("_", " ").title()
|
||||
|
||||
|
||||
def _normalize_tags_value(tags: Any) -> Optional[str]:
|
||||
if tags is None:
|
||||
return None
|
||||
if isinstance(tags, str):
|
||||
text = tags.strip()
|
||||
return text or None
|
||||
if isinstance(tags, Sequence):
|
||||
seen: list[str] = []
|
||||
for tag in tags:
|
||||
text = str(tag or "").strip()
|
||||
if text and text not in seen:
|
||||
seen.append(text)
|
||||
return ", ".join(seen) if seen else None
|
||||
text = str(tags).strip()
|
||||
return text or None
|
||||
|
||||
|
||||
def prepare_detail_metadata(
    subject: Any,
    *,
    include_subject_fields: bool = False,
    title: Optional[str] = None,
    hash_value: Optional[str] = None,
    store: Optional[str] = None,
    path: Optional[str] = None,
    tags: Any = None,
    prefer_existing_tags: bool = True,
    extra_fields: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Assemble the metadata dict shown in a detail view for *subject*.

    Starts from ``extract_item_metadata(subject)``, optionally folds in the
    subject's own dict fields (underscore/selection bookkeeping keys are
    skipped, existing metadata labels win), then applies the explicit
    Title/Hash/Store/Path overrides, a normalized Tags string, and finally
    *extra_fields* (non-None values only).
    """
    # Imported lazily to avoid a circular import with SYS.result_table.
    from SYS.result_table import extract_item_metadata

    metadata = extract_item_metadata(subject) or {}

    if include_subject_fields and isinstance(subject, dict):
        for key, value in subject.items():
            # Skip private/bookkeeping keys that drive selection behavior.
            if str(key).startswith("_") or key in {"selection_action", "selection_args"}:
                continue
            label = _labelize_key(str(key))
            # Already-extracted metadata takes precedence over raw fields.
            if label not in metadata and value is not None:
                metadata[label] = value

    if title:
        metadata["Title"] = title
    if hash_value:
        metadata["Hash"] = hash_value
    if store:
        metadata["Store"] = store
    if path:
        metadata["Path"] = path

    tags_text = _normalize_tags_value(tags)
    # By default, tags already present in the extracted metadata are kept.
    if tags_text and (not prefer_existing_tags or not metadata.get("Tags")):
        metadata["Tags"] = tags_text

    for key, value in (extra_fields or {}).items():
        if value is not None:
            metadata[str(key)] = value

    return metadata
|
||||
|
||||
|
||||
def create_detail_view(
    title: str,
    metadata: dict[str, Any],
    *,
    table_name: Optional[str] = None,
    source_command: Optional[tuple[str, Sequence[str]]] = None,
    init_command: Optional[tuple[str, Sequence[str]]] = None,
    max_columns: Optional[int] = None,
    exclude_tags: bool = False,
    value_case: Optional[str] = "preserve",
    perseverance: bool = True,
) -> Any:
    """Build a configured ``ItemDetailView`` for *metadata*.

    Optional knobs are applied only when provided so the view keeps its own
    defaults otherwise.  NOTE(review): ``set_source_command`` is called for
    its side effect while the other setters are treated as fluent (their
    return value is rebound) — this mirrors their differing APIs.
    """
    # Lazy import: result_table pulls in heavy display machinery.
    from SYS.result_table import ItemDetailView

    kwargs: dict[str, Any] = {"item_metadata": metadata}
    if max_columns is not None:
        kwargs["max_columns"] = max_columns
    if exclude_tags:
        kwargs["exclude_tags"] = True

    table = ItemDetailView(title, **kwargs)
    if table_name:
        table = table.set_table(table_name)
    if value_case:
        table = table.set_value_case(value_case)
    if perseverance:
        table = table._perseverance(True)
    if source_command:
        name, args = source_command
        table.set_source_command(name, list(args))
    if init_command:
        name, args = init_command
        table = table.init_command(name, list(args))
    return table
|
||||
132
SYS/item_accessors.py
Normal file
132
SYS/item_accessors.py
Normal file
@@ -0,0 +1,132 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import Any, Iterable, Optional
|
||||
|
||||
_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")
|
||||
|
||||
|
||||
def get_field(obj: Any, field: str, default: Optional[Any] = None) -> Any:
    """Fetch *field* from dicts, attributes, or an ``extra`` mapping.

    Lists delegate to their first element; empty lists yield *default*.
    """
    if isinstance(obj, list):
        if not obj:
            return default
        obj = obj[0]

    if isinstance(obj, dict):
        return obj.get(field, default)

    attr = getattr(obj, field, None)
    if attr is not None:
        return attr

    extra = getattr(obj, "extra", None)
    if isinstance(extra, dict):
        return extra.get(field, default)

    return default
|
||||
|
||||
|
||||
def first_field(obj: Any, fields: Iterable[str], default: Optional[Any] = None) -> Any:
    """Return the first non-None value among *fields* on *obj*, else *default*."""
    for name in fields:
        found = get_field(obj, str(name), None)
        if found is not None:
            return found
    return default
|
||||
|
||||
|
||||
def get_text_field(obj: Any, *fields: str, default: str = "") -> str:
    """First non-None field rendered as stripped text; *default* when blank."""
    raw = first_field(obj, fields, default=None)
    if raw is None:
        return default
    stripped = str(raw).strip()
    return stripped or default
|
||||
|
||||
|
||||
def get_column_text(obj: Any, *labels: str) -> Optional[str]:
    """Scan an item's ``columns`` (label, value) pairs for the first non-empty
    value whose label matches one of *labels*, case-insensitively."""
    pairs = get_field(obj, "columns")
    if not isinstance(pairs, list):
        return None
    targets = {str(lbl or "").strip().lower() for lbl in labels if str(lbl or "").strip()}
    if not targets:
        return None
    for entry in pairs:
        try:
            if not isinstance(entry, (list, tuple)) or len(entry) != 2:
                continue
            label, raw = entry
            if str(label or "").strip().lower() in targets:
                text = str(raw or "").strip()
                if text:
                    return text
        except Exception:
            # Malformed entries are skipped, never fatal.
            continue
    return None
|
||||
|
||||
|
||||
def get_int_field(obj: Any, *fields: str) -> Optional[int]:
    """First matching field coerced to int, or None when absent or unparsable.

    Fix: the previous explicit ``isinstance(value, (int, float))`` fast path
    sat OUTSIDE the try/except, so ``int(float("nan"))`` / ``int(float("inf"))``
    raised instead of returning None like every other bad value.  ``int()``
    handles ints and floats anyway, so the single guarded call covers all cases.
    """
    value = first_field(obj, fields, default=None)
    if value is None:
        return None
    try:
        return int(value)
    except Exception:
        return None
|
||||
|
||||
|
||||
def get_extension_field(obj: Any, *fields: str) -> str:
    """Extension text with any leading dots removed ("" when missing)."""
    names = fields or ("ext", "extension")
    text = get_text_field(obj, *names, default="")
    if not text:
        return ""
    return text.lstrip(".")
|
||||
|
||||
|
||||
def get_result_title(obj: Any, *fields: str) -> Optional[str]:
    """Best-effort display title: named fields first, then table columns."""
    names = fields or ("title", "name", "filename")
    text = get_text_field(obj, *names, default="")
    return text if text else get_column_text(obj, "title", "name")
|
||||
|
||||
|
||||
def extract_item_tags(obj: Any) -> list[str]:
    """Return the item's "tag" field as a clean string list (see get_string_list)."""
    return get_string_list(obj, "tag")
|
||||
|
||||
|
||||
def get_string_list(obj: Any, field: str) -> list[str]:
    """Normalize *field* into a list of non-empty stripped strings."""
    raw = get_field(obj, field)
    if isinstance(raw, list):
        cleaned: list[str] = []
        for entry in raw:
            if entry is None:
                continue
            text = str(entry).strip()
            if text:
                cleaned.append(text)
        return cleaned
    if isinstance(raw, str):
        text = raw.strip()
        return [text] if text else []
    return []
|
||||
|
||||
|
||||
def set_field(obj: Any, field: str, value: Any) -> bool:
    """Assign *field* on a dict or object; False when assignment fails."""
    if isinstance(obj, dict):
        obj[field] = value
        return True
    try:
        setattr(obj, field, value)
    except Exception:
        return False
    return True
|
||||
|
||||
|
||||
def get_sha256_hex(obj: Any, *fields: str) -> Optional[str]:
    """Lower-cased 64-hex-digit hash from the given fields, else None."""
    candidate = get_text_field(obj, *(fields or ("hash",)))
    if not candidate:
        return None
    if _SHA256_RE.fullmatch(candidate):
        return candidate.lower()
    return None
|
||||
|
||||
|
||||
def get_store_name(obj: Any, *fields: str) -> Optional[str]:
    """Store/backend name as text, or None when blank."""
    text = get_text_field(obj, *(fields or ("store",)))
    if text:
        return text
    return None
|
||||
|
||||
|
||||
def get_http_url(obj: Any, *fields: str) -> Optional[str]:
    """Return the field's value only when it is an http(s) URL."""
    candidate = get_text_field(obj, *(fields or ("url", "target")))
    if candidate.lower().startswith(("http://", "https://")):
        return candidate
    return None
|
||||
158
SYS/payload_builders.py
Normal file
158
SYS/payload_builders.py
Normal file
@@ -0,0 +1,158 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Sequence
|
||||
from urllib.parse import unquote, urlparse
|
||||
|
||||
|
||||
def normalize_file_extension(ext_value: Any) -> str:
    """Reduce a raw extension-ish value to a bare alphanumeric suffix.

    Cuts at the first separator, keeps only the final dotted segment,
    drops non-alphanumeric characters, and caps the result at five chars.
    """
    ext = str(ext_value or "").strip().lstrip(".")
    for separator in (" ", "|", "(", "[", "{", ",", ";"):
        head, found, _tail = ext.partition(separator)
        if found:
            ext = head
            break
    if "." in ext:
        ext = ext.rsplit(".", 1)[-1]
    cleaned = "".join(ch for ch in ext if ch.isalnum())
    return cleaned[:5]
|
||||
|
||||
|
||||
def extract_title_tag_value(tags: Sequence[str]) -> Optional[str]:
    """Find the first non-empty ``title:<value>`` tag (case-insensitive prefix)."""
    for raw in tags:
        text = str(raw or "").strip()
        if not text.lower().startswith("title:"):
            continue
        _prefix, _sep, remainder = text.partition(":")
        candidate = remainder.strip()
        if candidate:
            return candidate
    return None
|
||||
|
||||
|
||||
def _derive_title(
|
||||
title: Optional[str],
|
||||
fallback_title: Optional[str],
|
||||
path: Optional[str],
|
||||
url: Any,
|
||||
hash_value: Optional[str],
|
||||
) -> str:
|
||||
for candidate in (title, fallback_title):
|
||||
text = str(candidate or "").strip()
|
||||
if text:
|
||||
return text
|
||||
|
||||
path_text = str(path or "").strip()
|
||||
if path_text:
|
||||
try:
|
||||
return Path(path_text).stem or path_text
|
||||
except Exception:
|
||||
return path_text
|
||||
|
||||
if isinstance(url, str):
|
||||
try:
|
||||
parsed = urlparse(url)
|
||||
name = Path(unquote(parsed.path)).stem
|
||||
if name:
|
||||
return name
|
||||
except Exception:
|
||||
pass
|
||||
text = url.strip()
|
||||
if text:
|
||||
return text
|
||||
|
||||
if isinstance(url, list):
|
||||
for candidate in url:
|
||||
text = str(candidate or "").strip()
|
||||
if text:
|
||||
return text
|
||||
|
||||
return str(hash_value or "").strip()
|
||||
|
||||
|
||||
def build_file_result_payload(
    *,
    title: Optional[str] = None,
    fallback_title: Optional[str] = None,
    path: Optional[str] = None,
    url: Any = None,
    hash_value: Optional[str] = None,
    store: Optional[str] = None,
    tag: Optional[Sequence[str]] = None,
    ext: Any = None,
    size_bytes: Optional[int] = None,
    columns: Optional[Sequence[tuple[str, Any]]] = None,
    source: Optional[str] = None,
    table: Optional[str] = None,
    detail: Optional[str] = None,
    **extra: Any,
) -> Dict[str, Any]:
    """Build the canonical file-result payload dict.

    The title is derived (explicit > path stem > URL > hash) and the
    extension is normalized, falling back to the suffix of the path or
    derived title when *ext* itself yields nothing.  Optional keys are
    emitted only when they carry a value; ``**extra`` is applied last and
    therefore wins on key collisions.
    """
    resolved_title = _derive_title(title, fallback_title, path, url, hash_value)
    resolved_path = str(path).strip() if path is not None and str(path).strip() else None
    resolved_store = str(store).strip() if store is not None and str(store).strip() else None
    resolved_ext = normalize_file_extension(ext)
    if not resolved_ext:
        # Fall back to a file suffix found on the path, then on the title.
        for candidate in (resolved_path, resolved_title):
            text = str(candidate or "").strip()
            if not text:
                continue
            try:
                resolved_ext = normalize_file_extension(Path(text).suffix)
            except Exception:
                resolved_ext = ""
            if resolved_ext:
                break

    payload: Dict[str, Any] = {"title": resolved_title}

    if resolved_path is not None:
        payload["path"] = resolved_path
    if hash_value:
        payload["hash"] = str(hash_value)
    # url may be a string or a list; any non-empty value is kept as-is.
    if url not in (None, "", []):
        payload["url"] = url
    if resolved_store is not None:
        payload["store"] = resolved_store
    if tag is not None:
        payload["tag"] = list(tag)
    if resolved_ext:
        payload["ext"] = resolved_ext
    if size_bytes is not None:
        payload["size_bytes"] = size_bytes
    if columns is not None:
        payload["columns"] = list(columns)
    if source:
        payload["source"] = source
    if table:
        payload["table"] = table
    if detail is not None:
        payload["detail"] = str(detail)

    # Caller-provided extras override any derived keys.
    payload.update(extra)
    return payload
|
||||
|
||||
|
||||
def build_table_result_payload(
    *,
    columns: Sequence[tuple[str, Any]],
    title: Optional[str] = None,
    table: Optional[str] = None,
    detail: Optional[str] = None,
    selection_args: Optional[Sequence[Any]] = None,
    selection_action: Optional[Sequence[Any]] = None,
    **extra: Any,
) -> Dict[str, Any]:
    """Assemble a table-row payload dict; optional keys are added only when set."""
    normalized_columns = []
    for label, value in columns:
        normalized_columns.append((str(label), value))
    payload: Dict[str, Any] = {"columns": normalized_columns}

    if title is not None:
        payload["title"] = str(title)
    if table:
        payload["table"] = table
    if detail is not None:
        payload["detail"] = str(detail)
    if selection_args:
        payload["_selection_args"] = [str(arg) for arg in selection_args if arg is not None]
    if selection_action:
        payload["_selection_action"] = [str(arg) for arg in selection_action if arg is not None]

    # Extras are applied last and win on key collisions.
    payload.update(extra)
    return payload
|
||||
60
SYS/result_publication.py
Normal file
60
SYS/result_publication.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Iterable, Optional
|
||||
|
||||
|
||||
def resolve_publication_subject(
|
||||
items: Iterable[Any] | None,
|
||||
subject: Any = None,
|
||||
) -> Any:
|
||||
if subject is not None:
|
||||
return subject
|
||||
resolved_items = list(items or [])
|
||||
if not resolved_items:
|
||||
return None
|
||||
if len(resolved_items) == 1:
|
||||
return resolved_items[0]
|
||||
return resolved_items
|
||||
|
||||
|
||||
def publish_result_table(
    pipeline_context: Any,
    result_table: Any,
    items: Iterable[Any] | None = None,
    *,
    subject: Any = None,
    overlay: bool = False,
) -> None:
    """Publish *result_table* on the pipeline context, as an overlay when asked."""
    snapshot = list(items or [])
    resolved = resolve_publication_subject(snapshot, subject)
    setter = (
        pipeline_context.set_last_result_table_overlay
        if overlay
        else pipeline_context.set_last_result_table
    )
    setter(result_table, snapshot, subject=resolved)
|
||||
|
||||
|
||||
def overlay_existing_result_table(
    pipeline_context: Any,
    *,
    subject: Any = None,
) -> bool:
    """Re-publish the context's last result table as an overlay.

    Returns False when there is no previous table or it had no items.
    """
    previous_table = pipeline_context.get_last_result_table()
    previous_items = list(pipeline_context.get_last_result_items() or [])
    if previous_table is None or not previous_items:
        return False
    publish_result_table(
        pipeline_context,
        previous_table,
        previous_items,
        subject=subject,
        overlay=True,
    )
    return True
|
||||
10
SYS/result_table_helpers.py
Normal file
10
SYS/result_table_helpers.py
Normal file
@@ -0,0 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Iterable
|
||||
|
||||
|
||||
def add_row_columns(table: Any, columns: Iterable[tuple[str, Any]]) -> Any:
    """Append one row to *table* with the given (label, value) columns.

    None values render as ""; everything else is stringified.
    """
    row = table.add_row()
    for label, value in columns:
        text = "" if value is None else str(value)
        row.add_column(str(label), text)
    return row
|
||||
157
SYS/selection_builder.py
Normal file
157
SYS/selection_builder.py
Normal file
@@ -0,0 +1,157 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import Any, Iterable, List, Optional, Sequence, Tuple
|
||||
|
||||
|
||||
_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")
|
||||
|
||||
|
||||
def looks_like_url(value: Any, *, extra_prefixes: Iterable[str] = ()) -> bool:
    """True when *value* starts with a known URL/magnet scheme prefix.

    *extra_prefixes* extends the builtin set; blank extras are ignored.
    """
    try:
        text = str(value or "").strip().lower()
    except Exception:
        return False
    if not text:
        return False
    extras = tuple(
        str(prefix).strip().lower() for prefix in extra_prefixes if str(prefix).strip()
    )
    return text.startswith(("http://", "https://", "magnet:", "torrent:") + extras)
|
||||
|
||||
|
||||
def normalize_selection_args(selection_args: Any) -> Optional[List[str]]:
    """Coerce selection args to a list of strings; None stays None."""
    if isinstance(selection_args, (list, tuple)):
        return [str(item) for item in selection_args if item is not None]
    if selection_args is None:
        return None
    return [str(selection_args)]
|
||||
|
||||
|
||||
def normalize_hash_for_selection(value: Any) -> str:
    """Lower-case a 64-hex-digit hash; any other text passes through stripped."""
    text = str(value or "").strip()
    if re.fullmatch(r"[0-9a-fA-F]{64}", text):
        return text.lower()
    return text
|
||||
|
||||
|
||||
def build_hash_store_selection(
    hash_value: Any,
    store_value: Any,
    *,
    action_name: str = "get-metadata",
) -> Tuple[Optional[List[str]], Optional[List[str]]]:
    """Build ``-query hash:… -store …`` args plus the matching action vector.

    Returns ``(None, None)`` unless both a hash and a store are present.
    """
    hash_text = normalize_hash_for_selection(hash_value)
    store_text = str(store_value or "").strip()
    if not (hash_text and store_text):
        return None, None
    selection = ["-query", f"hash:{hash_text}", "-store", store_text]
    action = [action_name, *selection]
    return selection, action
|
||||
|
||||
|
||||
def build_default_selection(
    *,
    path_value: Any,
    hash_value: Any = None,
    store_value: Any = None,
) -> Tuple[Optional[List[str]], Optional[List[str]]]:
    """Derive default ``(selection_args, selection_action)`` for an item.

    Preference order: hash+store for ``/view_file`` URLs, plain URL
    download, hash+store metadata lookup, then local path retrieval.
    Returns ``(None, None)`` only when neither a path nor hash+store exist.
    """
    path_text = str(path_value or "").strip()
    hash_args, hash_action = build_hash_store_selection(hash_value, store_value)

    if path_text:
        if looks_like_url(path_text):
            # A /view_file URL is better addressed by its hash+store pair.
            if hash_args and "/view_file" in path_text:
                return hash_args, hash_action
            args = ["-url", path_text]
            return args, ["download-file", "-url", path_text]

        if hash_args:
            return hash_args, hash_action

        try:
            # expand_path resolves ~ and relative paths; fall back to raw text.
            from SYS.utils import expand_path

            resolved_path = str(expand_path(path_text))
        except Exception:
            resolved_path = path_text

        args = ["-path", resolved_path]
        return args, ["get-file", "-path", resolved_path]

    return hash_args, hash_action
|
||||
|
||||
|
||||
def extract_selection_fields(
    item: Any,
    *,
    extra_url_prefixes: Iterable[str] = (),
) -> Tuple[Optional[List[str]], Optional[List[str]], Optional[str]]:
    """Pull selection args/action and a usable URL out of a result item.

    Looks at the item itself first (dict keys or attributes), then at its
    nested ``metadata`` / ``full_metadata`` / ``extra`` dicts; earlier
    sources win.  The URL is dropped unless it matches a known scheme
    (optionally extended by *extra_url_prefixes*).
    """
    selection_args: Any = None
    selection_action: Any = None
    item_url: Any = None

    if isinstance(item, dict):
        selection_args = item.get("_selection_args") or item.get("selection_args")
        selection_action = item.get("_selection_action") or item.get("selection_action")
        item_url = item.get("url") or item.get("path") or item.get("target")
        nested_values = [item.get("metadata"), item.get("full_metadata"), item.get("extra")]
    else:
        item_url = getattr(item, "url", None) or getattr(item, "path", None) or getattr(item, "target", None)
        nested_values = [
            getattr(item, "metadata", None),
            getattr(item, "full_metadata", None),
            getattr(item, "extra", None),
        ]

    # Nested dicts only fill in values still missing from the item itself.
    for nested in nested_values:
        if not isinstance(nested, dict):
            continue
        selection_args = selection_args or nested.get("_selection_args") or nested.get("selection_args")
        selection_action = selection_action or nested.get("_selection_action") or nested.get("selection_action")
        item_url = item_url or nested.get("url") or nested.get("source_url") or nested.get("target")

    normalized_args = normalize_selection_args(selection_args)
    normalized_action = normalize_selection_args(selection_action)

    # Reject path-like values that are not actually URLs.
    if item_url and not looks_like_url(item_url, extra_prefixes=extra_url_prefixes):
        item_url = None

    return normalized_args, normalized_action, str(item_url) if item_url else None
|
||||
|
||||
|
||||
def selection_args_have_url(
    args_list: Sequence[str],
    *,
    extra_url_prefixes: Iterable[str] = (),
) -> bool:
    """True when args contain a ``-url``/``--url`` flag with a value, or any URL-like token."""
    for position, token in enumerate(args_list):
        flag = str(token or "").strip().lower()
        if flag in {"-url", "--url"} and position + 1 < len(args_list):
            return True
        if looks_like_url(token, extra_prefixes=extra_url_prefixes):
            return True
    return False
|
||||
|
||||
|
||||
def extract_urls_from_selection_args(
    args_list: Sequence[str],
    *,
    extra_url_prefixes: Iterable[str] = (),
) -> List[str]:
    """Collect unique URL-like values from args, honoring ``-url``/``--url`` pairs."""
    found: List[str] = []

    def _remember(candidate: str) -> None:
        # Preserve first-seen order, no duplicates.
        if candidate not in found:
            found.append(candidate)

    position = 0
    total = len(args_list)
    while position < total:
        token = str(args_list[position] or "")
        if token.strip().lower() in {"-url", "--url"} and position + 1 < total:
            value = str(args_list[position + 1] or "").strip()
            if looks_like_url(value, extra_prefixes=extra_url_prefixes):
                _remember(value)
            # Skip the flag's value even when it was not URL-like.
            position += 2
            continue
        if looks_like_url(token, extra_prefixes=extra_url_prefixes):
            _remember(token.strip())
        position += 1
    return found
|
||||
@@ -10,7 +10,16 @@ from textual.screen import ModalScreen
|
||||
from textual.widgets import Static, Button, Input, Label, ListView, ListItem, Rule, Select, Checkbox
|
||||
from pathlib import Path
|
||||
|
||||
from SYS.config import load_config, save_config, save_config_and_verify, reload_config, global_config, count_changed_entries, ConfigSaveConflict
|
||||
from SYS.config import (
|
||||
load_config,
|
||||
save_config,
|
||||
save_config_and_verify,
|
||||
reload_config,
|
||||
global_config,
|
||||
count_changed_entries,
|
||||
ConfigSaveConflict,
|
||||
coerce_config_value,
|
||||
)
|
||||
from SYS.database import db
|
||||
from SYS.logger import log, debug
|
||||
from Store.registry import _discover_store_classes, _required_keys_for
|
||||
@@ -1142,15 +1151,7 @@ class ConfigModal(ModalScreen):
|
||||
return
|
||||
|
||||
# Try to preserve boolean/integer types
|
||||
processed_value = raw_value
|
||||
if isinstance(raw_value, str):
|
||||
low = raw_value.lower()
|
||||
if low == "true":
|
||||
processed_value = True
|
||||
elif low == "false":
|
||||
processed_value = False
|
||||
elif raw_value.isdigit():
|
||||
processed_value = int(raw_value)
|
||||
processed_value = coerce_config_value(raw_value, existing_value)
|
||||
|
||||
if widget_id.startswith("global-"):
|
||||
self.config_data[key] = processed_value
|
||||
|
||||
@@ -11,14 +11,18 @@ import sys
|
||||
import tempfile
|
||||
import time
|
||||
from collections.abc import Iterable as IterableABC
|
||||
from functools import lru_cache
|
||||
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
|
||||
|
||||
from SYS.logger import log, debug
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Set
|
||||
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple
|
||||
from dataclasses import dataclass, field
|
||||
from SYS import models
|
||||
from SYS import pipeline as pipeline_context
|
||||
from SYS.item_accessors import get_field as _item_accessor_get_field
|
||||
from SYS.payload_builders import build_file_result_payload, build_table_result_payload
|
||||
from SYS.result_publication import publish_result_table
|
||||
from SYS.result_table import Table
|
||||
from SYS.rich_display import stderr_console as get_stderr_console
|
||||
from rich.prompt import Confirm
|
||||
@@ -944,6 +948,18 @@ def build_pipeline_preview(raw_urls: Sequence[str], piped_items: Sequence[Any])
|
||||
return preview
|
||||
|
||||
|
||||
@lru_cache(maxsize=4096)
def _normalize_hash_cached(hash_hex: str) -> Optional[str]:
    """Validate and lowercase a SHA-256 hex digest; results are memoized.

    Returns the normalized 64-char lowercase hex string, or None when the
    input is empty, the wrong length, or contains non-hex characters.
    """
    text = hash_hex.strip().lower()
    if not text:
        return None
    if len(text) != 64:
        return None
    if not all(ch in "0123456789abcdef" for ch in text):
        return None
    return text


def normalize_hash(hash_hex: Optional[str]) -> Optional[str]:
    """Normalize a hash string to lowercase, or return None if invalid.

    Accepts any value; non-strings and strings that are not exactly 64
    lowercase-hex characters (after stripping/casefolding) yield None.
    Validation is delegated to the lru_cache-backed helper so repeated
    lookups of the same digest are cheap.
    """
    if not isinstance(hash_hex, str):
        return None
    return _normalize_hash_cached(hash_hex)
|
||||
|
||||
|
||||
def resolve_hash_for_cmdlet(
|
||||
@@ -1007,6 +1016,270 @@ def resolve_hash_for_cmdlet(
|
||||
return None
|
||||
|
||||
|
||||
def resolve_item_store_hash(
    item: Any,
    *,
    override_store: Optional[str] = None,
    override_hash: Optional[str] = None,
    hash_field: str = "hash",
    store_field: str = "store",
    path_fields: Sequence[str] = ("path", "target"),
) -> Tuple[str, Optional[str]]:
    """Resolve store name and normalized hash from a result item."""
    # Explicit store override wins; otherwise read the store field off the item.
    store_name = str(override_store or get_field(item, store_field) or "").strip()

    item_hash = get_field(item, hash_field)

    # First non-empty path field acts as the fallback hash source.
    item_path = next(
        (value for value in (get_field(item, name) for name in path_fields) if value),
        None,
    )

    normalized = resolve_hash_for_cmdlet(
        str(item_hash) if item_hash else None,
        str(item_path) if item_path else None,
        str(override_hash) if override_hash else None,
    )
    return store_name, normalized
|
||||
|
||||
|
||||
def get_store_backend(
    config: Optional[Dict[str, Any]],
    store_name: Optional[str],
    *,
    store_registry: Any = None,
    suppress_debug: bool = False,
) -> Tuple[Optional[Any], Any, Optional[Exception]]:
    """Resolve a store backend, optionally reusing an existing registry.

    Returns ``(backend, store_registry, exc)`` so callers keep their own
    command-specific error messaging while avoiding repeated registry
    construction and ``store[name]`` boilerplate.
    """
    registry = store_registry
    if registry is None:
        # Lazily build a registry; construction/import failures surface in exc.
        try:
            from Store import Store

            registry = Store(config or {}, suppress_debug=suppress_debug)
        except Exception as exc:
            return None, None, exc

    name = str(store_name or "").strip()
    if not name:
        return None, registry, KeyError("Missing store name")

    try:
        backend = registry[name]
    except Exception as exc:
        return None, registry, exc
    return backend, registry, None
|
||||
|
||||
|
||||
def get_preferred_store_backend(
    config: Optional[Dict[str, Any]],
    store_name: Optional[str],
    *,
    store_registry: Any = None,
    suppress_debug: bool = True,
) -> Tuple[Optional[Any], Any, Optional[Exception]]:
    """Prefer a targeted backend instance before falling back to registry lookup."""
    first_error: Optional[Exception] = None

    # Fast path: ask the registry module for a dedicated backend instance.
    try:
        from Store.registry import get_backend_instance

        direct = get_backend_instance(
            config or {},
            str(store_name or ""),
            suppress_debug=suppress_debug,
        )
    except Exception as exc:
        first_error = exc
    else:
        if direct is not None:
            return direct, store_registry, None

    # Slow path: shared registry-based lookup.
    backend, registry, fallback_error = get_store_backend(
        config,
        store_name,
        store_registry=store_registry,
        suppress_debug=suppress_debug,
    )
    if backend is not None:
        return backend, registry, None
    # Report the earliest failure so callers see the most specific cause.
    return None, registry, first_error or fallback_error
|
||||
|
||||
|
||||
def coalesce_hash_value_pairs(
    pairs: Sequence[Tuple[str, Sequence[str]]],
) -> List[Tuple[str, List[str]]]:
    """Merge duplicate hash/value pairs while preserving first-seen value order.

    Blank hashes are dropped, values are stripped and de-duplicated per hash,
    and hashes whose value list ends up empty are omitted from the result.
    """
    buckets: Dict[str, List[str]] = {}
    for raw_hash, raw_values in pairs:
        key = str(raw_hash or "").strip()
        if not key:
            continue
        existing = buckets.setdefault(key, [])
        known = set(existing)
        for raw in raw_values or []:
            candidate = str(raw or "").strip()
            if candidate and candidate not in known:
                known.add(candidate)
                existing.append(candidate)
    return [(key, values) for key, values in buckets.items() if values]
|
||||
|
||||
|
||||
def run_store_hash_value_batches(
    config: Optional[Dict[str, Any]],
    batch: Dict[str, List[Tuple[str, Sequence[str]]]],
    *,
    bulk_method_name: str,
    single_method_name: str,
    store_registry: Any = None,
    suppress_debug: bool = False,
    pass_config_to_bulk: bool = True,
    pass_config_to_single: bool = True,
) -> Tuple[Any, List[Tuple[str, int, int]]]:
    """Dispatch grouped hash/value batches across stores.

    Returns ``(store_registry, stats)`` where ``stats`` contains
    ``(store_name, item_count, value_count)`` for each dispatched store.
    Missing stores are skipped so callers can preserve existing warning behavior.
    """
    registry = store_registry
    dispatched: List[Tuple[str, int, int]] = []

    for store_name, raw_pairs in batch.items():
        backend, registry, _exc = get_store_backend(
            config,
            store_name,
            store_registry=registry,
            suppress_debug=suppress_debug,
        )
        if backend is None:
            # Unresolvable store: skip silently, caller handles messaging.
            continue

        merged_pairs = coalesce_hash_value_pairs(raw_pairs)
        if not merged_pairs:
            continue

        bulk_handler = getattr(backend, bulk_method_name, None)
        if callable(bulk_handler):
            # Bulk path: one call covering the whole store batch.
            if pass_config_to_bulk:
                bulk_handler(merged_pairs, config=config)
            else:
                bulk_handler(merged_pairs)
        else:
            # Fallback: per-hash calls through the single-item method.
            single_handler = getattr(backend, single_method_name)
            for hash_value, values in merged_pairs:
                if pass_config_to_single:
                    single_handler(hash_value, values, config=config)
                else:
                    single_handler(hash_value, values)

        total_values = sum(len(values or []) for _hash, values in merged_pairs)
        dispatched.append((store_name, len(merged_pairs), total_values))

    return registry, dispatched
|
||||
|
||||
|
||||
def run_store_note_batches(
    config: Optional[Dict[str, Any]],
    batch: Dict[str, List[Tuple[str, str, str]]],
    *,
    store_registry: Any = None,
    suppress_debug: bool = False,
    on_store_error: Optional[Callable[[str, Exception], None]] = None,
    on_unsupported_store: Optional[Callable[[str], None]] = None,
    on_item_error: Optional[Callable[[str, str, str, Exception], None]] = None,
) -> Tuple[Any, int]:
    """Dispatch grouped note writes across stores while preserving item-level errors.

    Parameters:
        config: Optional configuration forwarded to backend resolution and set_note.
        batch: Mapping of store name -> list of (hash, note_name, note_text) tuples.
        store_registry: Optional pre-built registry reused across calls; a new one
            is created on demand by get_store_backend when None.
        suppress_debug: Forwarded to backend resolution.
        on_store_error: Invoked as (store_name, exc) when a store cannot be resolved.
        on_unsupported_store: Invoked with the store name when the backend has no set_note.
        on_item_error: Invoked as (store_name, hash, note_name, exc) for per-item failures.

    Returns:
        (store_registry, success_count) — the (possibly newly created) registry
        and the number of notes written successfully.
    """
    registry = store_registry
    success_count = 0
    for store_name, items in batch.items():
        backend, registry, exc = get_store_backend(
            config,
            store_name,
            store_registry=registry,
            suppress_debug=suppress_debug,
        )
        if backend is None:
            # Resolution failed: surface via callback when an exception is available.
            if on_store_error is not None and exc is not None:
                on_store_error(store_name, exc)
            continue
        if not hasattr(backend, "set_note"):
            # Backend lacks note support; report and skip this store's items.
            if on_unsupported_store is not None:
                on_unsupported_store(store_name)
            continue

        for hash_value, note_name, note_text in items:
            try:
                if backend.set_note(hash_value, note_name, note_text, config=config):
                    success_count += 1
            except Exception as item_exc:
                # A failed item does not abort the remainder of the batch.
                if on_item_error is not None:
                    on_item_error(store_name, hash_value, note_name, item_exc)

    return registry, success_count
|
||||
|
||||
|
||||
def collect_store_hash_value_batch(
    items: Sequence[Any],
    *,
    store_registry: Any,
    value_resolver: Callable[[Any], Optional[Sequence[str]]],
    override_hash: Optional[str] = None,
    override_store: Optional[str] = None,
    on_warning: Optional[Callable[[str], None]] = None,
) -> Tuple[Dict[str, List[Tuple[str, List[str]]]], List[Any]]:
    """Collect validated store/hash/value batches while preserving passthrough items.

    Every input item is appended to the passthrough list regardless of whether
    it contributes to the batch; invalid or unavailable entries are reported
    through ``on_warning`` and skipped.
    """

    def _warn(message: str) -> None:
        if on_warning is not None:
            on_warning(message)

    batch: Dict[str, List[Tuple[str, List[str]]]] = {}
    passthrough: List[Any] = []

    for item in items:
        # Items flow through unchanged, even when skipped below.
        passthrough.append(item)

        item_hash = override_hash or get_field(item, "hash")
        item_store = override_store or get_field(item, "store")
        if not item_hash or not item_store:
            _warn("Item missing hash/store; skipping")
            continue

        hash_key = normalize_hash(item_hash)
        if not hash_key:
            _warn("Item has invalid hash; skipping")
            continue

        store_key = str(item_store).strip()
        if not store_key:
            _warn("Item has empty store; skipping")
            continue

        # Availability check is best-effort; failures count as unavailable.
        try:
            available = bool(store_registry.is_available(store_key))
        except Exception:
            available = False
        if not available:
            _warn(f"Store '{store_key}' not configured; skipping")
            continue

        resolved = value_resolver(item) or []
        values = [text for text in (str(value).strip() for value in resolved) if text]
        if not values:
            continue

        batch.setdefault(store_key, []).append((hash_key, values))

    return batch, passthrough
|
||||
|
||||
|
||||
def parse_hash_query(query: Optional[str]) -> List[str]:
|
||||
"""Parse a unified query string for `hash:` into normalized SHA256 hashes.
|
||||
|
||||
@@ -1054,6 +1327,36 @@ def parse_single_hash_query(query: Optional[str]) -> Optional[str]:
|
||||
return hashes[0]
|
||||
|
||||
|
||||
def require_hash_query(
    query: Optional[str],
    error_message: str,
    *,
    log_file: Any = None,
) -> Tuple[List[str], bool]:
    """Parse a multi-hash query and log a caller-provided error on invalid input.

    Returns ``(hashes, ok)``; ``ok`` is False only when a non-empty query
    produced no usable hashes.
    """
    hashes = parse_hash_query(query)
    if hashes or not query:
        return hashes, True
    # A non-empty query that yields no hashes is a caller input error.
    if log_file is None:
        log(error_message)
    else:
        log(error_message, file=log_file)
    return [], False
||||
|
||||
|
||||
def require_single_hash_query(
    query: Optional[str],
    error_message: str,
    *,
    log_file: Any = None,
) -> Tuple[Optional[str], bool]:
    """Parse a single-hash query and log a caller-provided error on invalid input.

    Returns ``(hash, ok)``; ``ok`` is False only when a non-empty query failed
    to parse into a single hash.
    """
    parsed = parse_single_hash_query(query)
    if parsed or not query:
        return parsed, True
    # A non-empty query with no parsed hash is a caller input error.
    if log_file is None:
        log(error_message)
    else:
        log(error_message, file=log_file)
    return None, False
|
||||
|
||||
|
||||
def get_hash_for_operation(
|
||||
override_hash: Optional[str],
|
||||
result: Any,
|
||||
@@ -1180,26 +1483,7 @@ def get_field(obj: Any, field: str, default: Optional[Any] = None) -> Any:
|
||||
get_field(result, "hash") # From dict or object
|
||||
get_field(result, "table", "unknown") # With default
|
||||
"""
|
||||
# Handle lists by accessing the first element
|
||||
if isinstance(obj, list):
|
||||
if not obj:
|
||||
return default
|
||||
obj = obj[0]
|
||||
|
||||
if isinstance(obj, dict):
|
||||
return obj.get(field, default)
|
||||
else:
|
||||
# Try direct attribute access first
|
||||
value = getattr(obj, field, None)
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
# For PipeObjects, also check the extra field
|
||||
extra_val = getattr(obj, "extra", None)
|
||||
if isinstance(extra_val, dict):
|
||||
return extra_val.get(field, default)
|
||||
|
||||
return default
|
||||
return _item_accessor_get_field(obj, field, default)
|
||||
|
||||
|
||||
def should_show_help(args: Sequence[str]) -> bool:
|
||||
@@ -1636,33 +1920,22 @@ def create_pipe_object_result(
|
||||
Returns:
|
||||
Dict with all PipeObject fields for emission
|
||||
"""
|
||||
result: Dict[str, Any] = {
|
||||
"source": source,
|
||||
"id": identifier,
|
||||
"path": file_path,
|
||||
"action": f"cmdlet:{cmdlet_name}", # Format: cmdlet:cmdlet_name
|
||||
}
|
||||
result = build_file_result_payload(
|
||||
title=title,
|
||||
path=file_path,
|
||||
hash_value=hash_value,
|
||||
store=source,
|
||||
tag=tag,
|
||||
source=source,
|
||||
id=identifier,
|
||||
action=f"cmdlet:{cmdlet_name}",
|
||||
**extra,
|
||||
)
|
||||
|
||||
if title:
|
||||
result["title"] = title
|
||||
if hash_value:
|
||||
result["hash"] = hash_value
|
||||
if is_temp:
|
||||
result["is_temp"] = True
|
||||
if parent_hash:
|
||||
result["parent_hash"] = parent_hash
|
||||
if tag:
|
||||
result["tag"] = tag
|
||||
|
||||
# Canonical store field: use source for compatibility
|
||||
try:
|
||||
if source:
|
||||
result["store"] = source
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Add any extra fields
|
||||
result.update(extra)
|
||||
|
||||
return result
|
||||
|
||||
@@ -2153,6 +2426,32 @@ def normalize_result_input(result: Any) -> List[Dict[str, Any]]:
|
||||
return []
|
||||
|
||||
|
||||
def normalize_result_items(
    result: Any,
    *,
    include_falsey_single: bool = False,
) -> List[Any]:
    """Normalize piped input to a raw item list without converting item types."""
    if isinstance(result, list):
        # Shallow copy so callers can mutate the returned list safely.
        return list(result)
    if result is None:
        return []
    # Falsey scalars (0, "", False) are dropped unless explicitly kept.
    return [result] if (result or include_falsey_single) else []
|
||||
|
||||
|
||||
def value_has_content(value: Any) -> bool:
    """Return True when a value should be treated as present for payload building."""
    if value is None:
        return False
    if isinstance(value, str):
        # Whitespace-only strings count as empty.
        return value.strip() != ""
    if isinstance(value, (list, tuple, set)):
        return bool(value)
    # Any other type (numbers, dicts, objects) is considered present.
    return True
|
||||
|
||||
|
||||
def filter_results_by_temp(results: List[Any], include_temp: bool = False) -> List[Any]:
|
||||
"""Filter results by temporary status.
|
||||
|
||||
@@ -2380,6 +2679,46 @@ def extract_url_from_result(result: Any) -> list[str]:
|
||||
return normalize_urls(url)
|
||||
|
||||
|
||||
def merge_urls(existing: Any, incoming: Sequence[Any]) -> list[str]:
    """Merge URL values into a normalized, de-duplicated list."""
    from SYS.metadata import normalize_urls

    combined: list[str] = []
    # Existing values first, then incoming; first occurrence wins.
    for source in (normalize_urls(existing), normalize_urls(list(incoming or []))):
        for url in source:
            if url not in combined:
                combined.append(url)
    return combined
|
||||
|
||||
|
||||
def remove_urls(existing: Any, remove: Sequence[Any]) -> list[str]:
    """Remove URL values from an existing URL field and return survivors."""
    from SYS.metadata import normalize_urls

    current = normalize_urls(existing)
    doomed = {url for url in normalize_urls(list(remove or [])) if url}
    # Nothing to remove: hand back the normalized list untouched.
    if not doomed:
        return current
    return [url for url in current if url not in doomed]
|
||||
|
||||
|
||||
def set_item_urls(item: Any, urls: Sequence[Any]) -> None:
    """Persist normalized URL values back onto a dict/object result item."""
    normalized = merge_urls([], list(urls or []))
    # A single URL collapses to a scalar; multiples stay a list.
    payload: Any = normalized[0] if len(normalized) == 1 else list(normalized)

    try:
        if isinstance(item, dict):
            item["url"] = payload
            return
        if hasattr(item, "url"):
            setattr(item, "url", payload)
    except Exception:
        # Best effort: read-only or exotic items are left untouched.
        return
|
||||
|
||||
|
||||
def extract_relationships(result: Any) -> Optional[Dict[str, Any]]:
|
||||
if isinstance(result, models.PipeObject):
|
||||
relationships = result.get_relationships()
|
||||
@@ -3270,14 +3609,9 @@ def check_url_exists_in_storage(
|
||||
ext = extracted.get("ext") if isinstance(extracted, dict) else ""
|
||||
size_val = extracted.get("size") if isinstance(extracted, dict) else None
|
||||
|
||||
return {
|
||||
"title": str(title),
|
||||
"store": str(get_field(hit, "store") or backend_name),
|
||||
"hash": str(file_hash or ""),
|
||||
"ext": str(ext or ""),
|
||||
"size": size_val,
|
||||
"url": original_url,
|
||||
"columns": [
|
||||
return build_table_result_payload(
|
||||
title=str(title),
|
||||
columns=[
|
||||
("Title", str(title)),
|
||||
("Store", str(get_field(hit, "store") or backend_name)),
|
||||
("Hash", str(file_hash or "")),
|
||||
@@ -3285,7 +3619,12 @@ def check_url_exists_in_storage(
|
||||
("Size", size_val),
|
||||
("URL", original_url),
|
||||
],
|
||||
}
|
||||
store=str(get_field(hit, "store") or backend_name),
|
||||
hash=str(file_hash or ""),
|
||||
ext=str(ext or ""),
|
||||
size=size_val,
|
||||
url=original_url,
|
||||
)
|
||||
|
||||
def _search_backend_url_hits(
|
||||
backend: Any,
|
||||
@@ -3443,18 +3782,18 @@ def check_url_exists_in_storage(
|
||||
|
||||
seen_pairs.add((original_url, str(backend_name)))
|
||||
matched_urls.add(original_url)
|
||||
display_row = {
|
||||
"title": "(exists)",
|
||||
"store": str(backend_name),
|
||||
"hash": found_hash or "",
|
||||
"url": original_url,
|
||||
"columns": [
|
||||
display_row = build_table_result_payload(
|
||||
title="(exists)",
|
||||
columns=[
|
||||
("Title", "(exists)"),
|
||||
("Store", str(backend_name)),
|
||||
("Hash", found_hash or ""),
|
||||
("URL", original_url),
|
||||
],
|
||||
}
|
||||
store=str(backend_name),
|
||||
hash=found_hash or "",
|
||||
url=original_url,
|
||||
)
|
||||
match_rows.append(display_row)
|
||||
continue
|
||||
|
||||
@@ -3700,11 +4039,7 @@ def display_and_persist_items(
|
||||
|
||||
setattr(table, "_rendered_by_cmdlet", True)
|
||||
|
||||
# Use provided subject or default to first item
|
||||
if subject is None:
|
||||
subject = items[0] if len(items) == 1 else list(items)
|
||||
|
||||
# Persist table for @N selection across command boundaries
|
||||
pipeline_context.set_last_result_table(table, list(items), subject=subject)
|
||||
publish_result_table(pipeline_context, table, items, subject=subject)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -12,7 +12,9 @@ from urllib.parse import urlparse
|
||||
from SYS import models
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.logger import log, debug, is_debug_enabled
|
||||
from SYS.payload_builders import build_table_result_payload
|
||||
from SYS.pipeline_progress import PipelineProgress
|
||||
from SYS.result_publication import overlay_existing_result_table, publish_result_table
|
||||
from SYS.utils_constant import ALL_SUPPORTED_EXTENSIONS
|
||||
from Store import Store
|
||||
from API.HTTP import _download_direct_file
|
||||
@@ -444,27 +446,18 @@ class Add_File(Cmdlet):
|
||||
ext = str(file_info.get("ext") or "").lstrip(".")
|
||||
size = file_info.get("size", 0)
|
||||
|
||||
row_item = {
|
||||
"path":
|
||||
str(p) if p is not None else "",
|
||||
"hash":
|
||||
hp,
|
||||
"title":
|
||||
clean_title,
|
||||
"columns": [
|
||||
("Title",
|
||||
clean_title),
|
||||
("Hash",
|
||||
hp),
|
||||
("Size",
|
||||
size),
|
||||
("Ext",
|
||||
ext),
|
||||
row_item = build_table_result_payload(
|
||||
title=clean_title,
|
||||
columns=[
|
||||
("Title", clean_title),
|
||||
("Hash", hp),
|
||||
("Size", size),
|
||||
("Ext", ext),
|
||||
],
|
||||
# Used by @N replay (CLI will combine selected rows into -path file1,file2,...)
|
||||
"_selection_args": ["-path",
|
||||
str(p) if p is not None else ""],
|
||||
}
|
||||
selection_args=["-path", str(p) if p is not None else ""],
|
||||
path=str(p) if p is not None else "",
|
||||
hash=hp,
|
||||
)
|
||||
rows.append(row_item)
|
||||
table.add_result(row_item)
|
||||
|
||||
@@ -537,8 +530,7 @@ class Add_File(Cmdlet):
|
||||
else:
|
||||
pipe_obj.extra = {}
|
||||
|
||||
merged_urls.extend(cli_urls)
|
||||
merged_urls = normalize_urls(merged_urls)
|
||||
merged_urls = sh.merge_urls(merged_urls, cli_urls)
|
||||
if merged_urls:
|
||||
pipe_obj.extra["url"] = merged_urls
|
||||
except Exception:
|
||||
@@ -827,13 +819,15 @@ class Add_File(Cmdlet):
|
||||
except Exception as exc:
|
||||
debug(f"[add-file] Item details render failed: {exc}")
|
||||
|
||||
ctx.set_last_result_table_overlay(
|
||||
publish_result_table(
|
||||
ctx,
|
||||
table,
|
||||
items,
|
||||
subject={
|
||||
"store": store,
|
||||
"hash": hashes
|
||||
}
|
||||
},
|
||||
overlay=True,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -1673,7 +1667,7 @@ class Add_File(Cmdlet):
|
||||
table = Table("Result")
|
||||
table.add_result(payload)
|
||||
# Overlay so @1 refers to this add-file result without overwriting search history
|
||||
ctx.set_last_result_table_overlay(table, [payload], subject=payload)
|
||||
publish_result_table(ctx, table, [payload], subject=payload, overlay=True)
|
||||
except Exception:
|
||||
# If table rendering fails, still keep @ selection items
|
||||
try:
|
||||
@@ -1734,15 +1728,13 @@ class Add_File(Cmdlet):
|
||||
try:
|
||||
table = ctx.get_last_result_table()
|
||||
items = ctx.get_last_result_items()
|
||||
if table is not None and items:
|
||||
ctx.set_last_result_table_overlay(
|
||||
table,
|
||||
items,
|
||||
subject={
|
||||
"store": store,
|
||||
"hash": hash_value
|
||||
}
|
||||
)
|
||||
overlay_existing_result_table(
|
||||
ctx,
|
||||
subject={
|
||||
"store": store,
|
||||
"hash": hash_value
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -2484,58 +2476,36 @@ class Add_File(Cmdlet):
|
||||
if not pairs:
|
||||
continue
|
||||
try:
|
||||
backend = store[backend_name]
|
||||
backend, store, _exc = sh.get_store_backend(
|
||||
config,
|
||||
backend_name,
|
||||
store_registry=store,
|
||||
)
|
||||
if backend is None:
|
||||
continue
|
||||
|
||||
items = sh.coalesce_hash_value_pairs(pairs)
|
||||
if not items:
|
||||
continue
|
||||
|
||||
bulk = getattr(backend, "add_url_bulk", None)
|
||||
if callable(bulk):
|
||||
try:
|
||||
bulk(items)
|
||||
continue
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
single = getattr(backend, "add_url", None)
|
||||
if callable(single):
|
||||
for h, u in items:
|
||||
try:
|
||||
single(h, u)
|
||||
except Exception:
|
||||
continue
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Merge URLs per hash and de-duplicate.
|
||||
merged: Dict[str,
|
||||
List[str]] = {}
|
||||
for file_hash, urls in pairs:
|
||||
h = str(file_hash or "").strip().lower()
|
||||
if len(h) != 64:
|
||||
continue
|
||||
url_list: List[str] = []
|
||||
try:
|
||||
for u in urls or []:
|
||||
s = str(u or "").strip()
|
||||
if s:
|
||||
url_list.append(s)
|
||||
except Exception:
|
||||
url_list = []
|
||||
if not url_list:
|
||||
continue
|
||||
|
||||
bucket = merged.setdefault(h, [])
|
||||
seen = set(bucket)
|
||||
for u in url_list:
|
||||
if u in seen:
|
||||
continue
|
||||
seen.add(u)
|
||||
bucket.append(u)
|
||||
|
||||
items: List[tuple[str,
|
||||
List[str]]] = [(h,
|
||||
u) for h, u in merged.items() if u]
|
||||
if not items:
|
||||
continue
|
||||
|
||||
bulk = getattr(backend, "add_url_bulk", None)
|
||||
if callable(bulk):
|
||||
try:
|
||||
bulk(items)
|
||||
continue
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
single = getattr(backend, "add_url", None)
|
||||
if callable(single):
|
||||
for h, u in items:
|
||||
try:
|
||||
single(h, u)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
@staticmethod
|
||||
def _apply_pending_tag_associations(
|
||||
pending: Dict[str,
|
||||
@@ -2552,30 +2522,15 @@ class Add_File(Cmdlet):
|
||||
except Exception:
|
||||
return
|
||||
|
||||
for backend_name, pairs in (pending or {}).items():
|
||||
if not pairs:
|
||||
continue
|
||||
try:
|
||||
backend = store[backend_name]
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Try bulk variant first
|
||||
bulk = getattr(backend, "add_tags_bulk", None)
|
||||
if callable(bulk):
|
||||
try:
|
||||
bulk([(h, t) for h, t in pairs])
|
||||
continue
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
single = getattr(backend, "add_tag", None)
|
||||
if callable(single):
|
||||
for h, t in pairs:
|
||||
try:
|
||||
single(h, t)
|
||||
except Exception:
|
||||
continue
|
||||
sh.run_store_hash_value_batches(
|
||||
config,
|
||||
pending or {},
|
||||
bulk_method_name="add_tags_bulk",
|
||||
single_method_name="add_tag",
|
||||
store_registry=store,
|
||||
pass_config_to_bulk=False,
|
||||
pass_config_to_single=False,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _load_sidecar_bundle(
|
||||
|
||||
@@ -18,8 +18,6 @@ normalize_hash = sh.normalize_hash
|
||||
parse_cmdlet_args = sh.parse_cmdlet_args
|
||||
normalize_result_input = sh.normalize_result_input
|
||||
should_show_help = sh.should_show_help
|
||||
from Store import Store
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
|
||||
class Add_Note(Cmdlet):
|
||||
@@ -171,14 +169,6 @@ class Add_Note(Cmdlet):
|
||||
|
||||
return tokens
|
||||
|
||||
def _resolve_hash(
|
||||
self,
|
||||
raw_hash: Optional[str],
|
||||
raw_path: Optional[str],
|
||||
override_hash: Optional[str],
|
||||
) -> Optional[str]:
|
||||
return sh.resolve_hash_for_cmdlet(raw_hash, raw_path, override_hash)
|
||||
|
||||
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if should_show_help(args):
|
||||
log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
|
||||
@@ -217,8 +207,12 @@ class Add_Note(Cmdlet):
|
||||
# Direct targeting mode: apply note once to the explicit target and
|
||||
# pass through any piped items unchanged.
|
||||
try:
|
||||
store_registry = Store(config)
|
||||
backend = store_registry[str(store_override)]
|
||||
backend, _store_registry, exc = sh.get_store_backend(
|
||||
config,
|
||||
str(store_override),
|
||||
)
|
||||
if backend is None:
|
||||
raise exc or KeyError(store_override)
|
||||
ok = bool(
|
||||
backend.set_note(
|
||||
str(hash_override),
|
||||
@@ -262,7 +256,7 @@ class Add_Note(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
store_registry = Store(config)
|
||||
store_registry = None
|
||||
planned_ops = 0
|
||||
|
||||
# Batch write plan: store -> [(hash, name, text), ...]
|
||||
@@ -307,9 +301,12 @@ class Add_Note(Cmdlet):
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
store_name = str(store_override or res.get("store") or "").strip()
|
||||
raw_hash = res.get("hash")
|
||||
raw_path = res.get("path")
|
||||
store_name, resolved_hash = sh.resolve_item_store_hash(
|
||||
res,
|
||||
override_store=str(store_override) if store_override else None,
|
||||
override_hash=str(hash_override) if hash_override else None,
|
||||
path_fields=("path",),
|
||||
)
|
||||
|
||||
if not store_name:
|
||||
log(
|
||||
@@ -318,11 +315,6 @@ class Add_Note(Cmdlet):
|
||||
)
|
||||
continue
|
||||
|
||||
resolved_hash = self._resolve_hash(
|
||||
raw_hash=str(raw_hash) if raw_hash else None,
|
||||
raw_path=str(raw_path) if raw_path else None,
|
||||
override_hash=str(hash_override) if hash_override else None,
|
||||
)
|
||||
if not resolved_hash:
|
||||
log(
|
||||
"[add_note] Warning: Item missing usable hash; skipping",
|
||||
@@ -343,23 +335,23 @@ class Add_Note(Cmdlet):
|
||||
|
||||
|
||||
# Execute batch operations
|
||||
success_count = 0
|
||||
for store_name, ops in note_ops.items():
|
||||
try:
|
||||
backend = store_registry[store_name]
|
||||
if not hasattr(backend, "set_note"):
|
||||
log(f"[add_note] Store '{store_name}' does not support notes", file=sys.stderr)
|
||||
continue
|
||||
|
||||
for (h, name, text) in ops:
|
||||
try:
|
||||
if backend.set_note(h, name, text, config=config):
|
||||
success_count += 1
|
||||
except Exception as e:
|
||||
log(f"[add_note] Write failed {store_name}:{h} ({name}): {e}", file=sys.stderr)
|
||||
|
||||
except Exception as e:
|
||||
log(f"[add_note] Store access failed '{store_name}': {e}", file=sys.stderr)
|
||||
def _on_store_error(store_name: str, exc: Exception) -> None:
|
||||
log(f"[add_note] Store access failed '{store_name}': {exc}", file=sys.stderr)
|
||||
|
||||
def _on_unsupported_store(store_name: str) -> None:
|
||||
log(f"[add_note] Store '{store_name}' does not support notes", file=sys.stderr)
|
||||
|
||||
def _on_item_error(store_name: str, hash_value: str, note_name_value: str, exc: Exception) -> None:
|
||||
log(f"[add_note] Write failed {store_name}:{hash_value} ({note_name_value}): {exc}", file=sys.stderr)
|
||||
|
||||
store_registry, success_count = sh.run_store_note_batches(
|
||||
config,
|
||||
note_ops,
|
||||
store_registry=store_registry,
|
||||
on_store_error=_on_store_error,
|
||||
on_unsupported_store=_on_unsupported_store,
|
||||
on_item_error=_on_item_error,
|
||||
)
|
||||
|
||||
if planned_ops > 0:
|
||||
msg = f"✓ add-note: Updated {success_count}/{planned_ops} notes across {len(note_ops)} stores"
|
||||
|
||||
@@ -8,6 +8,7 @@ from pathlib import Path
|
||||
import sys
|
||||
|
||||
from SYS.logger import log
|
||||
from SYS.item_accessors import get_sha256_hex, get_store_name
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
@@ -20,7 +21,6 @@ parse_cmdlet_args = sh.parse_cmdlet_args
|
||||
normalize_result_input = sh.normalize_result_input
|
||||
should_show_help = sh.should_show_help
|
||||
get_field = sh.get_field
|
||||
from Store import Store
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="add-relationship",
|
||||
@@ -68,14 +68,7 @@ CMDLET = Cmdlet(
|
||||
)
|
||||
|
||||
|
||||
def _normalize_hash_hex(value: Optional[str]) -> Optional[str]:
|
||||
"""Normalize a hash hex string to lowercase 64-char format."""
|
||||
if not value or not isinstance(value, str):
|
||||
return None
|
||||
normalized = value.strip().lower()
|
||||
if len(normalized) == 64 and all(c in "0123456789abcdef" for c in normalized):
|
||||
return normalized
|
||||
return None
|
||||
_normalize_hash_hex = sh.normalize_hash
|
||||
|
||||
|
||||
def _extract_relationships_from_tag(tag_value: str) -> Dict[str, list[str]]:
|
||||
@@ -279,23 +272,10 @@ def _resolve_items_from_at(token: str) -> Optional[list[Any]]:
|
||||
def _extract_hash_and_store(item: Any) -> tuple[Optional[str], Optional[str]]:
|
||||
"""Extract (hash_hex, store) from a result item (dict/object)."""
|
||||
try:
|
||||
h = get_field(item,
|
||||
"hash_hex") or get_field(item,
|
||||
"hash") or get_field(item,
|
||||
"file_hash")
|
||||
s = get_field(item, "store")
|
||||
|
||||
hash_norm = _normalize_hash_hex(str(h) if h is not None else None)
|
||||
|
||||
store_norm: Optional[str]
|
||||
if s is None:
|
||||
store_norm = None
|
||||
else:
|
||||
store_norm = str(s).strip()
|
||||
if not store_norm:
|
||||
store_norm = None
|
||||
|
||||
return hash_norm, store_norm
|
||||
return (
|
||||
get_sha256_hex(item, "hash_hex", "hash", "file_hash"),
|
||||
get_store_name(item, "store"),
|
||||
)
|
||||
except Exception:
|
||||
return None, None
|
||||
|
||||
@@ -461,9 +441,12 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
parsed = parse_cmdlet_args(_args, CMDLET)
|
||||
arg_path: Optional[Path] = None
|
||||
override_store = parsed.get("store")
|
||||
override_hashes = sh.parse_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not override_hashes:
|
||||
log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
|
||||
override_hashes, query_valid = sh.require_hash_query(
|
||||
parsed.get("query"),
|
||||
"Invalid -query value (expected hash:<sha256>)",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
king_arg = parsed.get("king")
|
||||
alt_arg = parsed.get("alt")
|
||||
@@ -618,14 +601,13 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
is_folder_store = False
|
||||
store_root: Optional[Path] = None
|
||||
if store_name:
|
||||
try:
|
||||
store = Store(config)
|
||||
backend = store[str(store_name)]
|
||||
backend, _store_registry, _exc = sh.get_store_backend(config, str(store_name))
|
||||
if backend is not None:
|
||||
loc = getattr(backend, "location", None)
|
||||
if callable(loc):
|
||||
is_folder_store = True
|
||||
store_root = Path(str(loc()))
|
||||
except Exception:
|
||||
else:
|
||||
backend = None
|
||||
is_folder_store = False
|
||||
store_root = None
|
||||
|
||||
@@ -6,6 +6,9 @@ import sys
|
||||
import re
|
||||
|
||||
from SYS.logger import log, debug
|
||||
from SYS.item_accessors import extract_item_tags, get_string_list, set_field
|
||||
from SYS.payload_builders import extract_title_tag_value
|
||||
from SYS.result_publication import publish_result_table
|
||||
|
||||
from SYS import models
|
||||
from SYS import pipeline as ctx
|
||||
@@ -24,7 +27,6 @@ collapse_namespace_tag = sh.collapse_namespace_tag
|
||||
should_show_help = sh.should_show_help
|
||||
get_field = sh.get_field
|
||||
from Store import Store
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
_FIELD_NAME_RE = re.compile(r"^[A-Za-z0-9_]+$")
|
||||
|
||||
@@ -239,33 +241,15 @@ def _try_compile_extract_template(
|
||||
|
||||
def _extract_title_tag(tags: List[str]) -> Optional[str]:
|
||||
"""Return the value of the first title: tag if present."""
|
||||
for t in tags:
|
||||
if t.lower().startswith("title:"):
|
||||
value = t.split(":", 1)[1].strip()
|
||||
return value or None
|
||||
return None
|
||||
return extract_title_tag_value(tags)
|
||||
|
||||
|
||||
def _extract_item_tags(res: Any) -> List[str]:
|
||||
if isinstance(res, models.PipeObject):
|
||||
raw = getattr(res, "tag", None)
|
||||
elif isinstance(res, dict):
|
||||
raw = res.get("tag")
|
||||
else:
|
||||
raw = None
|
||||
|
||||
if isinstance(raw, list):
|
||||
return [str(t) for t in raw if t is not None]
|
||||
if isinstance(raw, str) and raw.strip():
|
||||
return [raw]
|
||||
return []
|
||||
return extract_item_tags(res)
|
||||
|
||||
|
||||
def _set_item_tags(res: Any, tags: List[str]) -> None:
|
||||
if isinstance(res, models.PipeObject):
|
||||
res.tag = tags
|
||||
elif isinstance(res, dict):
|
||||
res["tag"] = tags
|
||||
set_field(res, "tag", tags)
|
||||
|
||||
|
||||
def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
|
||||
@@ -401,7 +385,7 @@ def _refresh_result_table_title(
|
||||
|
||||
# Keep the underlying history intact; update only the overlay so @.. can
|
||||
# clear the overlay then continue back to prior tables (e.g., the search list).
|
||||
ctx.set_last_result_table_overlay(new_table, updated_items)
|
||||
publish_result_table(ctx, new_table, updated_items, overlay=True)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -439,30 +423,21 @@ def _refresh_tag_view(
|
||||
refresh_args: List[str] = ["-query", f"hash:{target_hash}"]
|
||||
|
||||
# Build a lean subject so get-tag fetches fresh tags instead of reusing cached payloads.
|
||||
def _value_has_content(value: Any) -> bool:
|
||||
if value is None:
|
||||
return False
|
||||
if isinstance(value, str):
|
||||
return bool(value.strip())
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
return len(value) > 0
|
||||
return True
|
||||
|
||||
def _build_refresh_subject() -> Dict[str, Any]:
|
||||
payload: Dict[str, Any] = {}
|
||||
payload["hash"] = target_hash
|
||||
if _value_has_content(store_name):
|
||||
if sh.value_has_content(store_name):
|
||||
payload["store"] = store_name
|
||||
|
||||
path_value = target_path or get_field(subject, "path")
|
||||
if not _value_has_content(path_value):
|
||||
if not sh.value_has_content(path_value):
|
||||
path_value = get_field(subject, "target")
|
||||
if _value_has_content(path_value):
|
||||
if sh.value_has_content(path_value):
|
||||
payload["path"] = path_value
|
||||
|
||||
for key in ("title", "name", "url", "relations", "service_name"):
|
||||
val = get_field(subject, key)
|
||||
if _value_has_content(val):
|
||||
if sh.value_has_content(val):
|
||||
payload[key] = val
|
||||
|
||||
extra_value = get_field(subject, "extra")
|
||||
@@ -473,7 +448,7 @@ def _refresh_tag_view(
|
||||
}
|
||||
if cleaned:
|
||||
payload["extra"] = cleaned
|
||||
elif _value_has_content(extra_value):
|
||||
elif sh.value_has_content(extra_value):
|
||||
payload["extra"] = extra_value
|
||||
|
||||
return payload
|
||||
@@ -570,15 +545,15 @@ class Add_Tag(Cmdlet):
|
||||
extract_debug = bool(parsed.get("extract-debug", False))
|
||||
extract_debug_rx, extract_debug_err = _try_compile_extract_template(extract_template)
|
||||
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log(
|
||||
"[add_tag] Error: -query must be of the form hash:<sha256>",
|
||||
file=sys.stderr
|
||||
)
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"[add_tag] Error: -query must be of the form hash:<sha256>",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
hash_override = normalize_hash(query_hash) if query_hash else None
|
||||
hash_override = query_hash
|
||||
|
||||
# If add-tag is in the middle of a pipeline (has downstream stages), default to
|
||||
# including temp files. This enables common flows like:
|
||||
@@ -879,21 +854,11 @@ class Add_Tag(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
resolved_hash = (
|
||||
normalize_hash(hash_override)
|
||||
if hash_override else normalize_hash(raw_hash)
|
||||
resolved_hash = sh.resolve_hash_for_cmdlet(
|
||||
str(raw_hash) if raw_hash else None,
|
||||
str(raw_path) if raw_path else None,
|
||||
str(hash_override) if hash_override else None,
|
||||
)
|
||||
if not resolved_hash and raw_path:
|
||||
try:
|
||||
p = Path(str(raw_path))
|
||||
stem = p.stem
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef"
|
||||
for c in stem.lower()):
|
||||
resolved_hash = stem.lower()
|
||||
elif p.exists() and p.is_file():
|
||||
resolved_hash = sha256_file(p)
|
||||
except Exception:
|
||||
resolved_hash = None
|
||||
|
||||
if not resolved_hash:
|
||||
log(
|
||||
@@ -903,9 +868,13 @@ class Add_Tag(Cmdlet):
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
try:
|
||||
backend = store_registry[str(store_name)]
|
||||
except Exception as exc:
|
||||
backend, store_registry, exc = sh.get_store_backend(
|
||||
config,
|
||||
str(store_name),
|
||||
store_registry=store_registry,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
log(
|
||||
f"[add_tag] Error: Unknown store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
|
||||
@@ -49,9 +49,11 @@ class Add_Url(sh.Cmdlet):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log("Error: -query must be of the form hash:<sha256>")
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"Error: -query must be of the form hash:<sha256>",
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Bulk input is common in pipelines; treat a list of PipeObjects as a batch.
|
||||
@@ -117,125 +119,53 @@ class Add_Url(sh.Cmdlet):
|
||||
try:
|
||||
storage = Store(config)
|
||||
|
||||
def _merge_urls(existing: Any, incoming: List[str]) -> List[str]:
|
||||
out: List[str] = []
|
||||
try:
|
||||
if isinstance(existing, str):
|
||||
out.extend(
|
||||
[p.strip() for p in existing.split(",") if p.strip()]
|
||||
)
|
||||
elif isinstance(existing, (list, tuple)):
|
||||
out.extend([str(u).strip() for u in existing if str(u).strip()])
|
||||
except Exception:
|
||||
out = []
|
||||
for u in incoming:
|
||||
if u and u not in out:
|
||||
out.append(u)
|
||||
return out
|
||||
|
||||
def _set_item_url(item: Any, merged: List[str]) -> None:
|
||||
try:
|
||||
if isinstance(item, dict):
|
||||
if len(merged) == 1:
|
||||
item["url"] = merged[0]
|
||||
else:
|
||||
item["url"] = list(merged)
|
||||
return
|
||||
# PipeObject-like
|
||||
if hasattr(item, "url"):
|
||||
if len(merged) == 1:
|
||||
setattr(item, "url", merged[0])
|
||||
else:
|
||||
setattr(item, "url", list(merged))
|
||||
except Exception:
|
||||
return
|
||||
|
||||
# Build batches per store.
|
||||
store_override = parsed.get("store")
|
||||
batch: Dict[str,
|
||||
List[Tuple[str,
|
||||
List[str]]]] = {}
|
||||
pass_through: List[Any] = []
|
||||
|
||||
if results:
|
||||
for item in results:
|
||||
pass_through.append(item)
|
||||
def _warn(message: str) -> None:
|
||||
ctx.print_if_visible(f"[add-url] Warning: {message}", file=sys.stderr)
|
||||
|
||||
raw_hash = query_hash or sh.get_field(item, "hash")
|
||||
raw_store = store_override or sh.get_field(item, "store")
|
||||
if not raw_hash or not raw_store:
|
||||
ctx.print_if_visible(
|
||||
"[add-url] Warning: Item missing hash/store; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
|
||||
normalized = sh.normalize_hash(raw_hash)
|
||||
if not normalized:
|
||||
ctx.print_if_visible(
|
||||
"[add-url] Warning: Item has invalid hash; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
|
||||
store_text = str(raw_store).strip()
|
||||
if not store_text:
|
||||
ctx.print_if_visible(
|
||||
"[add-url] Warning: Item has empty store; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
|
||||
# Validate backend exists (skip PATH/unknown).
|
||||
if not storage.is_available(store_text):
|
||||
ctx.print_if_visible(
|
||||
f"[add-url] Warning: Store '{store_text}' not configured; skipping",
|
||||
file=sys.stderr,
|
||||
)
|
||||
continue
|
||||
|
||||
batch.setdefault(store_text, []).append((normalized, list(urls)))
|
||||
batch, pass_through = sh.collect_store_hash_value_batch(
|
||||
results,
|
||||
store_registry=storage,
|
||||
value_resolver=lambda _item: list(urls),
|
||||
override_hash=query_hash,
|
||||
override_store=store_override,
|
||||
on_warning=_warn,
|
||||
)
|
||||
|
||||
# Execute per-store batches.
|
||||
for store_text, pairs in batch.items():
|
||||
try:
|
||||
backend = storage[store_text]
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Coalesce duplicates per hash before passing to backend.
|
||||
merged: Dict[str,
|
||||
List[str]] = {}
|
||||
for h, ulist in pairs:
|
||||
merged.setdefault(h, [])
|
||||
for u in ulist or []:
|
||||
if u and u not in merged[h]:
|
||||
merged[h].append(u)
|
||||
|
||||
bulk_pairs = [(h, merged[h]) for h in merged.keys()]
|
||||
|
||||
bulk_fn = getattr(backend, "add_url_bulk", None)
|
||||
if callable(bulk_fn):
|
||||
bulk_fn(bulk_pairs, config=config)
|
||||
else:
|
||||
for h, ulist in bulk_pairs:
|
||||
backend.add_url(h, ulist, config=config)
|
||||
|
||||
storage, batch_stats = sh.run_store_hash_value_batches(
|
||||
config,
|
||||
batch,
|
||||
bulk_method_name="add_url_bulk",
|
||||
single_method_name="add_url",
|
||||
store_registry=storage,
|
||||
)
|
||||
for store_text, item_count, _value_count in batch_stats:
|
||||
ctx.print_if_visible(
|
||||
f"✓ add-url: {len(urls)} url(s) for {len(bulk_pairs)} item(s) in '{store_text}'",
|
||||
f"✓ add-url: {len(urls)} url(s) for {item_count} item(s) in '{store_text}'",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
# Pass items through unchanged (but update url field for convenience).
|
||||
for item in pass_through:
|
||||
existing = sh.get_field(item, "url")
|
||||
merged = _merge_urls(existing, list(urls))
|
||||
_set_item_url(item, merged)
|
||||
merged = sh.merge_urls(existing, list(urls))
|
||||
sh.set_item_urls(item, merged)
|
||||
ctx.emit(item)
|
||||
return 0
|
||||
|
||||
# Single-item mode
|
||||
backend = storage[str(store_name)]
|
||||
backend, storage, exc = sh.get_store_backend(
|
||||
config,
|
||||
str(store_name),
|
||||
store_registry=storage,
|
||||
)
|
||||
if backend is None:
|
||||
log(f"Error: Storage backend '{store_name}' not configured")
|
||||
return 1
|
||||
backend.add_url(str(file_hash), urls, config=config)
|
||||
ctx.print_if_visible(
|
||||
f"✓ add-url: {len(urls)} url(s) added",
|
||||
@@ -243,14 +173,11 @@ class Add_Url(sh.Cmdlet):
|
||||
)
|
||||
if result is not None:
|
||||
existing = sh.get_field(result, "url")
|
||||
merged = _merge_urls(existing, list(urls))
|
||||
_set_item_url(result, merged)
|
||||
merged = sh.merge_urls(existing, list(urls))
|
||||
sh.set_item_urls(result, merged)
|
||||
ctx.emit(result)
|
||||
return 0
|
||||
|
||||
except KeyError:
|
||||
log(f"Error: Storage backend '{store_name}' not configured")
|
||||
return 1
|
||||
except Exception as exc:
|
||||
log(f"Error adding URL: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
@@ -13,6 +13,7 @@ from typing import Any, Dict, List, Sequence, Set
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from SYS.logger import log
|
||||
from SYS.item_accessors import get_http_url, get_sha256_hex, get_store_name
|
||||
from SYS.utils import extract_hydrus_hash_from_url
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
@@ -27,41 +28,16 @@ create_pipe_object_result = sh.create_pipe_object_result
|
||||
parse_cmdlet_args = sh.parse_cmdlet_args
|
||||
should_show_help = sh.should_show_help
|
||||
|
||||
_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")
|
||||
|
||||
|
||||
def _extract_sha256_hex(item: Any) -> str:
|
||||
try:
|
||||
if isinstance(item, dict):
|
||||
h = item.get("hash")
|
||||
else:
|
||||
h = getattr(item, "hash", None)
|
||||
if isinstance(h, str) and _SHA256_RE.fullmatch(h.strip()):
|
||||
return h.strip().lower()
|
||||
except Exception:
|
||||
pass
|
||||
return ""
|
||||
return get_sha256_hex(item, "hash") or ""
|
||||
|
||||
|
||||
def _extract_store_name(item: Any) -> str:
|
||||
try:
|
||||
if isinstance(item, dict):
|
||||
s = item.get("store")
|
||||
else:
|
||||
s = getattr(item, "store", None)
|
||||
return str(s or "").strip()
|
||||
except Exception:
|
||||
return ""
|
||||
return get_store_name(item, "store") or ""
|
||||
|
||||
|
||||
def _extract_url(item: Any) -> str:
|
||||
try:
|
||||
u = sh.get_field(item, "url") or sh.get_field(item, "target")
|
||||
if isinstance(u, str) and u.strip().lower().startswith(("http://", "https://")):
|
||||
return u.strip()
|
||||
except Exception:
|
||||
pass
|
||||
return ""
|
||||
return get_http_url(item, "url", "target") or ""
|
||||
|
||||
|
||||
def _extract_hash_from_hydrus_file_url(url: str) -> str:
|
||||
@@ -217,10 +193,9 @@ def _resolve_existing_or_fetch_path(item: Any,
|
||||
store_name = _extract_store_name(item)
|
||||
if file_hash and store_name:
|
||||
try:
|
||||
from Store import Store
|
||||
|
||||
store = Store(config)
|
||||
backend = store[store_name]
|
||||
backend, _store_registry, _exc = sh.get_store_backend(config, store_name)
|
||||
if backend is None:
|
||||
return None, None
|
||||
src = backend.get_file(file_hash)
|
||||
if isinstance(src, Path):
|
||||
if src.exists():
|
||||
@@ -320,11 +295,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# This cmdlet always creates the archive in the configured output directory and emits it.
|
||||
|
||||
# Collect piped items; archive-file is a batch command (single output).
|
||||
items: List[Any] = []
|
||||
if isinstance(result, list):
|
||||
items = list(result)
|
||||
elif result is not None:
|
||||
items = [result]
|
||||
items: List[Any] = sh.normalize_result_items(
|
||||
result,
|
||||
include_falsey_single=True,
|
||||
)
|
||||
|
||||
if not items:
|
||||
log("No piped items provided to archive-file", file=sys.stderr)
|
||||
|
||||
@@ -7,6 +7,7 @@ import shutil
|
||||
import subprocess
|
||||
|
||||
from SYS.logger import log, debug
|
||||
from SYS.payload_builders import build_file_result_payload
|
||||
from SYS.utils import sha256_file
|
||||
from . import _shared as sh
|
||||
from SYS import pipeline as ctx
|
||||
@@ -279,13 +280,15 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
title = extract_title_from_result(item) or output_path.stem
|
||||
|
||||
ctx.emit({
|
||||
"path": str(output_path),
|
||||
"title": title,
|
||||
"hash": out_hash,
|
||||
"media_kind": target_kind,
|
||||
"source_path": str(input_path),
|
||||
})
|
||||
ctx.emit(
|
||||
build_file_result_payload(
|
||||
title=title,
|
||||
path=str(output_path),
|
||||
hash_value=out_hash,
|
||||
media_kind=target_kind,
|
||||
source_path=str(input_path),
|
||||
)
|
||||
)
|
||||
|
||||
if delete_src:
|
||||
try:
|
||||
|
||||
@@ -11,6 +11,7 @@ from Store import Store
|
||||
from . import _shared as sh
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.result_table_helpers import add_row_columns
|
||||
from SYS.result_table import Table, _format_size
|
||||
from SYS.rich_display import stdout_console
|
||||
|
||||
@@ -487,21 +488,18 @@ class Delete_File(sh.Cmdlet):
|
||||
reason_tokens.append(token)
|
||||
i += 1
|
||||
|
||||
override_hash = sh.parse_single_hash_query(
|
||||
override_query
|
||||
) if override_query else None
|
||||
if override_query and not override_hash:
|
||||
log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
|
||||
override_hash, query_valid = sh.require_single_hash_query(
|
||||
override_query,
|
||||
"Invalid -query value (expected hash:<sha256>)",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
reason = " ".join(token for token in reason_tokens
|
||||
if str(token).strip()).strip()
|
||||
|
||||
items = []
|
||||
if isinstance(result, list):
|
||||
items = result
|
||||
elif result:
|
||||
items = [result]
|
||||
items = sh.normalize_result_items(result)
|
||||
|
||||
if not items:
|
||||
log("No items to delete", file=sys.stderr)
|
||||
@@ -526,16 +524,16 @@ class Delete_File(sh.Cmdlet):
|
||||
table = Table("Deleted")
|
||||
table._interactive(True)._perseverance(True)
|
||||
for row in deleted_rows:
|
||||
result_row = table.add_row()
|
||||
result_row.add_column("Title", row.get("title", ""))
|
||||
result_row.add_column("Store", row.get("store", ""))
|
||||
result_row.add_column("Hash", row.get("hash", ""))
|
||||
result_row.add_column(
|
||||
"Size",
|
||||
_format_size(row.get("size_bytes"),
|
||||
integer_only=False)
|
||||
add_row_columns(
|
||||
table,
|
||||
[
|
||||
("Title", row.get("title", "")),
|
||||
("Store", row.get("store", "")),
|
||||
("Hash", row.get("hash", "")),
|
||||
("Size", _format_size(row.get("size_bytes"), integer_only=False)),
|
||||
("Ext", row.get("ext", "")),
|
||||
],
|
||||
)
|
||||
result_row.add_column("Ext", row.get("ext", ""))
|
||||
|
||||
# Display-only: print directly and do not affect selection/history.
|
||||
try:
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Sequence
|
||||
from typing import Any, Dict, Sequence
|
||||
import sys
|
||||
|
||||
from SYS.logger import log
|
||||
@@ -17,8 +16,6 @@ parse_cmdlet_args = sh.parse_cmdlet_args
|
||||
normalize_result_input = sh.normalize_result_input
|
||||
get_field = sh.get_field
|
||||
should_show_help = sh.should_show_help
|
||||
from Store import Store
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
|
||||
class Delete_Note(Cmdlet):
|
||||
@@ -50,14 +47,6 @@ class Delete_Note(Cmdlet):
|
||||
pass
|
||||
self.register()
|
||||
|
||||
def _resolve_hash(
|
||||
self,
|
||||
raw_hash: Optional[str],
|
||||
raw_path: Optional[str],
|
||||
override_hash: Optional[str],
|
||||
) -> Optional[str]:
|
||||
return sh.resolve_hash_for_cmdlet(raw_hash, raw_path, override_hash)
|
||||
|
||||
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if should_show_help(args):
|
||||
log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
|
||||
@@ -66,12 +55,12 @@ class Delete_Note(Cmdlet):
|
||||
parsed = parse_cmdlet_args(args, self)
|
||||
|
||||
store_override = parsed.get("store")
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log(
|
||||
"[delete_note] Error: -query must be of the form hash:<sha256>",
|
||||
file=sys.stderr
|
||||
)
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"[delete_note] Error: -query must be of the form hash:<sha256>",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
note_name_override = str(parsed.get("name") or "").strip()
|
||||
# Allow piping note rows from get-note: the selected item carries note_name.
|
||||
@@ -97,7 +86,7 @@ class Delete_Note(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
store_registry = Store(config)
|
||||
store_registry = None
|
||||
deleted = 0
|
||||
|
||||
for res in results:
|
||||
@@ -117,9 +106,12 @@ class Delete_Note(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
store_name = str(store_override or res.get("store") or "").strip()
|
||||
raw_hash = res.get("hash")
|
||||
raw_path = res.get("path")
|
||||
store_name, resolved_hash = sh.resolve_item_store_hash(
|
||||
res,
|
||||
override_store=str(store_override) if store_override else None,
|
||||
override_hash=str(query_hash) if query_hash else None,
|
||||
path_fields=("path",),
|
||||
)
|
||||
|
||||
if not store_name:
|
||||
log(
|
||||
@@ -128,18 +120,16 @@ class Delete_Note(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
resolved_hash = self._resolve_hash(
|
||||
raw_hash=str(raw_hash) if raw_hash else None,
|
||||
raw_path=str(raw_path) if raw_path else None,
|
||||
override_hash=str(query_hash) if query_hash else None,
|
||||
)
|
||||
if not resolved_hash:
|
||||
ctx.emit(res)
|
||||
continue
|
||||
|
||||
try:
|
||||
backend = store_registry[store_name]
|
||||
except Exception as exc:
|
||||
backend, store_registry, exc = sh.get_store_backend(
|
||||
config,
|
||||
store_name,
|
||||
store_registry=store_registry,
|
||||
)
|
||||
if backend is None:
|
||||
log(
|
||||
f"[delete_note] Error: Unknown store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
@@ -15,7 +14,6 @@ parse_tag_arguments = sh.parse_tag_arguments
|
||||
should_show_help = sh.should_show_help
|
||||
get_field = sh.get_field
|
||||
from SYS.logger import debug, log
|
||||
from Store import Store
|
||||
|
||||
|
||||
def _refresh_tag_view_if_current(
|
||||
@@ -80,31 +78,22 @@ def _refresh_tag_view_if_current(
|
||||
refresh_args.extend(["-query", f"hash:{file_hash}"])
|
||||
|
||||
# Build a lean subject so get-tag fetches fresh tags instead of reusing cached payloads.
|
||||
def _value_has_content(value: Any) -> bool:
|
||||
if value is None:
|
||||
return False
|
||||
if isinstance(value, str):
|
||||
return bool(value.strip())
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
return len(value) > 0
|
||||
return True
|
||||
|
||||
def _build_refresh_subject() -> Dict[str, Any]:
|
||||
payload: Dict[str, Any] = {}
|
||||
payload["hash"] = file_hash
|
||||
store_value = store_name or get_field(subject, "store")
|
||||
if _value_has_content(store_value):
|
||||
if sh.value_has_content(store_value):
|
||||
payload["store"] = store_value
|
||||
|
||||
path_value = path or get_field(subject, "path")
|
||||
if not _value_has_content(path_value):
|
||||
if not sh.value_has_content(path_value):
|
||||
path_value = get_field(subject, "target")
|
||||
if _value_has_content(path_value):
|
||||
if sh.value_has_content(path_value):
|
||||
payload["path"] = path_value
|
||||
|
||||
for key in ("title", "name", "url", "relations", "service_name"):
|
||||
val = get_field(subject, key)
|
||||
if _value_has_content(val):
|
||||
if sh.value_has_content(val):
|
||||
payload[key] = val
|
||||
|
||||
extra_value = get_field(subject, "extra")
|
||||
@@ -115,7 +104,7 @@ def _refresh_tag_view_if_current(
|
||||
}
|
||||
if cleaned:
|
||||
payload["extra"] = cleaned
|
||||
elif _value_has_content(extra_value):
|
||||
elif sh.value_has_content(extra_value):
|
||||
payload["extra"] = extra_value
|
||||
|
||||
return payload
|
||||
@@ -201,11 +190,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
rest.append(a)
|
||||
i += 1
|
||||
|
||||
override_hash = sh.parse_single_hash_query(
|
||||
override_query
|
||||
) if override_query else None
|
||||
if override_query and not override_hash:
|
||||
log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
|
||||
override_hash, query_valid = sh.require_single_hash_query(
|
||||
override_query,
|
||||
"Invalid -query value (expected hash:<sha256>)",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Selection syntax (@...) is handled by the pipeline runner, not by this cmdlet.
|
||||
@@ -242,11 +232,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
return 1
|
||||
|
||||
# Normalize result to a list for processing
|
||||
items_to_process = []
|
||||
if isinstance(result, list):
|
||||
items_to_process = result
|
||||
elif result:
|
||||
items_to_process = [result]
|
||||
items_to_process = sh.normalize_result_items(result)
|
||||
|
||||
# Process each item
|
||||
success_count = 0
|
||||
@@ -358,14 +344,7 @@ def _process_deletion(
|
||||
)
|
||||
return False
|
||||
|
||||
resolved_hash = normalize_hash(file_hash) if file_hash else None
|
||||
if not resolved_hash and path:
|
||||
try:
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
resolved_hash = sha256_file(Path(path))
|
||||
except Exception:
|
||||
resolved_hash = None
|
||||
resolved_hash = sh.resolve_hash_for_cmdlet(file_hash, path, None)
|
||||
|
||||
if not resolved_hash:
|
||||
log(
|
||||
@@ -376,7 +355,13 @@ def _process_deletion(
|
||||
|
||||
def _fetch_existing_tags() -> list[str]:
|
||||
try:
|
||||
backend = Store(config, suppress_debug=True)[store_name]
|
||||
backend, _store_registry, _exc = sh.get_store_backend(
|
||||
config,
|
||||
store_name,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
return []
|
||||
existing, _src = backend.get_tag(resolved_hash, config=config)
|
||||
return list(existing or [])
|
||||
except Exception:
|
||||
@@ -403,7 +388,13 @@ def _process_deletion(
|
||||
return False
|
||||
|
||||
try:
|
||||
backend = Store(config, suppress_debug=True)[store_name]
|
||||
backend, _store_registry, exc = sh.get_store_backend(
|
||||
config,
|
||||
store_name,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
raise exc or KeyError(store_name)
|
||||
ok = backend.delete_tag(resolved_hash, list(tags), config=config)
|
||||
if ok:
|
||||
preview = resolved_hash[:12] + ("…" if len(resolved_hash) > 12 else "")
|
||||
|
||||
@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Sequence, Tuple
|
||||
import sys
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from . import _shared as sh
|
||||
from ._shared import (
|
||||
Cmdlet,
|
||||
CmdletArg,
|
||||
@@ -45,9 +46,11 @@ class Delete_Url(Cmdlet):
|
||||
"""Delete URL from file via hash+store backend."""
|
||||
parsed = parse_cmdlet_args(args, self)
|
||||
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log("Error: -query must be of the form hash:<sha256>")
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"Error: -query must be of the form hash:<sha256>",
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Bulk input is common in pipelines; treat a list of PipeObjects as a batch.
|
||||
@@ -105,77 +108,13 @@ class Delete_Url(Cmdlet):
|
||||
try:
|
||||
storage = Store(config)
|
||||
|
||||
def _remove_urls(existing: Any, remove: List[str]) -> Any:
|
||||
# Preserve prior shape: keep str when 1 url, list when multiple.
|
||||
current: List[str] = []
|
||||
try:
|
||||
if isinstance(existing, str):
|
||||
current = [p.strip() for p in existing.split(",") if p.strip()]
|
||||
elif isinstance(existing, (list, tuple)):
|
||||
current = [str(u).strip() for u in existing if str(u).strip()]
|
||||
except Exception:
|
||||
current = []
|
||||
remove_set = {u
|
||||
for u in (remove or []) if u}
|
||||
new_urls = [u for u in current if u not in remove_set]
|
||||
if len(new_urls) == 1:
|
||||
return new_urls[0]
|
||||
return new_urls
|
||||
|
||||
def _set_item_url(item: Any, merged: Any) -> None:
|
||||
try:
|
||||
if isinstance(item, dict):
|
||||
item["url"] = merged
|
||||
return
|
||||
if hasattr(item, "url"):
|
||||
setattr(item, "url", merged)
|
||||
except Exception:
|
||||
return
|
||||
|
||||
store_override = parsed.get("store")
|
||||
batch: Dict[str,
|
||||
List[Tuple[str,
|
||||
List[str]]]] = {}
|
||||
pass_through: List[Any] = []
|
||||
|
||||
if results:
|
||||
for item in results:
|
||||
pass_through.append(item)
|
||||
def _warn(message: str) -> None:
|
||||
ctx.print_if_visible(f"[delete-url] Warning: {message}", file=sys.stderr)
|
||||
|
||||
raw_hash = query_hash or get_field(item, "hash")
|
||||
raw_store = store_override or get_field(item, "store")
|
||||
if not raw_hash or not raw_store:
|
||||
ctx.print_if_visible(
|
||||
"[delete-url] Warning: Item missing hash/store; skipping",
|
||||
file=sys.stderr,
|
||||
)
|
||||
continue
|
||||
|
||||
normalized = normalize_hash(raw_hash)
|
||||
if not normalized:
|
||||
ctx.print_if_visible(
|
||||
"[delete-url] Warning: Item has invalid hash; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
|
||||
store_text = str(raw_store).strip()
|
||||
if not store_text:
|
||||
ctx.print_if_visible(
|
||||
"[delete-url] Warning: Item has empty store; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
if not storage.is_available(store_text):
|
||||
ctx.print_if_visible(
|
||||
f"[delete-url] Warning: Store '{store_text}' not configured; skipping",
|
||||
file=sys.stderr,
|
||||
)
|
||||
continue
|
||||
|
||||
# Determine which URLs to delete.
|
||||
# - If user passed an explicit <url>, apply it to all items.
|
||||
# - Otherwise, when piping url rows from get-url, delete the url(s) from each item.
|
||||
def _resolve_item_urls(item: Any) -> List[str]:
|
||||
item_urls = list(urls_from_cli)
|
||||
if not item_urls:
|
||||
item_urls = [
|
||||
@@ -184,41 +123,28 @@ class Delete_Url(Cmdlet):
|
||||
) if str(u).strip()
|
||||
]
|
||||
if not item_urls:
|
||||
ctx.print_if_visible(
|
||||
"[delete-url] Warning: Item has no url field; skipping",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
_warn("Item has no url field; skipping")
|
||||
return item_urls
|
||||
|
||||
batch.setdefault(store_text, []).append((normalized, item_urls))
|
||||
batch, pass_through = sh.collect_store_hash_value_batch(
|
||||
results,
|
||||
store_registry=storage,
|
||||
value_resolver=_resolve_item_urls,
|
||||
override_hash=query_hash,
|
||||
override_store=store_override,
|
||||
on_warning=_warn,
|
||||
)
|
||||
|
||||
for store_text, pairs in batch.items():
|
||||
try:
|
||||
backend = storage[store_text]
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
merged: Dict[str,
|
||||
List[str]] = {}
|
||||
for h, ulist in pairs:
|
||||
merged.setdefault(h, [])
|
||||
for u in ulist or []:
|
||||
if u and u not in merged[h]:
|
||||
merged[h].append(u)
|
||||
bulk_pairs = [(h, merged[h]) for h in merged.keys()]
|
||||
|
||||
bulk_fn = getattr(backend, "delete_url_bulk", None)
|
||||
if callable(bulk_fn):
|
||||
bulk_fn(bulk_pairs, config=config)
|
||||
else:
|
||||
for h, ulist in bulk_pairs:
|
||||
backend.delete_url(h, ulist, config=config)
|
||||
|
||||
deleted_count = 0
|
||||
for _h, ulist in bulk_pairs:
|
||||
deleted_count += len(ulist or [])
|
||||
storage, batch_stats = sh.run_store_hash_value_batches(
|
||||
config,
|
||||
batch,
|
||||
bulk_method_name="delete_url_bulk",
|
||||
single_method_name="delete_url",
|
||||
store_registry=storage,
|
||||
)
|
||||
for store_text, item_count, deleted_count in batch_stats:
|
||||
ctx.print_if_visible(
|
||||
f"✓ delete-url: {deleted_count} url(s) for {len(bulk_pairs)} item(s) in '{store_text}'",
|
||||
f"✓ delete-url: {deleted_count} url(s) for {item_count} item(s) in '{store_text}'",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
@@ -234,7 +160,7 @@ class Delete_Url(Cmdlet):
|
||||
get_field(item, "url") or get_field(item, "source_url")
|
||||
) if str(u).strip()
|
||||
]
|
||||
_set_item_url(item, _remove_urls(existing, list(remove_set)))
|
||||
sh.set_item_urls(item, sh.remove_urls(existing, list(remove_set)))
|
||||
ctx.emit(item)
|
||||
return 0
|
||||
|
||||
@@ -249,7 +175,14 @@ class Delete_Url(Cmdlet):
|
||||
log("Error: No URL provided")
|
||||
return 1
|
||||
|
||||
backend = storage[str(store_name)]
|
||||
backend, storage, exc = sh.get_store_backend(
|
||||
config,
|
||||
str(store_name),
|
||||
store_registry=storage,
|
||||
)
|
||||
if backend is None:
|
||||
log(f"Error: Storage backend '{store_name}' not configured")
|
||||
return 1
|
||||
backend.delete_url(str(file_hash), list(urls_from_cli), config=config)
|
||||
ctx.print_if_visible(
|
||||
f"✓ delete-url: {len(urls_from_cli)} url(s) removed",
|
||||
@@ -257,13 +190,10 @@ class Delete_Url(Cmdlet):
|
||||
)
|
||||
if result is not None:
|
||||
existing = get_field(result, "url")
|
||||
_set_item_url(result, _remove_urls(existing, list(urls_from_cli)))
|
||||
sh.set_item_urls(result, sh.remove_urls(existing, list(urls_from_cli)))
|
||||
ctx.emit(result)
|
||||
return 0
|
||||
|
||||
except KeyError:
|
||||
log(f"Error: Storage backend '{store_name}' not configured")
|
||||
return 1
|
||||
except Exception as exc:
|
||||
log(f"Error deleting URL: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
@@ -19,11 +19,17 @@ from contextlib import AbstractContextManager, nullcontext
|
||||
from API.HTTP import _download_direct_file
|
||||
from SYS.models import DownloadError, DownloadOptions, DownloadMediaResult
|
||||
from SYS.logger import log, debug, is_debug_enabled
|
||||
from SYS.payload_builders import build_file_result_payload, build_table_result_payload
|
||||
from SYS.pipeline_progress import PipelineProgress
|
||||
from SYS.result_table import Table
|
||||
from SYS.rich_display import stderr_console as get_stderr_console
|
||||
from SYS import pipeline as pipeline_context
|
||||
from SYS.metadata import normalize_urls as normalize_url_list
|
||||
from SYS.selection_builder import (
|
||||
extract_selection_fields,
|
||||
extract_urls_from_selection_args,
|
||||
selection_args_have_url,
|
||||
)
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
from tool.ytdlp import (
|
||||
@@ -57,6 +63,7 @@ build_pipeline_preview = sh.build_pipeline_preview
|
||||
# URI scheme prefixes owned by AllDebrid (magic-link and emoji shorthand).
|
||||
# Defined once here so every method in this file references the same constant.
|
||||
_ALLDEBRID_PREFIXES: tuple[str, ...] = ("alldebrid:", "alldebrid🧲")
|
||||
_FORMAT_INDEX_RE = re.compile(r"^\s*#?\d+\s*$")
|
||||
|
||||
|
||||
class Download_File(Cmdlet):
|
||||
@@ -1008,9 +1015,7 @@ class Download_File(Cmdlet):
|
||||
formats_cache: Dict[str, Optional[List[Dict[str, Any]]]],
|
||||
ytdlp_tool: YtDlpTool,
|
||||
) -> Optional[str]:
|
||||
import re
|
||||
|
||||
if not query_format or not re.match(r"^\s*#?\d+\s*$", str(query_format)):
|
||||
if not query_format or not _FORMAT_INDEX_RE.match(str(query_format)):
|
||||
return None
|
||||
|
||||
try:
|
||||
@@ -1221,22 +1226,24 @@ class Download_File(Cmdlet):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
row: Dict[str, Any] = {
|
||||
"table": "download-file",
|
||||
"title": str(title or f"Item {idx}"),
|
||||
"detail": str(uploader or ""),
|
||||
"media_kind": "playlist-item",
|
||||
"playlist_index": idx,
|
||||
"_selection_args": (["-url", str(entry_url)] if entry_url else ["-url", str(url), "-item", str(idx)]),
|
||||
"url": entry_url,
|
||||
"target": entry_url,
|
||||
"columns": [
|
||||
row = build_table_result_payload(
|
||||
table="download-file",
|
||||
title=str(title or f"Item {idx}"),
|
||||
detail=str(uploader or ""),
|
||||
columns=[
|
||||
("#", str(idx)),
|
||||
("Title", str(title or "")),
|
||||
("Duration", str(duration or "")),
|
||||
("Uploader", str(uploader or "")),
|
||||
],
|
||||
}
|
||||
selection_args=(
|
||||
["-url", str(entry_url)] if entry_url else ["-url", str(url), "-item", str(idx)]
|
||||
),
|
||||
media_kind="playlist-item",
|
||||
playlist_index=idx,
|
||||
url=entry_url,
|
||||
target=entry_url,
|
||||
)
|
||||
results_list.append(row)
|
||||
table.add_result(row)
|
||||
|
||||
@@ -1782,14 +1789,11 @@ class Download_File(Cmdlet):
|
||||
desc_parts.append(size_str)
|
||||
format_desc = " | ".join(desc_parts)
|
||||
|
||||
format_dict: Dict[str, Any] = {
|
||||
"table": "download-file",
|
||||
"title": f"Format {format_id}",
|
||||
"url": url,
|
||||
"target": url,
|
||||
"detail": format_desc,
|
||||
"media_kind": "format",
|
||||
"columns": [
|
||||
format_dict = build_table_result_payload(
|
||||
table="download-file",
|
||||
title=f"Format {format_id}",
|
||||
detail=format_desc,
|
||||
columns=[
|
||||
("ID", format_id),
|
||||
("Resolution", resolution or "N/A"),
|
||||
("Ext", ext),
|
||||
@@ -1797,13 +1801,16 @@ class Download_File(Cmdlet):
|
||||
("Video", vcodec),
|
||||
("Audio", acodec),
|
||||
],
|
||||
"full_metadata": {
|
||||
selection_args=["-query", f"format:{selection_format_id}"],
|
||||
url=url,
|
||||
target=url,
|
||||
media_kind="format",
|
||||
full_metadata={
|
||||
"format_id": format_id,
|
||||
"url": url,
|
||||
"item_selector": selection_format_id,
|
||||
},
|
||||
"_selection_args": ["-query", f"format:{selection_format_id}"],
|
||||
}
|
||||
)
|
||||
|
||||
results_list.append(format_dict)
|
||||
table.add_result(format_dict)
|
||||
@@ -2379,18 +2386,18 @@ class Download_File(Cmdlet):
|
||||
if not final_url and url:
|
||||
final_url = str(url)
|
||||
|
||||
return {
|
||||
"path": str(media_path),
|
||||
"hash": hash_value,
|
||||
"title": title,
|
||||
"url": final_url,
|
||||
"tag": tag,
|
||||
"action": "cmdlet:download-file",
|
||||
"is_temp": True,
|
||||
"ytdl_format": getattr(opts, "ytdl_format", None),
|
||||
"store": getattr(opts, "storage_name", None) or getattr(opts, "storage_location", None) or "PATH",
|
||||
"media_kind": "video" if opts.mode == "video" else "audio",
|
||||
}
|
||||
return build_file_result_payload(
|
||||
title=title,
|
||||
path=str(media_path),
|
||||
hash_value=hash_value,
|
||||
url=final_url,
|
||||
tag=tag,
|
||||
store=getattr(opts, "storage_name", None) or getattr(opts, "storage_location", None) or "PATH",
|
||||
action="cmdlet:download-file",
|
||||
is_temp=True,
|
||||
ytdl_format=getattr(opts, "ytdl_format", None),
|
||||
media_kind="video" if opts.mode == "video" else "audio",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def download_streaming_url_as_pipe_objects(
|
||||
@@ -2609,22 +2616,13 @@ class Download_File(Cmdlet):
|
||||
|
||||
return out
|
||||
|
||||
@staticmethod
|
||||
def _normalize_hash_hex(value: Optional[str]) -> Optional[str]:
|
||||
if not value or not isinstance(value, str):
|
||||
return None
|
||||
candidate = value.strip().lower()
|
||||
if len(candidate) == 64 and all(c in "0123456789abcdef" for c in candidate):
|
||||
return candidate
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _extract_hash_from_search_hit(cls, hit: Any) -> Optional[str]:
|
||||
if not isinstance(hit, dict):
|
||||
return None
|
||||
for key in ("hash", "hash_hex", "file_hash", "hydrus_hash"):
|
||||
v = hit.get(key)
|
||||
normalized = cls._normalize_hash_hex(str(v) if v is not None else None)
|
||||
normalized = sh.normalize_hash(str(v) if v is not None else None)
|
||||
if normalized:
|
||||
return normalized
|
||||
return None
|
||||
@@ -2717,10 +2715,10 @@ class Download_File(Cmdlet):
|
||||
|
||||
hashes: List[str] = []
|
||||
for po in pipe_objects:
|
||||
h_val = cls._normalize_hash_hex(str(po.get("hash") or ""))
|
||||
h_val = sh.normalize_hash(str(po.get("hash") or ""))
|
||||
hashes.append(h_val or "")
|
||||
|
||||
king_hash = cls._normalize_hash_hex(source_king_hash) if source_king_hash else None
|
||||
king_hash = sh.normalize_hash(source_king_hash) if source_king_hash else None
|
||||
if not king_hash:
|
||||
king_hash = hashes[0] if hashes and hashes[0] else None
|
||||
if not king_hash:
|
||||
@@ -2774,10 +2772,10 @@ class Download_File(Cmdlet):
|
||||
# Fallback to piped items if no explicit URLs provided
|
||||
piped_items = []
|
||||
if not raw_url:
|
||||
if isinstance(result, list):
|
||||
piped_items = list(result)
|
||||
elif result is not None:
|
||||
piped_items = [result]
|
||||
piped_items = sh.normalize_result_items(
|
||||
result,
|
||||
include_falsey_single=True,
|
||||
)
|
||||
|
||||
# Handle TABLE_AUTO_STAGES routing: if a piped item has _selection_args,
|
||||
# re-invoke download-file with those args instead of processing the PipeObject itself.
|
||||
@@ -2785,68 +2783,18 @@ class Download_File(Cmdlet):
|
||||
selection_runs: List[List[str]] = []
|
||||
residual_items: List[Any] = []
|
||||
|
||||
def _looks_like_url(value: Any) -> bool:
|
||||
try:
|
||||
s_val = str(value or "").strip().lower()
|
||||
except Exception:
|
||||
return False
|
||||
return s_val.startswith(
|
||||
("http://", "https://", "magnet:", "torrent:") + _ALLDEBRID_PREFIXES
|
||||
)
|
||||
|
||||
def _extract_selection_args(item: Any) -> tuple[Optional[List[str]], Optional[str]]:
|
||||
selection_args: Optional[List[str]] = None
|
||||
item_url: Optional[str] = None
|
||||
|
||||
if isinstance(item, dict):
|
||||
selection_args = item.get("_selection_args") or item.get("selection_args")
|
||||
item_url = item.get("url") or item.get("path") or item.get("target")
|
||||
md = item.get("metadata") or item.get("full_metadata")
|
||||
if isinstance(md, dict):
|
||||
selection_args = selection_args or md.get("_selection_args") or md.get("selection_args")
|
||||
item_url = item_url or md.get("url") or md.get("source_url")
|
||||
extra = item.get("extra")
|
||||
if isinstance(extra, dict):
|
||||
selection_args = selection_args or extra.get("_selection_args") or extra.get("selection_args")
|
||||
item_url = item_url or extra.get("url") or extra.get("source_url")
|
||||
else:
|
||||
item_url = getattr(item, "url", None) or getattr(item, "path", None) or getattr(item, "target", None)
|
||||
md = getattr(item, "metadata", None)
|
||||
if isinstance(md, dict):
|
||||
selection_args = md.get("_selection_args") or md.get("selection_args")
|
||||
item_url = item_url or md.get("url") or md.get("source_url")
|
||||
extra = getattr(item, "extra", None)
|
||||
if isinstance(extra, dict):
|
||||
selection_args = selection_args or extra.get("_selection_args") or extra.get("selection_args")
|
||||
item_url = item_url or extra.get("url") or extra.get("source_url")
|
||||
|
||||
if isinstance(selection_args, (list, tuple)):
|
||||
normalized_args = [str(arg) for arg in selection_args if arg is not None]
|
||||
elif selection_args is not None:
|
||||
normalized_args = [str(selection_args)]
|
||||
else:
|
||||
normalized_args = None
|
||||
|
||||
if item_url and not _looks_like_url(item_url):
|
||||
item_url = None
|
||||
|
||||
return normalized_args, item_url
|
||||
|
||||
def _selection_args_have_url(args_list: Sequence[str]) -> bool:
|
||||
for idx, arg in enumerate(args_list):
|
||||
low = str(arg or "").strip().lower()
|
||||
if low in {"-url", "--url"}:
|
||||
return True
|
||||
if _looks_like_url(arg):
|
||||
return True
|
||||
return False
|
||||
|
||||
for item in piped_items:
|
||||
handled = False
|
||||
try:
|
||||
normalized_args, item_url = _extract_selection_args(item)
|
||||
normalized_args, _normalized_action, item_url = extract_selection_fields(
|
||||
item,
|
||||
extra_url_prefixes=_ALLDEBRID_PREFIXES,
|
||||
)
|
||||
if normalized_args:
|
||||
if _selection_args_have_url(normalized_args):
|
||||
if selection_args_have_url(
|
||||
normalized_args,
|
||||
extra_url_prefixes=_ALLDEBRID_PREFIXES,
|
||||
):
|
||||
selection_runs.append(list(normalized_args))
|
||||
handled = True
|
||||
elif item_url:
|
||||
@@ -2860,25 +2808,11 @@ class Download_File(Cmdlet):
|
||||
if selection_runs:
|
||||
selection_urls: List[str] = []
|
||||
|
||||
def _extract_urls_from_args(args_list: Sequence[str]) -> List[str]:
|
||||
urls: List[str] = []
|
||||
idx = 0
|
||||
while idx < len(args_list):
|
||||
token = str(args_list[idx] or "")
|
||||
low = token.strip().lower()
|
||||
if low in {"-url", "--url"} and idx + 1 < len(args_list):
|
||||
candidate = str(args_list[idx + 1] or "").strip()
|
||||
if _looks_like_url(candidate):
|
||||
urls.append(candidate)
|
||||
idx += 2
|
||||
continue
|
||||
if _looks_like_url(token):
|
||||
urls.append(token.strip())
|
||||
idx += 1
|
||||
return urls
|
||||
|
||||
for run_args in selection_runs:
|
||||
for u in _extract_urls_from_args(run_args):
|
||||
for u in extract_urls_from_selection_args(
|
||||
run_args,
|
||||
extra_url_prefixes=_ALLDEBRID_PREFIXES,
|
||||
):
|
||||
if u not in selection_urls:
|
||||
selection_urls.append(u)
|
||||
|
||||
|
||||
@@ -17,10 +17,11 @@ from urllib.request import pathname2url
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from . import _shared as sh
|
||||
from SYS.item_accessors import get_result_title
|
||||
from SYS.logger import log, debug
|
||||
from Store import Store
|
||||
from SYS.config import resolve_output_dir
|
||||
from API.HTTP import _download_direct_file
|
||||
from SYS.payload_builders import build_file_result_payload
|
||||
|
||||
|
||||
class Get_File(sh.Cmdlet):
|
||||
@@ -56,9 +57,11 @@ class Get_File(sh.Cmdlet):
|
||||
parsed = sh.parse_cmdlet_args(args, self)
|
||||
debug(f"[get-file] parsed args: {parsed}")
|
||||
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log("Error: -query must be of the form hash:<sha256>")
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"Error: -query must be of the form hash:<sha256>",
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Extract hash and store from result or args
|
||||
@@ -87,21 +90,14 @@ class Get_File(sh.Cmdlet):
|
||||
|
||||
debug(f"[get-file] Getting storage backend: {store_name}")
|
||||
|
||||
# Prefer instantiating only the named backend to avoid initializing all configured backends
|
||||
try:
|
||||
from Store.registry import get_backend_instance
|
||||
backend = get_backend_instance(config, store_name, suppress_debug=True)
|
||||
except Exception:
|
||||
backend = None
|
||||
|
||||
backend, _store_registry, _exc = sh.get_preferred_store_backend(
|
||||
config,
|
||||
store_name,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
# Fallback to full registry when targeted instantiation fails
|
||||
try:
|
||||
store = Store(config)
|
||||
backend = store[store_name]
|
||||
except Exception:
|
||||
log(f"Error: Storage backend '{store_name}' not found", file=sys.stderr)
|
||||
return 1
|
||||
log(f"Error: Storage backend '{store_name}' not found", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
debug(f"[get-file] Backend retrieved: {type(backend).__name__}")
|
||||
|
||||
@@ -117,18 +113,8 @@ class Get_File(sh.Cmdlet):
|
||||
|
||||
def resolve_display_title() -> str:
|
||||
candidates = [
|
||||
sh.get_field(result,
|
||||
"title"),
|
||||
sh.get_field(result,
|
||||
"name"),
|
||||
sh.get_field(result,
|
||||
"filename"),
|
||||
(metadata.get("title") if isinstance(metadata,
|
||||
dict) else None),
|
||||
(metadata.get("name") if isinstance(metadata,
|
||||
dict) else None),
|
||||
(metadata.get("filename") if isinstance(metadata,
|
||||
dict) else None),
|
||||
get_result_title(result, "title", "name", "filename"),
|
||||
get_result_title(metadata, "title", "name", "filename"),
|
||||
]
|
||||
for candidate in candidates:
|
||||
if candidate is None:
|
||||
@@ -166,12 +152,12 @@ class Get_File(sh.Cmdlet):
|
||||
debug(f"Opened in browser: {download_url}", file=sys.stderr)
|
||||
|
||||
ctx.emit(
|
||||
{
|
||||
"hash": file_hash,
|
||||
"store": store_name,
|
||||
"url": download_url,
|
||||
"title": resolve_display_title() or "Opened",
|
||||
}
|
||||
build_file_result_payload(
|
||||
title=resolve_display_title() or "Opened",
|
||||
hash_value=file_hash,
|
||||
store=store_name,
|
||||
url=download_url,
|
||||
)
|
||||
)
|
||||
return 0
|
||||
|
||||
@@ -227,12 +213,12 @@ class Get_File(sh.Cmdlet):
|
||||
|
||||
# Emit result for pipeline
|
||||
ctx.emit(
|
||||
{
|
||||
"hash": file_hash,
|
||||
"store": store_name,
|
||||
"path": str(dest_path),
|
||||
"title": filename,
|
||||
}
|
||||
build_file_result_payload(
|
||||
title=filename,
|
||||
hash_value=file_hash,
|
||||
store=store_name,
|
||||
path=str(dest_path),
|
||||
)
|
||||
)
|
||||
|
||||
debug("[get-file] Completed successfully")
|
||||
|
||||
@@ -4,7 +4,9 @@ from typing import Any, Dict, Sequence, Optional
|
||||
import json
|
||||
import sys
|
||||
|
||||
from SYS.item_accessors import get_extension_field, get_int_field
|
||||
from SYS.logger import log
|
||||
from SYS.payload_builders import build_file_result_payload
|
||||
|
||||
from . import _shared as sh
|
||||
|
||||
@@ -15,6 +17,7 @@ parse_cmdlet_args = sh.parse_cmdlet_args
|
||||
get_field = sh.get_field
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.result_table import Table
|
||||
from SYS.result_table_helpers import add_row_columns
|
||||
|
||||
|
||||
class Get_Metadata(Cmdlet):
|
||||
@@ -176,22 +179,28 @@ class Get_Metadata(Cmdlet):
|
||||
store or ""),
|
||||
]
|
||||
|
||||
return {
|
||||
"title": title or path,
|
||||
"path": path,
|
||||
"store": store,
|
||||
"mime": mime,
|
||||
"ext": ext or "",
|
||||
"size_bytes": size_int,
|
||||
"duration_seconds": dur_int,
|
||||
"pages": pages_int,
|
||||
"imported_ts": imported_ts,
|
||||
"imported": imported_label,
|
||||
"hash": hash_value,
|
||||
"url": url,
|
||||
"tag": tag or [],
|
||||
"columns": columns,
|
||||
}
|
||||
payload = build_file_result_payload(
|
||||
title=title,
|
||||
fallback_title=path,
|
||||
path=path,
|
||||
url=url,
|
||||
hash_value=hash_value,
|
||||
store=store,
|
||||
tag=tag or [],
|
||||
ext=ext,
|
||||
size_bytes=size_int,
|
||||
columns=columns,
|
||||
)
|
||||
payload.update(
|
||||
{
|
||||
"mime": mime,
|
||||
"duration_seconds": dur_int,
|
||||
"pages": pages_int,
|
||||
"imported_ts": imported_ts,
|
||||
"imported": imported_label,
|
||||
}
|
||||
)
|
||||
return payload
|
||||
|
||||
@staticmethod
|
||||
def _add_table_body_row(table: Table, row: Dict[str, Any]) -> None:
|
||||
@@ -213,16 +222,18 @@ class Get_Metadata(Cmdlet):
|
||||
label, value = col
|
||||
lookup[str(label)] = value
|
||||
|
||||
row_obj = table.add_row()
|
||||
row_obj.add_column("Hash", lookup.get("Hash", ""))
|
||||
row_obj.add_column("MIME", lookup.get("MIME", ""))
|
||||
row_obj.add_column("Size(MB)", lookup.get("Size(MB)", ""))
|
||||
columns_to_add = [
|
||||
("Hash", lookup.get("Hash", "")),
|
||||
("MIME", lookup.get("MIME", "")),
|
||||
("Size(MB)", lookup.get("Size(MB)", "")),
|
||||
]
|
||||
if "Duration(s)" in lookup:
|
||||
row_obj.add_column("Duration(s)", lookup.get("Duration(s)", ""))
|
||||
columns_to_add.append(("Duration(s)", lookup.get("Duration(s)", "")))
|
||||
elif "Pages" in lookup:
|
||||
row_obj.add_column("Pages", lookup.get("Pages", ""))
|
||||
columns_to_add.append(("Pages", lookup.get("Pages", "")))
|
||||
else:
|
||||
row_obj.add_column("Duration(s)", "")
|
||||
columns_to_add.append(("Duration(s)", ""))
|
||||
add_row_columns(table, columns_to_add)
|
||||
|
||||
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"""Execute get-metadata cmdlet - retrieve and display file metadata.
|
||||
@@ -247,9 +258,12 @@ class Get_Metadata(Cmdlet):
|
||||
# Parse arguments
|
||||
parsed = parse_cmdlet_args(args, self)
|
||||
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log('No hash available - use -query "hash:<sha256>"', file=sys.stderr)
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
'No hash available - use -query "hash:<sha256>"',
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Get hash and store from parsed args or result
|
||||
@@ -266,21 +280,14 @@ class Get_Metadata(Cmdlet):
|
||||
|
||||
# Use storage backend to get metadata
|
||||
try:
|
||||
# Instantiate only the required backend when possible to avoid initializing all configured backends
|
||||
try:
|
||||
from Store.registry import get_backend_instance
|
||||
backend = get_backend_instance(config, storage_source, suppress_debug=True)
|
||||
except Exception:
|
||||
backend = None
|
||||
|
||||
backend, _store_registry, _exc = sh.get_preferred_store_backend(
|
||||
config,
|
||||
storage_source,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
try:
|
||||
from Store import Store
|
||||
storage = Store(config)
|
||||
backend = storage[storage_source]
|
||||
except Exception:
|
||||
log(f"Storage backend '{storage_source}' not found", file=sys.stderr)
|
||||
return 1
|
||||
log(f"Storage backend '{storage_source}' not found", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Get metadata from backend
|
||||
metadata = backend.get_metadata(file_hash)
|
||||
@@ -330,8 +337,8 @@ class Get_Metadata(Cmdlet):
|
||||
|
||||
# Extract metadata fields
|
||||
mime_type = metadata.get("mime") or metadata.get("ext", "")
|
||||
file_ext = metadata.get("ext", "") # Extract file extension separately
|
||||
file_size = metadata.get("size")
|
||||
file_ext = get_extension_field(metadata, "ext", "extension")
|
||||
file_size = get_int_field(metadata, "size", "size_bytes")
|
||||
duration_seconds = metadata.get("duration")
|
||||
if duration_seconds is None:
|
||||
duration_seconds = metadata.get("duration_seconds")
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Sequence
|
||||
from typing import Any, Dict, List, Sequence
|
||||
import sys
|
||||
|
||||
from SYS.logger import log
|
||||
from SYS.detail_view_helpers import create_detail_view, prepare_detail_metadata
|
||||
from SYS.payload_builders import build_table_result_payload
|
||||
from SYS.result_publication import publish_result_table
|
||||
from SYS.result_table_helpers import add_row_columns
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from . import _shared as sh
|
||||
@@ -16,8 +19,6 @@ normalize_hash = sh.normalize_hash
|
||||
parse_cmdlet_args = sh.parse_cmdlet_args
|
||||
normalize_result_input = sh.normalize_result_input
|
||||
should_show_help = sh.should_show_help
|
||||
from Store import Store
|
||||
from SYS.utils import sha256_file
|
||||
|
||||
|
||||
class Get_Note(Cmdlet):
|
||||
@@ -45,14 +46,6 @@ class Get_Note(Cmdlet):
|
||||
pass
|
||||
self.register()
|
||||
|
||||
def _resolve_hash(
|
||||
self,
|
||||
raw_hash: Optional[str],
|
||||
raw_path: Optional[str],
|
||||
override_hash: Optional[str],
|
||||
) -> Optional[str]:
|
||||
return sh.resolve_hash_for_cmdlet(raw_hash, raw_path, override_hash)
|
||||
|
||||
def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if should_show_help(args):
|
||||
log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
|
||||
@@ -60,12 +53,12 @@ class Get_Note(Cmdlet):
|
||||
|
||||
parsed = parse_cmdlet_args(args, self)
|
||||
store_override = parsed.get("store")
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log(
|
||||
"[get_note] Error: -query must be of the form hash:<sha256>",
|
||||
file=sys.stderr
|
||||
)
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"[get_note] Error: -query must be of the form hash:<sha256>",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
results = normalize_result_input(result)
|
||||
@@ -82,31 +75,32 @@ class Get_Note(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
store_registry = Store(config)
|
||||
store_registry = None
|
||||
any_notes = False
|
||||
display_items: List[Dict[str, Any]] = []
|
||||
|
||||
# We assume single subject for get-note detail view
|
||||
main_res = results[0]
|
||||
|
||||
from SYS.result_table import ItemDetailView, extract_item_metadata
|
||||
metadata = extract_item_metadata(main_res)
|
||||
metadata = prepare_detail_metadata(main_res)
|
||||
|
||||
note_table = (
|
||||
ItemDetailView("Notes", item_metadata=metadata)
|
||||
.set_table("note")
|
||||
.set_value_case("preserve")
|
||||
._perseverance(True)
|
||||
note_table = create_detail_view(
|
||||
"Notes",
|
||||
metadata,
|
||||
table_name="note",
|
||||
source_command=("get-note", []),
|
||||
)
|
||||
note_table.set_source_command("get-note", [])
|
||||
|
||||
for res in results:
|
||||
if not isinstance(res, dict):
|
||||
continue
|
||||
|
||||
store_name = str(store_override or res.get("store") or "").strip()
|
||||
raw_hash = res.get("hash")
|
||||
raw_path = res.get("path")
|
||||
store_name, resolved_hash = sh.resolve_item_store_hash(
|
||||
res,
|
||||
override_store=str(store_override) if store_override else None,
|
||||
override_hash=str(query_hash) if query_hash else None,
|
||||
path_fields=("path",),
|
||||
)
|
||||
|
||||
if not store_name:
|
||||
log(
|
||||
@@ -115,11 +109,6 @@ class Get_Note(Cmdlet):
|
||||
)
|
||||
return 1
|
||||
|
||||
resolved_hash = self._resolve_hash(
|
||||
raw_hash=str(raw_hash) if raw_hash else None,
|
||||
raw_path=str(raw_path) if raw_path else None,
|
||||
override_hash=str(query_hash) if query_hash else None,
|
||||
)
|
||||
if not resolved_hash:
|
||||
continue
|
||||
|
||||
@@ -129,9 +118,12 @@ class Get_Note(Cmdlet):
|
||||
if store_name and not metadata.get("Store"):
|
||||
metadata["Store"] = store_name
|
||||
|
||||
try:
|
||||
backend = store_registry[store_name]
|
||||
except Exception as exc:
|
||||
backend, store_registry, exc = sh.get_store_backend(
|
||||
config,
|
||||
store_name,
|
||||
store_registry=store_registry,
|
||||
)
|
||||
if backend is None:
|
||||
log(
|
||||
f"[get_note] Error: Unknown store '{store_name}': {exc}",
|
||||
file=sys.stderr
|
||||
@@ -158,28 +150,27 @@ class Get_Note(Cmdlet):
|
||||
# Keep payload small for IPC/pipes.
|
||||
raw_text = raw_text[:999]
|
||||
preview = " ".join(raw_text.replace("\r", "").split("\n"))
|
||||
payload: Dict[str, Any] = {
|
||||
"store": store_name,
|
||||
"hash": resolved_hash,
|
||||
"note_name": str(k),
|
||||
"note_text": raw_text,
|
||||
"columns": [
|
||||
("Name",
|
||||
str(k)),
|
||||
("Text",
|
||||
preview.strip()),
|
||||
payload = build_table_result_payload(
|
||||
columns=[
|
||||
("Name", str(k)),
|
||||
("Text", preview.strip()),
|
||||
],
|
||||
}
|
||||
store=store_name,
|
||||
hash=resolved_hash,
|
||||
note_name=str(k),
|
||||
note_text=raw_text,
|
||||
)
|
||||
display_items.append(payload)
|
||||
if note_table is not None:
|
||||
row = note_table.add_row()
|
||||
row.add_column("Name", str(k))
|
||||
row.add_column("Text", preview.strip())
|
||||
add_row_columns(
|
||||
note_table,
|
||||
[("Name", str(k)), ("Text", preview.strip())],
|
||||
)
|
||||
|
||||
ctx.emit(payload)
|
||||
|
||||
# Always set the table overlay even if empty to show item details
|
||||
ctx.set_last_result_table_overlay(note_table, display_items, subject=result)
|
||||
publish_result_table(ctx, note_table, display_items, subject=result, overlay=True)
|
||||
|
||||
if not any_notes:
|
||||
log("No notes found.")
|
||||
|
||||
@@ -3,7 +3,11 @@ from __future__ import annotations
|
||||
from typing import Any, Dict, Sequence, Optional
|
||||
import sys
|
||||
|
||||
from SYS.detail_view_helpers import create_detail_view, prepare_detail_metadata
|
||||
from SYS.logger import log
|
||||
from SYS.result_table_helpers import add_row_columns
|
||||
from SYS.selection_builder import build_hash_store_selection
|
||||
from SYS.result_publication import publish_result_table
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from API import HydrusNetwork as hydrus_wrapper
|
||||
@@ -59,11 +63,12 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
continue
|
||||
i += 1
|
||||
|
||||
override_hash: str | None = (
|
||||
sh.parse_single_hash_query(override_query) if override_query else None
|
||||
override_hash, query_valid = sh.require_single_hash_query(
|
||||
override_query,
|
||||
'get-relationship requires -query "hash:<sha256>"',
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if override_query and not override_hash:
|
||||
log('get-relationship requires -query "hash:<sha256>"', file=sys.stderr)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Handle @N selection which creates a list
|
||||
@@ -326,21 +331,19 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
log(f"Hydrus relationships fetch failed: {exc}", file=sys.stderr)
|
||||
|
||||
# Display results
|
||||
from SYS.result_table import ItemDetailView, extract_item_metadata
|
||||
|
||||
# Prepare metadata for the detail view
|
||||
metadata = extract_item_metadata(result)
|
||||
|
||||
if hash_hex:
|
||||
metadata["Hash"] = hash_hex
|
||||
|
||||
# Overlays
|
||||
if source_title and source_title != "Unknown":
|
||||
metadata["Title"] = source_title
|
||||
metadata = prepare_detail_metadata(
|
||||
result,
|
||||
title=(source_title if source_title and source_title != "Unknown" else None),
|
||||
hash_value=hash_hex,
|
||||
)
|
||||
|
||||
table = ItemDetailView("Relationships", item_metadata=metadata
|
||||
).init_command("get-relationship",
|
||||
[])
|
||||
table = create_detail_view(
|
||||
"Relationships",
|
||||
metadata,
|
||||
init_command=("get-relationship", []),
|
||||
value_case=None,
|
||||
perseverance=False,
|
||||
)
|
||||
|
||||
# Sort by type then title
|
||||
# Custom sort order: King first, then Derivative, then others
|
||||
@@ -364,11 +367,14 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
pipeline_results = []
|
||||
|
||||
for i, item in enumerate(found_relationships):
|
||||
row = table.add_row()
|
||||
row.add_column("Type", item["type"].title())
|
||||
row.add_column("Title", item["title"])
|
||||
# row.add_column("Hash", item['hash'][:16] + "...") # User requested removal
|
||||
row.add_column("Store", item["store"])
|
||||
add_row_columns(
|
||||
table,
|
||||
[
|
||||
("Type", item["type"].title()),
|
||||
("Title", item["title"]),
|
||||
("Store", item["store"]),
|
||||
],
|
||||
)
|
||||
|
||||
# Create result object for pipeline
|
||||
res_obj = {
|
||||
@@ -384,16 +390,15 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
pipeline_results.append(res_obj)
|
||||
|
||||
# Set selection args
|
||||
table.set_row_selection_args(
|
||||
i,
|
||||
["-store",
|
||||
str(item["store"]),
|
||||
"-query",
|
||||
f"hash:{item['hash']}"]
|
||||
selection_args, _selection_action = build_hash_store_selection(
|
||||
item["hash"],
|
||||
item["store"],
|
||||
)
|
||||
if selection_args:
|
||||
table.set_row_selection_args(i, selection_args)
|
||||
|
||||
# Ensure empty state is still navigable/visible
|
||||
ctx.set_last_result_table_overlay(table, pipeline_results)
|
||||
publish_result_table(ctx, table, pipeline_results, overlay=True)
|
||||
from SYS.rich_display import stdout_console
|
||||
|
||||
stdout_console().print(table)
|
||||
|
||||
@@ -26,6 +26,10 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.pipeline_progress import PipelineProgress
|
||||
from SYS.detail_view_helpers import create_detail_view, prepare_detail_metadata
|
||||
from SYS.payload_builders import extract_title_tag_value
|
||||
from SYS.result_publication import publish_result_table
|
||||
from SYS.result_table_helpers import add_row_columns
|
||||
from . import _shared as sh
|
||||
from SYS.field_access import get_field
|
||||
|
||||
@@ -259,36 +263,24 @@ def _emit_tags_as_table(
|
||||
subject: Full context object (should preserve original metadata)
|
||||
quiet: If True, don't display (emit-only mode)
|
||||
"""
|
||||
from SYS.result_table import ItemDetailView, extract_item_metadata
|
||||
|
||||
# Prepare metadata for the detail view, extracting all fields from subject first
|
||||
metadata = extract_item_metadata(subject) or {}
|
||||
|
||||
# Preserve all additional fields from subject dict if it's a dict-like object
|
||||
if isinstance(subject, dict):
|
||||
for key, value in subject.items():
|
||||
# Skip internal/control fields
|
||||
if not key.startswith("_") and key not in {"selection_action", "selection_args"}:
|
||||
# Convert keys to readable labels (snake_case -> Title Case)
|
||||
label = str(key).replace("_", " ").title()
|
||||
# Only add if not already present from extract_item_metadata
|
||||
if label not in metadata and value is not None:
|
||||
metadata[label] = value
|
||||
|
||||
# Apply explicit parameter overrides (these take priority)
|
||||
if item_title:
|
||||
metadata["Title"] = item_title
|
||||
if file_hash:
|
||||
metadata["Hash"] = file_hash
|
||||
if store:
|
||||
metadata["Store"] = service_name if service_name else store
|
||||
if path:
|
||||
metadata["Path"] = path
|
||||
metadata = prepare_detail_metadata(
|
||||
subject,
|
||||
include_subject_fields=True,
|
||||
title=item_title,
|
||||
hash_value=file_hash,
|
||||
store=(service_name if service_name else store),
|
||||
path=path,
|
||||
)
|
||||
|
||||
# Create ItemDetailView with exclude_tags=True so the panel shows file info
|
||||
# but doesn't duplicate the tag list that we show as a table below.
|
||||
table = ItemDetailView("Tags", item_metadata=metadata, max_columns=1, exclude_tags=True)
|
||||
table.set_source_command("get-tag", [])
|
||||
table = create_detail_view(
|
||||
"Tags",
|
||||
metadata,
|
||||
max_columns=1,
|
||||
exclude_tags=True,
|
||||
source_command=("get-tag", []),
|
||||
)
|
||||
|
||||
# Create TagItem for each tag and add to table
|
||||
tag_items = []
|
||||
@@ -383,12 +375,7 @@ def _extract_title_from(tags_list: List[str]) -> Optional[str]:
|
||||
return extract_title(tags_list)
|
||||
except Exception:
|
||||
pass
|
||||
for t in tags_list:
|
||||
if isinstance(t, str) and t.lower().startswith("title:"):
|
||||
val = t.split(":", 1)[1].strip()
|
||||
if val:
|
||||
return val
|
||||
return None
|
||||
return extract_title_tag_value(tags_list)
|
||||
|
||||
|
||||
def _rename_file_if_title_tag(media: Optional[Path], tags_added: List[str]) -> bool:
|
||||
@@ -1002,9 +989,12 @@ def _run_impl(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Extract values
|
||||
query_raw = parsed_args.get("query")
|
||||
hash_override = sh.parse_single_hash_query(query_raw)
|
||||
if query_raw and not hash_override:
|
||||
log("Invalid -query value (expected hash:<sha256>)", file=sys.stderr)
|
||||
hash_override, query_valid = sh.require_single_hash_query(
|
||||
query_raw,
|
||||
"Invalid -query value (expected hash:<sha256>)",
|
||||
log_file=sys.stderr,
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
store_key = parsed_args.get("store")
|
||||
emit_requested = parsed_args.get("emit", False)
|
||||
@@ -1023,25 +1013,16 @@ def _run_impl(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
except Exception:
|
||||
display_subject = None
|
||||
|
||||
def _value_has_content(value: Any) -> bool:
|
||||
if value is None:
|
||||
return False
|
||||
if isinstance(value, str):
|
||||
return bool(value.strip())
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
return len(value) > 0
|
||||
return True
|
||||
|
||||
def _resolve_subject_value(*keys: str) -> Any:
|
||||
for key in keys:
|
||||
val = get_field(result, key, None)
|
||||
if _value_has_content(val):
|
||||
if sh.value_has_content(val):
|
||||
return val
|
||||
if display_subject is None:
|
||||
return None
|
||||
for key in keys:
|
||||
val = get_field(display_subject, key, None)
|
||||
if _value_has_content(val):
|
||||
if sh.value_has_content(val):
|
||||
return val
|
||||
return None
|
||||
|
||||
@@ -1422,11 +1403,15 @@ def _run_impl(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
)
|
||||
for idx, item in enumerate(items):
|
||||
tags = _filter_scraped_tags(provider.to_tags(item))
|
||||
row = table.add_row()
|
||||
row.add_column("Title", item.get("title", ""))
|
||||
row.add_column("Artist", item.get("artist", ""))
|
||||
row.add_column("Album", item.get("album", ""))
|
||||
row.add_column("Year", item.get("year", ""))
|
||||
add_row_columns(
|
||||
table,
|
||||
[
|
||||
("Title", item.get("title", "")),
|
||||
("Artist", item.get("artist", "")),
|
||||
("Album", item.get("album", "")),
|
||||
("Year", item.get("year", "")),
|
||||
],
|
||||
)
|
||||
payload = {
|
||||
"tag": tags,
|
||||
"provider": provider.name,
|
||||
@@ -1447,7 +1432,7 @@ def _run_impl(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Store an overlay so that a subsequent `@N` selects from THIS metadata table,
|
||||
# not from the previous searchable table.
|
||||
ctx.set_last_result_table_overlay(table, selection_payload)
|
||||
publish_result_table(ctx, table, selection_payload, overlay=True)
|
||||
ctx.set_current_stage_table(table)
|
||||
return 0
|
||||
|
||||
|
||||
@@ -16,7 +16,11 @@ from ._shared import (
|
||||
normalize_hash,
|
||||
)
|
||||
from . import _shared as sh
|
||||
from SYS.detail_view_helpers import create_detail_view, prepare_detail_metadata
|
||||
from SYS.item_accessors import get_extension_field, get_int_field, get_result_title
|
||||
from SYS.logger import log
|
||||
from SYS.payload_builders import build_file_result_payload
|
||||
from SYS.result_publication import publish_result_table
|
||||
from SYS.result_table import Table
|
||||
from Store import Store
|
||||
from SYS import pipeline as ctx
|
||||
@@ -221,52 +225,15 @@ class Get_Url(Cmdlet):
|
||||
|
||||
@staticmethod
|
||||
def _extract_title_from_result(result: Any) -> Optional[str]:
|
||||
# Prefer explicit title field.
|
||||
# Fall back to ResultTable-style columns list.
|
||||
cols = None
|
||||
if isinstance(result, dict):
|
||||
cols = result.get("columns")
|
||||
else:
|
||||
cols = getattr(result, "columns", None)
|
||||
if isinstance(cols, list):
|
||||
for pair in cols:
|
||||
try:
|
||||
if isinstance(pair, (list, tuple)) and len(pair) == 2:
|
||||
k, v = pair
|
||||
if str(k or "").strip().lower() in {"title", "name"}:
|
||||
if isinstance(v, str) and v.strip():
|
||||
return v.strip()
|
||||
except Exception:
|
||||
continue
|
||||
return None
|
||||
return get_result_title(result, "title", "name", "filename")
|
||||
|
||||
@staticmethod
|
||||
def _extract_size_from_hit(hit: Any) -> int | None:
|
||||
for key in ("size", "file_size", "filesize", "size_bytes"):
|
||||
try:
|
||||
val = get_field(hit, key)
|
||||
except Exception:
|
||||
val = None
|
||||
if val is None:
|
||||
continue
|
||||
if isinstance(val, (int, float)):
|
||||
return int(val)
|
||||
try:
|
||||
return int(val)
|
||||
except Exception:
|
||||
continue
|
||||
return None
|
||||
return get_int_field(hit, "size", "file_size", "filesize", "size_bytes")
|
||||
|
||||
@staticmethod
|
||||
def _extract_ext_from_hit(hit: Any) -> str:
|
||||
for key in ("ext", "extension"):
|
||||
try:
|
||||
ext_val = get_field(hit, key)
|
||||
except Exception:
|
||||
ext_val = None
|
||||
if isinstance(ext_val, str) and ext_val.strip():
|
||||
return ext_val.strip().lstrip(".")
|
||||
return ""
|
||||
return get_extension_field(hit, "ext", "extension")
|
||||
|
||||
def _search_urls_across_stores(self,
|
||||
pattern: str,
|
||||
@@ -488,27 +455,25 @@ class Get_Url(Cmdlet):
|
||||
table.set_source_command("get-url", ["-url", search_pattern])
|
||||
|
||||
for item in items:
|
||||
payload: Dict[str, Any] = {
|
||||
# Keep fields for downstream cmdlets.
|
||||
"hash": item.hash,
|
||||
"store": item.store,
|
||||
"url": item.url,
|
||||
"title": item.title,
|
||||
"size": item.size,
|
||||
"ext": item.ext,
|
||||
# Force the visible table columns + ordering.
|
||||
"columns": [
|
||||
payload = build_file_result_payload(
|
||||
title=item.title,
|
||||
hash_value=item.hash,
|
||||
store=item.store,
|
||||
url=item.url,
|
||||
ext=item.ext,
|
||||
columns=[
|
||||
("Title", item.title),
|
||||
("Url", item.url),
|
||||
("Size", item.size),
|
||||
("Ext", item.ext),
|
||||
("Store", item.store),
|
||||
],
|
||||
}
|
||||
size=item.size,
|
||||
)
|
||||
display_items.append(payload)
|
||||
table.add_result(payload)
|
||||
|
||||
ctx.set_last_result_table(table if display_items else None, display_items, subject=result)
|
||||
publish_result_table(ctx, table if display_items else None, display_items, subject=result)
|
||||
|
||||
# Emit after table state is finalized to prevent side effects in TUI rendering
|
||||
for d in display_items:
|
||||
@@ -520,9 +485,11 @@ class Get_Url(Cmdlet):
|
||||
return 0
|
||||
|
||||
# Original mode: Get URLs for a specific file by hash+store
|
||||
query_hash = sh.parse_single_hash_query(parsed.get("query"))
|
||||
if parsed.get("query") and not query_hash:
|
||||
log("Error: -query must be of the form hash:<sha256>")
|
||||
query_hash, query_valid = sh.require_single_hash_query(
|
||||
parsed.get("query"),
|
||||
"Error: -query must be of the form hash:<sha256>",
|
||||
)
|
||||
if not query_valid:
|
||||
return 1
|
||||
|
||||
# Extract hash and store from result or args
|
||||
@@ -550,10 +517,9 @@ class Get_Url(Cmdlet):
|
||||
from SYS.metadata import normalize_urls
|
||||
urls = normalize_urls(urls)
|
||||
|
||||
from SYS.result_table import ItemDetailView, extract_item_metadata
|
||||
|
||||
# Prepare metadata for the detail view
|
||||
metadata = extract_item_metadata(result)
|
||||
metadata = prepare_detail_metadata(result)
|
||||
tag_values = None
|
||||
|
||||
# Enrich the metadata with tags if missing
|
||||
if not metadata.get("Tags"):
|
||||
@@ -577,24 +543,24 @@ class Get_Url(Cmdlet):
|
||||
pass
|
||||
|
||||
if row_tags:
|
||||
row_tags = sorted(list(set(row_tags)))
|
||||
metadata["Tags"] = ", ".join(row_tags)
|
||||
tag_values = sorted(list(set(row_tags)))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if file_hash:
|
||||
metadata["Hash"] = file_hash
|
||||
if store_name:
|
||||
metadata["Store"] = store_name
|
||||
|
||||
table = (
|
||||
ItemDetailView(
|
||||
"Urls",
|
||||
item_metadata=metadata,
|
||||
max_columns=1
|
||||
)._perseverance(True).set_table("url").set_value_case("preserve")
|
||||
metadata = prepare_detail_metadata(
|
||||
result,
|
||||
hash_value=file_hash,
|
||||
store=store_name,
|
||||
tags=tag_values,
|
||||
)
|
||||
|
||||
table = create_detail_view(
|
||||
"Urls",
|
||||
metadata,
|
||||
max_columns=1,
|
||||
table_name="url",
|
||||
source_command=("get-url", []),
|
||||
)
|
||||
table.set_source_command("get-url", [])
|
||||
|
||||
items: List[UrlItem] = []
|
||||
for u in list(urls or []):
|
||||
@@ -609,7 +575,7 @@ class Get_Url(Cmdlet):
|
||||
# Use overlay mode to avoid "merging" with the previous status/table state.
|
||||
# This is idiomatic for detail views and prevents the search table from being
|
||||
# contaminated by partial re-renders.
|
||||
ctx.set_last_result_table_overlay(table, items, subject=result)
|
||||
publish_result_table(ctx, table, items, subject=result, overlay=True)
|
||||
|
||||
# Emit items at the end for pipeline continuity
|
||||
for item in items:
|
||||
|
||||
@@ -28,6 +28,9 @@ should_show_help = sh.should_show_help
|
||||
|
||||
from SYS import pipeline as ctx
|
||||
|
||||
_CHAPTER_TITLE_SPLIT_RE = _re.compile(r"^(?P<prefix>.+?)\s+-\s+(?P<chapter>.+)$")
|
||||
_FFMPEG_TIME_RE = _re.compile(r"time=(\d{2}):(\d{2}):(\d{2})\.(\d{2})")
|
||||
|
||||
try:
|
||||
from pypdf import PdfWriter, PdfReader
|
||||
|
||||
@@ -611,13 +614,12 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
# "Book Name - Chapter"
|
||||
# If *all* titles share the same "Book Name" prefix, strip it.
|
||||
if len(chapters) >= 2:
|
||||
split_re = _re.compile(r"^(?P<prefix>.+?)\s+-\s+(?P<chapter>.+)$")
|
||||
prefixes: List[str] = []
|
||||
stripped_titles: List[str] = []
|
||||
all_match = True
|
||||
for ch in chapters:
|
||||
raw_title = str(ch.get("title") or "").strip()
|
||||
m = split_re.match(raw_title)
|
||||
m = _CHAPTER_TITLE_SPLIT_RE.match(raw_title)
|
||||
if not m:
|
||||
all_match = False
|
||||
break
|
||||
@@ -721,7 +723,6 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
)
|
||||
|
||||
# Monitor progress
|
||||
duration_re = re.compile(r"time=(\d{2}):(\d{2}):(\d{2})\.(\d{2})")
|
||||
total_duration_sec = current_time_ms / 1000.0
|
||||
|
||||
while True:
|
||||
@@ -733,7 +734,7 @@ def _merge_audio(files: List[Path], output: Path, output_format: str) -> bool:
|
||||
|
||||
if line:
|
||||
# Parse time=HH:MM:SS.mm
|
||||
match = duration_re.search(line)
|
||||
match = _FFMPEG_TIME_RE.search(line)
|
||||
if match and total_duration_sec > 0:
|
||||
h, m, s, cs = map(int, match.groups())
|
||||
current_sec = h * 3600 + m * 60 + s + cs / 100.0
|
||||
|
||||
@@ -18,6 +18,7 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple
|
||||
from urllib.parse import urlsplit, quote, urljoin, unquote
|
||||
|
||||
from SYS.logger import log, debug, is_debug_enabled
|
||||
from SYS.item_accessors import extract_item_tags, get_result_title
|
||||
from API.HTTP import HTTPClient
|
||||
from SYS.pipeline_progress import PipelineProgress
|
||||
from SYS.utils import ensure_directory, unique_path, unique_preserve_order
|
||||
@@ -1005,26 +1006,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# ========================================================================
|
||||
|
||||
def _extract_item_tags(item: Any) -> List[str]:
|
||||
if item is None:
|
||||
return []
|
||||
raw = get_field(item, "tag")
|
||||
if isinstance(raw, list):
|
||||
return [str(t) for t in raw if t is not None and str(t).strip()]
|
||||
if isinstance(raw, str) and raw.strip():
|
||||
return [raw.strip()]
|
||||
return []
|
||||
return extract_item_tags(item)
|
||||
|
||||
def _extract_item_title(item: Any) -> str:
|
||||
if item is None:
|
||||
return ""
|
||||
for key in ("title", "name", "filename"):
|
||||
val = get_field(item, key)
|
||||
if val is None:
|
||||
continue
|
||||
text = str(val).strip()
|
||||
if text:
|
||||
return text
|
||||
return ""
|
||||
return get_result_title(item, "title", "name", "filename") or ""
|
||||
|
||||
def _clean_title(text: str) -> str:
|
||||
value = (text or "").strip()
|
||||
|
||||
@@ -14,6 +14,7 @@ import time
|
||||
from urllib.parse import urlparse, parse_qs, unquote, urljoin
|
||||
|
||||
from SYS.logger import log, debug
|
||||
from SYS.payload_builders import build_file_result_payload, normalize_file_extension
|
||||
from ProviderCore.registry import get_search_provider, list_search_providers
|
||||
from SYS.rich_display import (
|
||||
show_provider_config_panel,
|
||||
@@ -21,12 +22,16 @@ from SYS.rich_display import (
|
||||
show_available_providers_panel,
|
||||
)
|
||||
from SYS.database import insert_worker, update_worker, append_worker_stdout
|
||||
from SYS.item_accessors import get_extension_field, get_int_field, get_result_title
|
||||
from SYS.selection_builder import build_default_selection
|
||||
from SYS.result_publication import publish_result_table
|
||||
|
||||
from ._shared import (
|
||||
Cmdlet,
|
||||
CmdletArg,
|
||||
SharedArgs,
|
||||
get_field,
|
||||
get_preferred_store_backend,
|
||||
should_show_help,
|
||||
normalize_hash,
|
||||
first_title_tag,
|
||||
@@ -34,6 +39,35 @@ from ._shared import (
|
||||
)
|
||||
from SYS import pipeline as ctx
|
||||
|
||||
_WHITESPACE_RE = re.compile(r"\s+")
|
||||
_SITE_TOKEN_RE = re.compile(r"(?:^|\s)site:([^\s,]+)", flags=re.IGNORECASE)
|
||||
_FILETYPE_TOKEN_RE = re.compile(
|
||||
r"(?:^|\s)(?:ext|filetype|type):\.?([a-z0-9]{1,12})\b",
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
_SITE_REMOVE_RE = re.compile(r"(?:^|\s)site:[^\s,]+", flags=re.IGNORECASE)
|
||||
_FILETYPE_REMOVE_RE = re.compile(
|
||||
r"(?:^|\s)(?:ext|filetype|type):\.?[a-z0-9]{1,12}\b",
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
_SCHEME_PREFIX_RE = re.compile(r"^[a-z]+:")
|
||||
_YAHOO_RU_RE = re.compile(r"/RU=([^/]+)/RK=", flags=re.IGNORECASE)
|
||||
_HTML_TAG_RE = re.compile(r"<[^>]+>")
|
||||
_DDG_RESULT_ANCHOR_RE = re.compile(
|
||||
r'<a[^>]+class="[^"]*result__a[^"]*"[^>]+href="([^"]+)"[^>]*>(.*?)</a>',
|
||||
flags=re.IGNORECASE | re.DOTALL,
|
||||
)
|
||||
_GENERIC_ANCHOR_RE = re.compile(
|
||||
r'<a[^>]+href=["\']([^"\']+)["\'][^>]*>(.*?)</a>',
|
||||
flags=re.IGNORECASE | re.DOTALL,
|
||||
)
|
||||
_BING_RESULT_ANCHOR_RE = re.compile(
|
||||
r'<h2[^>]*>\s*<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>',
|
||||
flags=re.IGNORECASE | re.DOTALL,
|
||||
)
|
||||
_STORE_FILTER_RE = re.compile(r"\bstore:([^\s,]+)", flags=re.IGNORECASE)
|
||||
_STORE_FILTER_REMOVE_RE = re.compile(r"\s*[,]?\s*store:[^\s,]+", flags=re.IGNORECASE)
|
||||
|
||||
|
||||
class _WorkerLogger:
|
||||
def __init__(self, worker_id: str) -> None:
|
||||
@@ -230,7 +264,7 @@ class search_file(Cmdlet):
|
||||
|
||||
@staticmethod
|
||||
def _normalize_space(text: Any) -> str:
|
||||
return re.sub(r"\s+", " ", str(text or "")).strip()
|
||||
return _WHITESPACE_RE.sub(" ", str(text or "")).strip()
|
||||
|
||||
@classmethod
|
||||
def _build_web_search_plan(
|
||||
@@ -266,7 +300,7 @@ class search_file(Cmdlet):
|
||||
site_token_to_strip = ""
|
||||
seed_url = ""
|
||||
|
||||
site_match = re.search(r"(?:^|\s)site:([^\s,]+)", text, flags=re.IGNORECASE)
|
||||
site_match = _SITE_TOKEN_RE.search(text)
|
||||
if site_match:
|
||||
site_host = cls._extract_site_host(site_match.group(1))
|
||||
seed_url = str(site_match.group(1) or "").strip()
|
||||
@@ -286,7 +320,7 @@ class search_file(Cmdlet):
|
||||
lower_candidate = candidate.lower()
|
||||
if lower_candidate.startswith(("ext:", "filetype:", "type:", "site:")):
|
||||
continue
|
||||
if re.match(r"^[a-z]+:", lower_candidate) and not lower_candidate.startswith(
|
||||
if _SCHEME_PREFIX_RE.match(lower_candidate) and not lower_candidate.startswith(
|
||||
("http://", "https://")
|
||||
):
|
||||
continue
|
||||
@@ -299,11 +333,7 @@ class search_file(Cmdlet):
|
||||
if not site_host:
|
||||
return None
|
||||
|
||||
filetype_match = re.search(
|
||||
r"(?:^|\s)(?:ext|filetype|type):\.?([a-z0-9]{1,12})\b",
|
||||
text,
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
filetype_match = _FILETYPE_TOKEN_RE.search(text)
|
||||
filetype = cls._normalize_extension(filetype_match.group(1)) if filetype_match else ""
|
||||
|
||||
# Feature gate: trigger this web-search mode when filetype is present
|
||||
@@ -313,13 +343,8 @@ class search_file(Cmdlet):
|
||||
return None
|
||||
|
||||
residual = text
|
||||
residual = re.sub(r"(?:^|\s)site:[^\s,]+", " ", residual, flags=re.IGNORECASE)
|
||||
residual = re.sub(
|
||||
r"(?:^|\s)(?:ext|filetype|type):\.?[a-z0-9]{1,12}\b",
|
||||
" ",
|
||||
residual,
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
residual = _SITE_REMOVE_RE.sub(" ", residual)
|
||||
residual = _FILETYPE_REMOVE_RE.sub(" ", residual)
|
||||
|
||||
if site_from_positional and positional_args:
|
||||
first = str(positional_args[0] or "").strip()
|
||||
@@ -631,7 +656,7 @@ class search_file(Cmdlet):
|
||||
|
||||
# Yahoo result links often look like:
|
||||
# https://r.search.yahoo.com/.../RU=<url-encoded-target>/RK=...
|
||||
ru_match = re.search(r"/RU=([^/]+)/RK=", raw_href, flags=re.IGNORECASE)
|
||||
ru_match = _YAHOO_RU_RE.search(raw_href)
|
||||
if ru_match:
|
||||
try:
|
||||
return str(unquote(ru_match.group(1))).strip()
|
||||
@@ -664,6 +689,75 @@ class search_file(Cmdlet):
|
||||
return False
|
||||
return host == target or host.endswith(f".{target}")
|
||||
|
||||
@staticmethod
|
||||
def _itertext_join(node: Any) -> str:
|
||||
try:
|
||||
return " ".join([str(text).strip() for text in node.itertext() if str(text).strip()])
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def _html_fragment_to_text(fragment: Any) -> str:
|
||||
text = _HTML_TAG_RE.sub(" ", str(fragment or ""))
|
||||
return html.unescape(text)
|
||||
|
||||
@classmethod
|
||||
def _append_web_result(
|
||||
cls,
|
||||
items: List[Dict[str, str]],
|
||||
seen_urls: set[str],
|
||||
*,
|
||||
site_host: str,
|
||||
url_text: str,
|
||||
title_text: str,
|
||||
snippet_text: str,
|
||||
) -> None:
|
||||
url_clean = str(url_text or "").strip()
|
||||
if not url_clean or not url_clean.startswith(("http://", "https://")):
|
||||
return
|
||||
if not cls._url_matches_site(url_clean, site_host):
|
||||
return
|
||||
if url_clean in seen_urls:
|
||||
return
|
||||
|
||||
seen_urls.add(url_clean)
|
||||
items.append(
|
||||
{
|
||||
"url": url_clean,
|
||||
"title": cls._normalize_space(title_text) or url_clean,
|
||||
"snippet": cls._normalize_space(snippet_text),
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _parse_web_results_with_fallback(
|
||||
cls,
|
||||
*,
|
||||
html_text: str,
|
||||
limit: int,
|
||||
lxml_parser: Any,
|
||||
regex_parser: Any,
|
||||
fallback_when_empty: bool = False,
|
||||
) -> List[Dict[str, str]]:
|
||||
"""Run an lxml-based parser with an optional regex fallback."""
|
||||
items: List[Dict[str, str]] = []
|
||||
seen_urls: set[str] = set()
|
||||
should_run_regex = False
|
||||
|
||||
try:
|
||||
from lxml import html as lxml_html
|
||||
|
||||
doc = lxml_html.fromstring(html_text or "")
|
||||
lxml_parser(doc, items, seen_urls)
|
||||
should_run_regex = fallback_when_empty and not items
|
||||
except Exception:
|
||||
should_run_regex = True
|
||||
|
||||
if should_run_regex:
|
||||
regex_parser(html_text or "", items, seen_urls)
|
||||
|
||||
return items[:limit]
|
||||
|
||||
@classmethod
|
||||
def _parse_duckduckgo_results(
|
||||
cls,
|
||||
@@ -673,36 +767,7 @@ class search_file(Cmdlet):
|
||||
limit: int,
|
||||
) -> List[Dict[str, str]]:
|
||||
"""Parse DuckDuckGo HTML results into normalized rows."""
|
||||
items: List[Dict[str, str]] = []
|
||||
seen_urls: set[str] = set()
|
||||
|
||||
def _add_item(url_text: str, title_text: str, snippet_text: str) -> None:
|
||||
url_clean = str(url_text or "").strip()
|
||||
if not url_clean:
|
||||
return
|
||||
if not url_clean.startswith(("http://", "https://")):
|
||||
return
|
||||
if not cls._url_matches_site(url_clean, site_host):
|
||||
return
|
||||
if url_clean in seen_urls:
|
||||
return
|
||||
|
||||
seen_urls.add(url_clean)
|
||||
title_clean = cls._normalize_space(title_text)
|
||||
snippet_clean = cls._normalize_space(snippet_text)
|
||||
items.append(
|
||||
{
|
||||
"url": url_clean,
|
||||
"title": title_clean or url_clean,
|
||||
"snippet": snippet_clean,
|
||||
}
|
||||
)
|
||||
|
||||
# Preferred parser path (lxml is already a project dependency).
|
||||
try:
|
||||
from lxml import html as lxml_html
|
||||
|
||||
doc = lxml_html.fromstring(html_text or "")
|
||||
def _parse_lxml(doc: Any, items: List[Dict[str, str]], seen_urls: set[str]) -> None:
|
||||
result_nodes = doc.xpath("//div[contains(@class, 'result')]")
|
||||
|
||||
for node in result_nodes:
|
||||
@@ -712,40 +777,47 @@ class search_file(Cmdlet):
|
||||
|
||||
link = links[0]
|
||||
href = cls._extract_duckduckgo_target_url(link.get("href"))
|
||||
title = " ".join([str(t).strip() for t in link.itertext() if str(t).strip()])
|
||||
title = cls._itertext_join(link)
|
||||
|
||||
snippet_nodes = node.xpath(".//*[contains(@class, 'result__snippet')]")
|
||||
snippet = ""
|
||||
if snippet_nodes:
|
||||
snippet = " ".join(
|
||||
[str(t).strip() for t in snippet_nodes[0].itertext() if str(t).strip()]
|
||||
)
|
||||
snippet = cls._itertext_join(snippet_nodes[0])
|
||||
|
||||
_add_item(href, title, snippet)
|
||||
cls._append_web_result(
|
||||
items,
|
||||
seen_urls,
|
||||
site_host=site_host,
|
||||
url_text=href,
|
||||
title_text=title,
|
||||
snippet_text=snippet,
|
||||
)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
except Exception:
|
||||
# Fallback to regex parser below.
|
||||
pass
|
||||
|
||||
if items:
|
||||
return items[:limit]
|
||||
def _parse_regex(raw_html: str, items: List[Dict[str, str]], seen_urls: set[str]) -> None:
|
||||
for match in _DDG_RESULT_ANCHOR_RE.finditer(raw_html):
|
||||
href = cls._extract_duckduckgo_target_url(match.group(1))
|
||||
title_html = match.group(2)
|
||||
title = cls._html_fragment_to_text(title_html)
|
||||
cls._append_web_result(
|
||||
items,
|
||||
seen_urls,
|
||||
site_host=site_host,
|
||||
url_text=href,
|
||||
title_text=title,
|
||||
snippet_text="",
|
||||
)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
|
||||
# Regex fallback for environments where HTML parsing fails.
|
||||
anchor_pattern = re.compile(
|
||||
r'<a[^>]+class="[^"]*result__a[^"]*"[^>]+href="([^"]+)"[^>]*>(.*?)</a>',
|
||||
flags=re.IGNORECASE | re.DOTALL,
|
||||
return cls._parse_web_results_with_fallback(
|
||||
html_text=html_text,
|
||||
limit=limit,
|
||||
lxml_parser=_parse_lxml,
|
||||
regex_parser=_parse_regex,
|
||||
fallback_when_empty=True,
|
||||
)
|
||||
for match in anchor_pattern.finditer(html_text or ""):
|
||||
href = cls._extract_duckduckgo_target_url(match.group(1))
|
||||
title_html = match.group(2)
|
||||
title = re.sub(r"<[^>]+>", " ", str(title_html or ""))
|
||||
title = html.unescape(title)
|
||||
_add_item(href, title, "")
|
||||
if len(items) >= limit:
|
||||
break
|
||||
|
||||
return items[:limit]
|
||||
|
||||
@classmethod
|
||||
def _parse_yahoo_results(
|
||||
@@ -756,51 +828,43 @@ class search_file(Cmdlet):
|
||||
limit: int,
|
||||
) -> List[Dict[str, str]]:
|
||||
"""Parse Yahoo HTML search results into normalized rows."""
|
||||
items: List[Dict[str, str]] = []
|
||||
seen_urls: set[str] = set()
|
||||
|
||||
def _add_item(url_text: str, title_text: str, snippet_text: str) -> None:
|
||||
url_clean = str(url_text or "").strip()
|
||||
if not url_clean or not url_clean.startswith(("http://", "https://")):
|
||||
return
|
||||
if not cls._url_matches_site(url_clean, site_host):
|
||||
return
|
||||
if url_clean in seen_urls:
|
||||
return
|
||||
seen_urls.add(url_clean)
|
||||
items.append(
|
||||
{
|
||||
"url": url_clean,
|
||||
"title": cls._normalize_space(title_text) or url_clean,
|
||||
"snippet": cls._normalize_space(snippet_text),
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
from lxml import html as lxml_html
|
||||
|
||||
doc = lxml_html.fromstring(html_text or "")
|
||||
def _parse_lxml(doc: Any, items: List[Dict[str, str]], seen_urls: set[str]) -> None:
|
||||
for node in doc.xpath("//a[@href]"):
|
||||
href = cls._extract_yahoo_target_url(node.get("href"))
|
||||
title = " ".join([str(t).strip() for t in node.itertext() if str(t).strip()])
|
||||
_add_item(href, title, "")
|
||||
if len(items) >= limit:
|
||||
break
|
||||
except Exception:
|
||||
anchor_pattern = re.compile(
|
||||
r'<a[^>]+href=["\']([^"\']+)["\'][^>]*>(.*?)</a>',
|
||||
flags=re.IGNORECASE | re.DOTALL,
|
||||
)
|
||||
for match in anchor_pattern.finditer(html_text or ""):
|
||||
href = cls._extract_yahoo_target_url(match.group(1))
|
||||
title_html = match.group(2)
|
||||
title = re.sub(r"<[^>]+>", " ", str(title_html or ""))
|
||||
title = html.unescape(title)
|
||||
_add_item(href, title, "")
|
||||
title = cls._itertext_join(node)
|
||||
cls._append_web_result(
|
||||
items,
|
||||
seen_urls,
|
||||
site_host=site_host,
|
||||
url_text=href,
|
||||
title_text=title,
|
||||
snippet_text="",
|
||||
)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
|
||||
return items[:limit]
|
||||
def _parse_regex(raw_html: str, items: List[Dict[str, str]], seen_urls: set[str]) -> None:
|
||||
for match in _GENERIC_ANCHOR_RE.finditer(raw_html):
|
||||
href = cls._extract_yahoo_target_url(match.group(1))
|
||||
title_html = match.group(2)
|
||||
title = cls._html_fragment_to_text(title_html)
|
||||
cls._append_web_result(
|
||||
items,
|
||||
seen_urls,
|
||||
site_host=site_host,
|
||||
url_text=href,
|
||||
title_text=title,
|
||||
snippet_text="",
|
||||
)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
|
||||
return cls._parse_web_results_with_fallback(
|
||||
html_text=html_text,
|
||||
limit=limit,
|
||||
lxml_parser=_parse_lxml,
|
||||
regex_parser=_parse_regex,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _query_yahoo(
|
||||
@@ -881,30 +945,7 @@ class search_file(Cmdlet):
|
||||
limit: int,
|
||||
) -> List[Dict[str, str]]:
|
||||
"""Parse Bing HTML search results into normalized rows."""
|
||||
items: List[Dict[str, str]] = []
|
||||
seen_urls: set[str] = set()
|
||||
|
||||
def _add_item(url_text: str, title_text: str, snippet_text: str) -> None:
|
||||
url_clean = str(url_text or "").strip()
|
||||
if not url_clean or not url_clean.startswith(("http://", "https://")):
|
||||
return
|
||||
if not cls._url_matches_site(url_clean, site_host):
|
||||
return
|
||||
if url_clean in seen_urls:
|
||||
return
|
||||
seen_urls.add(url_clean)
|
||||
items.append(
|
||||
{
|
||||
"url": url_clean,
|
||||
"title": cls._normalize_space(title_text) or url_clean,
|
||||
"snippet": cls._normalize_space(snippet_text),
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
from lxml import html as lxml_html
|
||||
|
||||
doc = lxml_html.fromstring(html_text or "")
|
||||
def _parse_lxml(doc: Any, items: List[Dict[str, str]], seen_urls: set[str]) -> None:
|
||||
result_nodes = doc.xpath("//li[contains(@class, 'b_algo')]")
|
||||
|
||||
for node in result_nodes:
|
||||
@@ -913,7 +954,7 @@ class search_file(Cmdlet):
|
||||
continue
|
||||
link = links[0]
|
||||
href = str(link.get("href") or "").strip()
|
||||
title = " ".join([str(t).strip() for t in link.itertext() if str(t).strip()])
|
||||
title = cls._itertext_join(link)
|
||||
|
||||
snippet = ""
|
||||
for sel in (
|
||||
@@ -923,28 +964,41 @@ class search_file(Cmdlet):
|
||||
):
|
||||
snip_nodes = node.xpath(sel)
|
||||
if snip_nodes:
|
||||
snippet = " ".join(
|
||||
[str(t).strip() for t in snip_nodes[0].itertext() if str(t).strip()]
|
||||
)
|
||||
snippet = cls._itertext_join(snip_nodes[0])
|
||||
break
|
||||
|
||||
_add_item(href, title, snippet)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
except Exception:
|
||||
anchor_pattern = re.compile(
|
||||
r"<h2[^>]*>\s*<a[^>]+href=\"([^\"]+)\"[^>]*>(.*?)</a>",
|
||||
flags=re.IGNORECASE | re.DOTALL,
|
||||
)
|
||||
for match in anchor_pattern.finditer(html_text or ""):
|
||||
href = match.group(1)
|
||||
title = re.sub(r"<[^>]+>", " ", str(match.group(2) or ""))
|
||||
title = html.unescape(title)
|
||||
_add_item(href, title, "")
|
||||
cls._append_web_result(
|
||||
items,
|
||||
seen_urls,
|
||||
site_host=site_host,
|
||||
url_text=href,
|
||||
title_text=title,
|
||||
snippet_text=snippet,
|
||||
)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
|
||||
return items[:limit]
|
||||
def _parse_regex(raw_html: str, items: List[Dict[str, str]], seen_urls: set[str]) -> None:
|
||||
for match in _BING_RESULT_ANCHOR_RE.finditer(raw_html):
|
||||
href = match.group(1)
|
||||
title = cls._html_fragment_to_text(match.group(2))
|
||||
cls._append_web_result(
|
||||
items,
|
||||
seen_urls,
|
||||
site_host=site_host,
|
||||
url_text=href,
|
||||
title_text=title,
|
||||
snippet_text="",
|
||||
)
|
||||
if len(items) >= limit:
|
||||
break
|
||||
|
||||
return cls._parse_web_results_with_fallback(
|
||||
html_text=html_text,
|
||||
limit=limit,
|
||||
lxml_parser=_parse_lxml,
|
||||
regex_parser=_parse_regex,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _query_web_search(
|
||||
@@ -1218,33 +1272,30 @@ class search_file(Cmdlet):
|
||||
if file_name:
|
||||
title = file_name
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"title": title,
|
||||
"path": target_url,
|
||||
"url": target_url,
|
||||
"source": "web",
|
||||
"store": "web",
|
||||
"table": "web.search",
|
||||
"ext": detected_ext,
|
||||
"detail": snippet,
|
||||
"tag": [f"site:{site_host}"] + ([f"type:{detected_ext}"] if detected_ext else []),
|
||||
"columns": [
|
||||
payload = build_file_result_payload(
|
||||
title=title,
|
||||
path=target_url,
|
||||
url=target_url,
|
||||
source="web",
|
||||
store="web",
|
||||
table="web.search",
|
||||
ext=detected_ext,
|
||||
detail=snippet,
|
||||
tag=[f"site:{site_host}"] + ([f"type:{detected_ext}"] if detected_ext else []),
|
||||
columns=[
|
||||
("Title", title),
|
||||
("Type", detected_ext),
|
||||
("URL", target_url),
|
||||
],
|
||||
"_selection_args": ["-url", target_url],
|
||||
"_selection_action": ["download-file", "-url", target_url],
|
||||
}
|
||||
_selection_args=["-url", target_url],
|
||||
_selection_action=["download-file", "-url", target_url],
|
||||
)
|
||||
|
||||
table.add_result(payload)
|
||||
results_list.append(payload)
|
||||
ctx.emit(payload)
|
||||
|
||||
if refresh_mode:
|
||||
ctx.set_last_result_table_preserve_history(table, results_list)
|
||||
else:
|
||||
ctx.set_last_result_table(table, results_list)
|
||||
publish_result_table(ctx, table, results_list, overlay=refresh_mode)
|
||||
|
||||
ctx.set_current_stage_table(table)
|
||||
|
||||
@@ -1267,15 +1318,7 @@ class search_file(Cmdlet):
|
||||
@staticmethod
|
||||
def _normalize_extension(ext_value: Any) -> str:
|
||||
"""Sanitize extension strings to alphanumerics and cap at 5 chars."""
|
||||
ext = str(ext_value or "").strip().lstrip(".")
|
||||
for sep in (" ", "|", "(", "[", "{", ",", ";"):
|
||||
if sep in ext:
|
||||
ext = ext.split(sep, 1)[0]
|
||||
break
|
||||
if "." in ext:
|
||||
ext = ext.split(".")[-1]
|
||||
ext = "".join(ch for ch in ext if ch.isalnum())
|
||||
return ext[:5]
|
||||
return normalize_file_extension(ext_value)
|
||||
|
||||
@staticmethod
|
||||
def _normalize_lookup_target(value: Optional[str]) -> str:
|
||||
@@ -1580,10 +1623,7 @@ class search_file(Cmdlet):
|
||||
results_list.append(item_dict)
|
||||
ctx.emit(item_dict)
|
||||
|
||||
if refresh_mode:
|
||||
ctx.set_last_result_table_preserve_history(table, results_list)
|
||||
else:
|
||||
ctx.set_last_result_table(table, results_list)
|
||||
publish_result_table(ctx, table, results_list, overlay=refresh_mode)
|
||||
|
||||
ctx.set_current_stage_table(table)
|
||||
|
||||
@@ -1764,11 +1804,11 @@ class search_file(Cmdlet):
|
||||
|
||||
store_filter: Optional[str] = None
|
||||
if query:
|
||||
match = re.search(r"\bstore:([^\s,]+)", query, flags=re.IGNORECASE)
|
||||
match = _STORE_FILTER_RE.search(query)
|
||||
if match:
|
||||
store_filter = match.group(1).strip() or None
|
||||
query = re.sub(r"\s*[,]?\s*store:[^\s,]+", " ", query, flags=re.IGNORECASE)
|
||||
query = re.sub(r"\s{2,}", " ", query)
|
||||
query = _STORE_FILTER_REMOVE_RE.sub(" ", query)
|
||||
query = _WHITESPACE_RE.sub(" ", query)
|
||||
query = query.strip().strip(",")
|
||||
|
||||
if store_filter and not storage_backend:
|
||||
@@ -1912,19 +1952,15 @@ class search_file(Cmdlet):
|
||||
for h in hash_query:
|
||||
resolved_backend_name: Optional[str] = None
|
||||
resolved_backend = None
|
||||
store_registry = None
|
||||
|
||||
for backend_name in backends_to_try:
|
||||
backend = None
|
||||
try:
|
||||
backend = get_backend_instance(config, backend_name, suppress_debug=True)
|
||||
if backend is None:
|
||||
# Last-resort: instantiate full registry for this backend only
|
||||
from Store import Store as _Store
|
||||
_store = _Store(config=config, suppress_debug=True)
|
||||
if _store.is_available(backend_name):
|
||||
backend = _store[backend_name]
|
||||
except Exception:
|
||||
backend = None
|
||||
backend, store_registry, _exc = get_preferred_store_backend(
|
||||
config,
|
||||
backend_name,
|
||||
store_registry=store_registry,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
continue
|
||||
try:
|
||||
@@ -2017,16 +2053,14 @@ class search_file(Cmdlet):
|
||||
except Exception:
|
||||
title_from_tag = None
|
||||
|
||||
title = title_from_tag or meta_obj.get("title") or meta_obj.get(
|
||||
"name"
|
||||
)
|
||||
title = title_from_tag or get_result_title(meta_obj, "title", "name")
|
||||
if not title and path_str:
|
||||
try:
|
||||
title = Path(path_str).stem
|
||||
except Exception:
|
||||
title = path_str
|
||||
|
||||
ext_val = meta_obj.get("ext") or meta_obj.get("extension")
|
||||
ext_val = get_extension_field(meta_obj, "ext", "extension")
|
||||
if not ext_val and path_str:
|
||||
try:
|
||||
ext_val = Path(path_str).suffix
|
||||
@@ -2038,27 +2072,19 @@ class search_file(Cmdlet):
|
||||
except Exception:
|
||||
ext_val = None
|
||||
|
||||
size_bytes = meta_obj.get("size")
|
||||
if size_bytes is None:
|
||||
size_bytes = meta_obj.get("size_bytes")
|
||||
try:
|
||||
size_bytes_int: Optional[int] = (
|
||||
int(size_bytes) if size_bytes is not None else None
|
||||
)
|
||||
except Exception:
|
||||
size_bytes_int = None
|
||||
size_bytes_int = get_int_field(meta_obj, "size", "size_bytes")
|
||||
|
||||
payload: Dict[str,
|
||||
Any] = {
|
||||
"title": str(title or h),
|
||||
"hash": h,
|
||||
"store": resolved_backend_name,
|
||||
"path": path_str,
|
||||
"ext": self._normalize_extension(ext_val),
|
||||
"size_bytes": size_bytes_int,
|
||||
"tag": tags_list,
|
||||
"url": meta_obj.get("url") or [],
|
||||
}
|
||||
payload = build_file_result_payload(
|
||||
title=title,
|
||||
fallback_title=h,
|
||||
hash_value=h,
|
||||
store=resolved_backend_name,
|
||||
path=path_str,
|
||||
ext=ext_val,
|
||||
size_bytes=size_bytes_int,
|
||||
tag=tags_list,
|
||||
url=meta_obj.get("url") or [],
|
||||
)
|
||||
|
||||
self._set_storage_display_columns(payload)
|
||||
|
||||
@@ -2106,16 +2132,20 @@ class search_file(Cmdlet):
|
||||
|
||||
if backend_to_search:
|
||||
searched_backends.append(backend_to_search)
|
||||
target_backend, _store_registry, exc = get_preferred_store_backend(
|
||||
config,
|
||||
backend_to_search,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if target_backend is None:
|
||||
if exc is not None:
|
||||
log(f"Backend '{backend_to_search}' not found: {exc}", file=sys.stderr)
|
||||
db.update_worker_status(worker_id, "error")
|
||||
return 1
|
||||
debug(f"[search-file] Requested backend '{backend_to_search}' not found")
|
||||
return 1
|
||||
try:
|
||||
target_backend = get_backend_instance(config, backend_to_search, suppress_debug=True)
|
||||
if target_backend is None:
|
||||
from Store import Store as _Store
|
||||
_store = _Store(config=config, suppress_debug=True)
|
||||
if _store.is_available(backend_to_search):
|
||||
target_backend = _store[backend_to_search]
|
||||
else:
|
||||
debug(f"[search-file] Requested backend '{backend_to_search}' not found")
|
||||
return 1
|
||||
pass
|
||||
except Exception as exc:
|
||||
log(f"Backend '{backend_to_search}' not found: {exc}", file=sys.stderr)
|
||||
db.update_worker_status(worker_id, "error")
|
||||
@@ -2135,18 +2165,19 @@ class search_file(Cmdlet):
|
||||
)
|
||||
else:
|
||||
all_results = []
|
||||
store_registry = None
|
||||
for backend_name in list_configured_backend_names(config or {}):
|
||||
try:
|
||||
backend = get_backend_instance(config, backend_name, suppress_debug=True)
|
||||
backend, store_registry, _exc = get_preferred_store_backend(
|
||||
config,
|
||||
backend_name,
|
||||
store_registry=store_registry,
|
||||
suppress_debug=True,
|
||||
)
|
||||
if backend is None:
|
||||
from Store import Store as _Store
|
||||
_store = _Store(config=config, suppress_debug=True)
|
||||
if _store.is_available(backend_name):
|
||||
backend = _store[backend_name]
|
||||
else:
|
||||
# Configured backend name exists but has no registered implementation or failed to load.
|
||||
# (e.g. 'all-debrid' being treated as a store but having no store provider).
|
||||
continue
|
||||
# Configured backend name exists but has no registered implementation or failed to load.
|
||||
# (e.g. 'all-debrid' being treated as a store but having no store provider).
|
||||
continue
|
||||
|
||||
searched_backends.append(backend_name)
|
||||
|
||||
@@ -2216,63 +2247,11 @@ class search_file(Cmdlet):
|
||||
|
||||
# Populate default selection args for interactive @N selection/hash/url handling
|
||||
try:
|
||||
sel_args: Optional[List[str]] = None
|
||||
sel_action: Optional[List[str]] = None
|
||||
|
||||
# Prefer explicit path when available
|
||||
p_val = normalized.get("path") or normalized.get("target") or normalized.get("url")
|
||||
if p_val:
|
||||
p_str = str(p_val or "").strip()
|
||||
if p_str:
|
||||
if p_str.startswith(("http://", "https://", "magnet:", "torrent:")):
|
||||
h = normalized.get("hash") or normalized.get("file_hash") or normalized.get("hash_hex")
|
||||
s_val = normalized.get("store")
|
||||
if h and s_val and "/view_file" in p_str:
|
||||
try:
|
||||
h_norm = normalize_hash(h)
|
||||
except Exception:
|
||||
h_norm = str(h)
|
||||
sel_args = ["-query", f"hash:{h_norm}", "-store", str(s_val)]
|
||||
sel_action = ["get-metadata", "-query", f"hash:{h_norm}", "-store", str(s_val)]
|
||||
else:
|
||||
sel_args = ["-url", p_str]
|
||||
sel_action = ["download-file", "-url", p_str]
|
||||
else:
|
||||
try:
|
||||
from SYS.utils import expand_path
|
||||
|
||||
full_path = expand_path(p_str)
|
||||
# Prefer showing metadata details when we have a hash+store context
|
||||
h = normalized.get("hash") or normalized.get("file_hash") or normalized.get("hash_hex")
|
||||
s_val = normalized.get("store")
|
||||
if h and s_val:
|
||||
try:
|
||||
h_norm = normalize_hash(h)
|
||||
except Exception:
|
||||
h_norm = str(h)
|
||||
sel_args = ["-query", f"hash:{h_norm}", "-store", str(s_val)]
|
||||
sel_action = ["get-metadata", "-query", f"hash:{h_norm}", "-store", str(s_val)]
|
||||
else:
|
||||
sel_args = ["-path", str(full_path)]
|
||||
# Default action for local paths: get-file to fetch or operate on the path
|
||||
sel_action = ["get-file", "-path", str(full_path)]
|
||||
except Exception:
|
||||
sel_args = ["-path", p_str]
|
||||
sel_action = ["get-file", "-path", p_str]
|
||||
|
||||
# Fallback: use hash+store when available
|
||||
if sel_args is None:
|
||||
h = normalized.get("hash") or normalized.get("file_hash") or normalized.get("hash_hex")
|
||||
s_val = normalized.get("store")
|
||||
if h and s_val:
|
||||
try:
|
||||
h_norm = normalize_hash(h)
|
||||
except Exception:
|
||||
h_norm = str(h)
|
||||
sel_args = ["-query", f"hash:{h_norm}", "-store", str(s_val)]
|
||||
# Show metadata details by default for store/hash selections
|
||||
sel_action = ["get-metadata", "-query", f"hash:{h_norm}", "-store", str(s_val)]
|
||||
|
||||
sel_args, sel_action = build_default_selection(
|
||||
path_value=normalized.get("path") or normalized.get("target") or normalized.get("url"),
|
||||
hash_value=normalized.get("hash") or normalized.get("file_hash") or normalized.get("hash_hex"),
|
||||
store_value=normalized.get("store"),
|
||||
)
|
||||
if sel_args:
|
||||
normalized["_selection_args"] = [str(x) for x in sel_args]
|
||||
if sel_action:
|
||||
@@ -2305,11 +2284,17 @@ class search_file(Cmdlet):
|
||||
subject_hash = query.split("hash:")[1].split(",")[0].strip()
|
||||
subject_context = {"store": backend_to_search, "hash": subject_hash}
|
||||
|
||||
ctx.set_last_result_table_overlay(table, results_list, subject=subject_context)
|
||||
publish_result_table(
|
||||
ctx,
|
||||
table,
|
||||
results_list,
|
||||
subject=subject_context,
|
||||
overlay=True,
|
||||
)
|
||||
except Exception:
|
||||
ctx.set_last_result_table_preserve_history(table, results_list)
|
||||
publish_result_table(ctx, table, results_list, overlay=True)
|
||||
else:
|
||||
ctx.set_last_result_table(table, results_list)
|
||||
publish_result_table(ctx, table, results_list)
|
||||
db.append_worker_stdout(
|
||||
worker_id,
|
||||
_summarize_worker_results(results_list)
|
||||
|
||||
@@ -12,9 +12,9 @@ import time
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from SYS.logger import log, debug
|
||||
from SYS.item_accessors import get_store_name
|
||||
from SYS.utils import sha256_file
|
||||
from . import _shared as sh
|
||||
from Store import Store
|
||||
|
||||
Cmdlet = sh.Cmdlet
|
||||
CmdletArg = sh.CmdletArg
|
||||
@@ -153,12 +153,7 @@ def _sanitize_filename(name: str, *, max_len: int = 140) -> str:
|
||||
|
||||
|
||||
def _extract_store_name(item: Any) -> Optional[str]:
|
||||
try:
|
||||
store_val = get_field(item, "store")
|
||||
s = str(store_val or "").strip()
|
||||
return s if s else None
|
||||
except Exception:
|
||||
return None
|
||||
return get_store_name(item, "store")
|
||||
|
||||
|
||||
def _persist_alt_relationship(
|
||||
@@ -437,9 +432,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
if store_name:
|
||||
try:
|
||||
store = Store(config)
|
||||
if store.is_available(store_name):
|
||||
backend = store[str(store_name)]
|
||||
backend, _store_registry, _exc = sh.get_store_backend(
|
||||
config,
|
||||
store_name,
|
||||
)
|
||||
if backend is not None:
|
||||
stored_hash = backend.add_file(
|
||||
Path(str(output_path)),
|
||||
title=new_title,
|
||||
|
||||
79
cmdnat/_parsing.py
Normal file
79
cmdnat/_parsing.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Iterable, List, Optional, Sequence
|
||||
|
||||
VALUE_ARG_FLAGS = frozenset({"-value", "--value", "-set-value", "--set-value"})
|
||||
|
||||
|
||||
def extract_piped_value(result: Any) -> Optional[str]:
    """Coerce a piped pipeline result into a usable string value.

    Strings are stripped (empty -> None), numbers are stringified, and a
    dict contributes its ``"value"`` entry when present.  Anything else
    yields None.
    """
    if isinstance(result, str):
        stripped = result.strip()
        return stripped or None
    if isinstance(result, (int, float)):
        return str(result)
    if isinstance(result, dict):
        raw = result.get("value")
        if raw is None:
            return None
        return str(raw).strip()
    return None
|
||||
|
||||
|
||||
def extract_arg_value(
    args: Sequence[str],
    *,
    flags: Iterable[str],
    allow_positional: bool = False,
) -> Optional[str]:
    """Extract a value from CLI-style tokens for any of the given flags.

    Supports both ``-flag value`` and ``-flag=value`` forms.  Flag matching
    is case-insensitive; the value's original case is preserved.  When
    *allow_positional* is true and no flag matched, the first non-flag
    token is returned instead.

    Returns None when nothing usable is found.
    """
    if not args:
        return None

    tokens = [str(tok) for tok in args if tok is not None]
    normalized_flags = {
        str(flag).strip().lower() for flag in flags if str(flag).strip()
    }
    if not normalized_flags:
        return None

    for idx, tok in enumerate(tokens):
        text = tok.strip()
        if not text:
            continue
        low = text.lower()
        # "-flag value" form: the next token carries the value.
        if low in normalized_flags and idx + 1 < len(tokens):
            candidate = str(tokens[idx + 1]).strip()
            if candidate:
                return candidate
        # "-flag=value" form.  Split the original-case text so the value
        # keeps its case (previously the lowercased token was split, which
        # case-mangled the returned value); only the flag name is compared
        # case-insensitively.  Whitespace-only values are treated as absent.
        if "=" in text:
            head, value = text.split("=", 1)
            if head.strip().lower() in normalized_flags and value.strip():
                return value.strip()

    if not allow_positional:
        return None

    # Positional fallback: first token that does not look like a flag.
    for tok in tokens:
        text = tok.strip()
        if text and not text.startswith("-"):
            return text
    return None
|
||||
|
||||
|
||||
def extract_value_arg(args: Sequence[str]) -> Optional[str]:
    """Convenience wrapper: pull a value for the standard ``-value`` flags.

    Delegates to ``extract_arg_value`` with ``VALUE_ARG_FLAGS`` and
    positional fallback enabled, so a bare non-flag token also counts.
    """
    return extract_arg_value(args, flags=VALUE_ARG_FLAGS, allow_positional=True)
|
||||
|
||||
|
||||
def has_flag(args: Sequence[str], flag: str) -> bool:
    """Return True when *flag* appears among *args* (case-insensitive).

    Best-effort: malformed input (None args, empty flag, odd token types)
    simply yields False rather than raising.
    """
    try:
        needle = str(flag or "").strip().lower()
        if not needle:
            return False
        for arg in (args or []):
            if str(arg).strip().lower() == needle:
                return True
        return False
    except Exception:
        return False
|
||||
|
||||
|
||||
def normalize_to_list(value: Any) -> List[Any]:
    """Wrap *value* in a list: None -> [], a list passes through unchanged,
    anything else becomes a single-element list."""
    if isinstance(value, list):
        return value
    return [] if value is None else [value]
|
||||
112
cmdnat/_status_shared.py
Normal file
112
cmdnat/_status_shared.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
from SYS.result_table import Table
|
||||
|
||||
|
||||
def upper_text(value: Any) -> str:
    """Stringify *value* and upper-case it; None becomes the empty string."""
    if value is None:
        return ""
    return str(value).upper()
|
||||
|
||||
|
||||
def add_startup_check(
    table: Table,
    status: str,
    name: str,
    *,
    provider: str = "",
    store: str = "",
    files: int | str | None = None,
    detail: str = "",
) -> None:
    """Append one startup-check row to *table*.

    Every text cell is upper-cased for display; FILES is rendered as-is
    (empty string when *files* is None).
    """
    def _up(value: Any) -> str:
        # Same contract as upper_text: None -> "", else str(...).upper().
        return ("" if value is None else str(value)).upper()

    row = table.add_row()
    row.add_column("STATUS", _up(status))
    row.add_column("NAME", _up(name))
    row.add_column("PROVIDER", _up(provider or ""))
    row.add_column("STORE", _up(store or ""))
    row.add_column("FILES", "" if files is None else str(files))
    row.add_column("DETAIL", _up(detail or ""))
|
||||
|
||||
|
||||
def has_store_subtype(cfg: dict, subtype: str) -> bool:
    """True when config declares at least one non-empty store of *subtype*.

    Expects the shape ``cfg["store"][subtype] = {name: {...}, ...}``; any
    deviation from that shape simply yields False.
    """
    bucket = cfg.get("store")
    if isinstance(bucket, dict):
        entries = bucket.get(subtype)
        if isinstance(entries, dict):
            for entry in entries.values():
                if isinstance(entry, dict) and entry:
                    return True
    return False
|
||||
|
||||
|
||||
def has_provider(cfg: dict, name: str) -> bool:
    """True when config has a non-empty ``provider.<name>`` section.

    *name* is normalized (stripped, lower-cased) before lookup.
    """
    section = cfg.get("provider")
    if not isinstance(section, dict):
        return False
    key = str(name).strip().lower()
    entry = section.get(key)
    return bool(entry) and isinstance(entry, dict)
|
||||
|
||||
|
||||
def has_tool(cfg: dict, name: str) -> bool:
    """True when config has a non-empty ``tool.<name>`` section.

    *name* is normalized (stripped, lower-cased) before lookup.
    """
    section = cfg.get("tool")
    if not isinstance(section, dict):
        return False
    entry = section.get(str(name).strip().lower())
    return isinstance(entry, dict) and len(entry) > 0
|
||||
|
||||
|
||||
def ping_url(url: str, timeout: float = 3.0) -> tuple[bool, str]:
    """Probe *url* with a single GET and report reachability.

    Any HTTP status below 500 counts as "reachable" — a 4xx still proves
    the server is up.  Returns ``(ok, detail)`` where detail embeds the
    URL plus either the status code or the failure reason.
    """
    try:
        from API.HTTP import HTTPClient

        with HTTPClient(timeout=timeout, retries=1) as client:
            response = client.get(url, allow_redirects=True)
            code = int(getattr(response, "status_code", 0) or 0)
        return 200 <= code < 500, f"{url} (HTTP {code})"
    except httpx.TimeoutException:
        return False, f"{url} (timeout)"
    except Exception as exc:
        # Connection refused, DNS failure, etc. — report the exception class.
        return False, f"{url} ({type(exc).__name__})"
|
||||
|
||||
|
||||
def provider_display_name(key: str) -> str:
    """Map a provider key to a human-friendly display label.

    Known brand names keep their canonical casing; anything else is simply
    first-letter-capitalized.  Empty/None input yields the generic
    "Provider".
    """
    label = (key or "").strip()
    branded = {
        "openlibrary": "OpenLibrary",
        "alldebrid": "AllDebrid",
        "youtube": "YouTube",
    }.get(label.lower())
    if branded is not None:
        return branded
    if not label:
        return "Provider"
    return label[0].upper() + label[1:]
|
||||
|
||||
|
||||
def default_provider_ping_targets(provider_key: str) -> list[str]:
    """Return the health-check URLs to try for a given provider key.

    Unknown providers (or ones whose mirror list cannot be imported)
    yield an empty list.
    """
    provider = (provider_key or "").strip().lower()
    static_targets = {
        "openlibrary": ["https://openlibrary.org"],
        "youtube": ["https://www.youtube.com"],
        "bandcamp": ["https://bandcamp.com"],
    }
    if provider in static_targets:
        return static_targets[provider]
    if provider == "libgen":
        # Libgen mirrors live in the provider module; probe the lightweight
        # JSON endpoint on each mirror.
        try:
            from Provider.libgen import MIRRORS
        except ImportError:
            return []
        targets: list[str] = []
        for mirror in (MIRRORS or []):
            text = str(mirror)
            if text.strip():
                targets.append(text.rstrip("/") + "/json.php")
        return targets
    return []
|
||||
|
||||
|
||||
def ping_first(urls: list[str]) -> tuple[bool, str]:
    """Ping candidates in order and return the first success.

    When every candidate fails, the cached failure result of the first URL
    is returned.  (Previously the first URL was pinged a *second* time just
    to produce that detail string, doubling the network cost — and timeout
    wait — on total outage.)
    """
    first_failure: tuple[bool, str] | None = None
    for url in urls:
        result = ping_url(url)
        if result[0]:
            return result
        if first_failure is None:
            first_failure = result
    if first_failure is not None:
        return first_failure
    return False, "No ping target"
|
||||
@@ -1,7 +1,7 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import List, Dict, Any, Sequence
|
||||
from typing import List, Dict, Any, Sequence, Optional
|
||||
from SYS.cmdlet_spec import Cmdlet, CmdletArg
|
||||
from SYS.logger import log
|
||||
from SYS.result_table import Table
|
||||
@@ -12,22 +12,45 @@ ADJECTIVE_FILE = os.path.join(
|
||||
"cmdnat",
|
||||
"adjective.json"
|
||||
)
|
||||
_ADJECTIVE_CACHE: Optional[Dict[str, List[str]]] = None
|
||||
_ADJECTIVE_CACHE_MTIME_NS: Optional[int] = None
|
||||
|
||||
|
||||
def _load_adjectives() -> Dict[str, List[str]]:
|
||||
global _ADJECTIVE_CACHE, _ADJECTIVE_CACHE_MTIME_NS
|
||||
try:
|
||||
if os.path.exists(ADJECTIVE_FILE):
|
||||
with open(ADJECTIVE_FILE, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
if not os.path.exists(ADJECTIVE_FILE):
|
||||
_ADJECTIVE_CACHE = {}
|
||||
_ADJECTIVE_CACHE_MTIME_NS = None
|
||||
return {}
|
||||
|
||||
current_mtime_ns = os.stat(ADJECTIVE_FILE).st_mtime_ns
|
||||
if (_ADJECTIVE_CACHE is not None and
|
||||
_ADJECTIVE_CACHE_MTIME_NS == current_mtime_ns):
|
||||
return _ADJECTIVE_CACHE
|
||||
|
||||
with open(ADJECTIVE_FILE, "r", encoding="utf-8") as f:
|
||||
loaded = json.load(f)
|
||||
if not isinstance(loaded, dict):
|
||||
loaded = {}
|
||||
|
||||
_ADJECTIVE_CACHE = loaded
|
||||
_ADJECTIVE_CACHE_MTIME_NS = current_mtime_ns
|
||||
return _ADJECTIVE_CACHE
|
||||
except Exception as e:
|
||||
log(f"Error loading adjectives: {e}", file=sys.stderr)
|
||||
_ADJECTIVE_CACHE = {}
|
||||
_ADJECTIVE_CACHE_MTIME_NS = None
|
||||
return {}
|
||||
|
||||
|
||||
def _save_adjectives(data: Dict[str, List[str]]) -> bool:
|
||||
global _ADJECTIVE_CACHE, _ADJECTIVE_CACHE_MTIME_NS
|
||||
try:
|
||||
with open(ADJECTIVE_FILE, "w", encoding="utf-8") as f:
|
||||
json.dump(data, f, indent=2)
|
||||
_ADJECTIVE_CACHE = data
|
||||
_ADJECTIVE_CACHE_MTIME_NS = os.stat(ADJECTIVE_FILE).st_mtime_ns
|
||||
return True
|
||||
except Exception as e:
|
||||
log(f"Error saving adjectives: {e}", file=sys.stderr)
|
||||
|
||||
@@ -1,9 +1,18 @@
|
||||
from typing import List, Dict, Any, Optional, Sequence
|
||||
|
||||
from SYS.cmdlet_spec import Cmdlet, CmdletArg
|
||||
from SYS.config import load_config, save_config, save_config_and_verify
|
||||
from SYS.config import (
|
||||
load_config,
|
||||
save_config,
|
||||
save_config_and_verify,
|
||||
set_nested_config_value,
|
||||
)
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.result_table import Table
|
||||
from cmdnat._parsing import (
|
||||
extract_piped_value as _extract_piped_value,
|
||||
extract_value_arg as _extract_value_arg,
|
||||
)
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name=".config",
|
||||
@@ -43,91 +52,7 @@ def flatten_config(config: Dict[str, Any], parent_key: str = "", sep: str = ".")
|
||||
|
||||
|
||||
def set_nested_config(config: Dict[str, Any], key: str, value: str) -> bool:
|
||||
keys = key.split(".")
|
||||
d = config
|
||||
|
||||
# Navigate to the parent dict
|
||||
for k in keys[:-1]:
|
||||
if k not in d or not isinstance(d[k], dict):
|
||||
d[k] = {}
|
||||
d = d[k]
|
||||
|
||||
last_key = keys[-1]
|
||||
|
||||
# Try to preserve type if key exists
|
||||
if last_key in d:
|
||||
current_val = d[last_key]
|
||||
if isinstance(current_val, bool):
|
||||
if value.lower() in ("true", "yes", "1", "on"):
|
||||
d[last_key] = True
|
||||
elif value.lower() in ("false", "no", "0", "off"):
|
||||
d[last_key] = False
|
||||
else:
|
||||
# Fallback to boolean conversion of string (usually True for non-empty)
|
||||
# But for config, explicit is better.
|
||||
print(f"Warning: Could not convert '{value}' to boolean. Using string.")
|
||||
d[last_key] = value
|
||||
elif isinstance(current_val, int):
|
||||
try:
|
||||
d[last_key] = int(value)
|
||||
except ValueError:
|
||||
print(f"Warning: Could not convert '{value}' to int. Using string.")
|
||||
d[last_key] = value
|
||||
elif isinstance(current_val, float):
|
||||
try:
|
||||
d[last_key] = float(value)
|
||||
except ValueError:
|
||||
print(f"Warning: Could not convert '{value}' to float. Using string.")
|
||||
d[last_key] = value
|
||||
else:
|
||||
d[last_key] = value
|
||||
else:
|
||||
# New key, try to infer type
|
||||
if value.lower() in ("true", "false"):
|
||||
d[last_key] = value.lower() == "true"
|
||||
elif value.isdigit():
|
||||
d[last_key] = int(value)
|
||||
else:
|
||||
d[last_key] = value
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _extract_piped_value(result: Any) -> Optional[str]:
|
||||
if isinstance(result, str):
|
||||
return result.strip() if result.strip() else None
|
||||
if isinstance(result, (int, float)):
|
||||
return str(result)
|
||||
if isinstance(result, dict):
|
||||
val = result.get("value")
|
||||
if val is not None:
|
||||
return str(val).strip()
|
||||
return None
|
||||
|
||||
|
||||
def _extract_value_arg(args: Sequence[str]) -> Optional[str]:
|
||||
if not args:
|
||||
return None
|
||||
tokens = [str(tok) for tok in args if tok is not None]
|
||||
flags = {"-value", "--value", "-set-value", "--set-value"}
|
||||
for idx, tok in enumerate(tokens):
|
||||
text = tok.strip()
|
||||
if not text:
|
||||
continue
|
||||
low = text.lower()
|
||||
if low in flags and idx + 1 < len(tokens):
|
||||
candidate = str(tokens[idx + 1]).strip()
|
||||
if candidate:
|
||||
return candidate
|
||||
if "=" in low:
|
||||
head, val = low.split("=", 1)
|
||||
if head in flags and val:
|
||||
return val.strip()
|
||||
for tok in tokens:
|
||||
text = str(tok).strip()
|
||||
if text and not text.startswith("-"):
|
||||
return text
|
||||
return None
|
||||
return set_nested_config_value(config, key, value, on_error=print)
|
||||
|
||||
|
||||
def _get_selected_config_key() -> Optional[str]:
|
||||
|
||||
@@ -12,8 +12,16 @@ from SYS.cmdlet_spec import Cmdlet, CmdletArg
|
||||
from SYS.config import load_config, save_config
|
||||
from SYS.logger import log, debug
|
||||
from SYS.result_table import Table
|
||||
from SYS.item_accessors import get_sha256_hex
|
||||
from SYS.utils import extract_hydrus_hash_from_url
|
||||
from SYS import pipeline as ctx
|
||||
from cmdnat._parsing import (
|
||||
extract_arg_value,
|
||||
extract_piped_value as _extract_piped_value,
|
||||
extract_value_arg as _extract_value_arg,
|
||||
has_flag as _has_flag,
|
||||
normalize_to_list as _normalize_to_list,
|
||||
)
|
||||
|
||||
_MATRIX_PENDING_ITEMS_KEY = "matrix_pending_items"
|
||||
_MATRIX_PENDING_TEXT_KEY = "matrix_pending_text"
|
||||
@@ -21,62 +29,9 @@ _MATRIX_MENU_STATE_KEY = "matrix_menu_state"
|
||||
_MATRIX_SELECTED_SETTING_KEY_KEY = "matrix_selected_setting_key"
|
||||
|
||||
|
||||
def _extract_piped_value(result: Any) -> Optional[str]:
|
||||
"""Extract the piped value from result (string, number, or dict with 'value' key)."""
|
||||
if isinstance(result, str):
|
||||
return result.strip() if result.strip() else None
|
||||
if isinstance(result, (int, float)):
|
||||
return str(result)
|
||||
if isinstance(result, dict):
|
||||
# Fallback to value field if it's a dict
|
||||
val = result.get("value")
|
||||
if val is not None:
|
||||
return str(val).strip()
|
||||
return None
|
||||
|
||||
|
||||
def _extract_value_arg(args: Sequence[str]) -> Optional[str]:
|
||||
"""Extract a fallback value from command-line args (value flag or positional)."""
|
||||
if not args:
|
||||
return None
|
||||
tokens = [str(tok) for tok in args if tok is not None]
|
||||
value_flags = {"-value", "--value", "-set-value", "--set-value"}
|
||||
for idx, tok in enumerate(tokens):
|
||||
low = tok.strip()
|
||||
if not low:
|
||||
continue
|
||||
low_lower = low.lower()
|
||||
if low_lower in value_flags and idx + 1 < len(tokens):
|
||||
candidate = str(tokens[idx + 1]).strip()
|
||||
if candidate:
|
||||
return candidate
|
||||
if "=" in low_lower:
|
||||
head, val = low_lower.split("=", 1)
|
||||
if head in value_flags and val:
|
||||
return val.strip()
|
||||
# Fallback to first non-flag token
|
||||
for tok in tokens:
|
||||
text = str(tok).strip()
|
||||
if text and not text.startswith("-"):
|
||||
return text
|
||||
return None
|
||||
|
||||
|
||||
def _extract_set_value_arg(args: Sequence[str]) -> Optional[str]:
|
||||
"""Extract the value from -set-value flag."""
|
||||
if not args:
|
||||
return None
|
||||
try:
|
||||
tokens = list(args)
|
||||
except Exception:
|
||||
return None
|
||||
for i, tok in enumerate(tokens):
|
||||
try:
|
||||
if str(tok).lower() == "-set-value" and i + 1 < len(tokens):
|
||||
return str(tokens[i + 1]).strip()
|
||||
except Exception:
|
||||
continue
|
||||
return None
|
||||
return extract_arg_value(args, flags={"-set-value"})
|
||||
|
||||
|
||||
def _update_matrix_config(config: Dict[str, Any], key: str, value: Any) -> bool:
|
||||
@@ -122,16 +77,6 @@ def _update_matrix_config(config: Dict[str, Any], key: str, value: Any) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def _has_flag(args: Sequence[str], flag: str) -> bool:
|
||||
try:
|
||||
want = str(flag or "").strip().lower()
|
||||
if not want:
|
||||
return False
|
||||
return any(str(a).strip().lower() == want for a in (args or []))
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _parse_config_room_filter_ids(config: Dict[str, Any]) -> List[str]:
|
||||
try:
|
||||
if not isinstance(config, dict):
|
||||
@@ -426,14 +371,6 @@ def _extract_text_arg(args: Sequence[str]) -> str:
|
||||
return ""
|
||||
|
||||
|
||||
def _normalize_to_list(value: Any) -> List[Any]:
|
||||
if value is None:
|
||||
return []
|
||||
if isinstance(value, list):
|
||||
return value
|
||||
return [value]
|
||||
|
||||
|
||||
def _extract_room_id(room_obj: Any) -> Optional[str]:
|
||||
try:
|
||||
# PipeObject stores unknown fields in .extra
|
||||
@@ -525,22 +462,8 @@ def _extract_url(item: Any) -> Optional[str]:
|
||||
return None
|
||||
|
||||
|
||||
_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")
|
||||
|
||||
|
||||
def _extract_sha256_hex(item: Any) -> Optional[str]:
|
||||
try:
|
||||
if hasattr(item, "hash"):
|
||||
h = getattr(item, "hash")
|
||||
if isinstance(h, str) and _SHA256_RE.fullmatch(h.strip()):
|
||||
return h.strip().lower()
|
||||
if isinstance(item, dict):
|
||||
h = item.get("hash")
|
||||
if isinstance(h, str) and _SHA256_RE.fullmatch(h.strip()):
|
||||
return h.strip().lower()
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
return get_sha256_hex(item, "hash")
|
||||
|
||||
|
||||
def _extract_hash_from_hydrus_file_url(url: str) -> Optional[str]:
|
||||
|
||||
@@ -39,6 +39,7 @@ _WINDOWS_RESERVED_NAMES = {
|
||||
*(f"com{i}" for i in range(1, 10)),
|
||||
*(f"lpt{i}" for i in range(1, 10)),
|
||||
}
|
||||
_ILLEGAL_FILENAME_CHARS_RE = re.compile(r'[<>:"/\\|?*]')
|
||||
|
||||
|
||||
def _sanitize_filename_base(text: str) -> str:
|
||||
@@ -48,7 +49,7 @@ def _sanitize_filename_base(text: str) -> str:
|
||||
return "table"
|
||||
|
||||
# Replace characters illegal on Windows (and generally unsafe cross-platform).
|
||||
s = re.sub(r'[<>:"/\\|?*]', " ", s)
|
||||
s = _ILLEGAL_FILENAME_CHARS_RE.sub(" ", s)
|
||||
|
||||
# Drop control characters.
|
||||
s = "".join(ch for ch in s if ch.isprintable())
|
||||
|
||||
@@ -23,6 +23,15 @@ _ALLDEBRID_UNLOCK_CACHE: Dict[str,
|
||||
str] = {}
|
||||
_NOTES_PREFETCH_INFLIGHT: set[str] = set()
|
||||
_NOTES_PREFETCH_LOCK = threading.Lock()
|
||||
_PLAYLIST_STORE_CACHE: Optional[Dict[str, Any]] = None
|
||||
_PLAYLIST_STORE_MTIME_NS: Optional[int] = None
|
||||
_SHA256_RE = re.compile(r"[0-9a-f]{64}")
|
||||
_SHA256_FULL_RE = re.compile(r"^[0-9a-f]{64}$")
|
||||
_EXTINF_TITLE_RE = re.compile(r"#EXTINF:-1,(.*?)(?:\n|\r|$)")
|
||||
_WINDOWS_PATH_RE = re.compile(r"^[a-z]:[\\/]", flags=re.IGNORECASE)
|
||||
_HASH_QUERY_RE = re.compile(r"hash=([0-9a-f]{64})")
|
||||
_IPV4_RE = re.compile(r"^\d+\.\d+\.\d+\.\d+$")
|
||||
_MPD_PATH_RE = re.compile(r"\.mpd($|\?)")
|
||||
|
||||
|
||||
def _repo_root() -> Path:
|
||||
@@ -36,26 +45,56 @@ def _playlist_store_path() -> Path:
|
||||
return _repo_root() / "mpv_playlists.json"
|
||||
|
||||
|
||||
def _load_playlist_store(path: Path) -> Dict[str, Any]:
|
||||
if not path.exists():
|
||||
return {"next_id": 1, "playlists": []}
|
||||
def _new_playlist_store() -> Dict[str, Any]:
|
||||
return {"next_id": 1, "playlists": []}
|
||||
|
||||
|
||||
def _normalize_playlist_store(data: Any) -> Dict[str, Any]:
|
||||
if not isinstance(data, dict):
|
||||
return _new_playlist_store()
|
||||
|
||||
normalized = dict(data)
|
||||
try:
|
||||
data = json.loads(path.read_text(encoding="utf-8"))
|
||||
if not isinstance(data, dict):
|
||||
return {"next_id": 1, "playlists": []}
|
||||
data.setdefault("next_id", 1)
|
||||
data.setdefault("playlists", [])
|
||||
if not isinstance(data["playlists"], list):
|
||||
data["playlists"] = []
|
||||
next_id = int(normalized.get("next_id") or 1)
|
||||
except Exception:
|
||||
next_id = 1
|
||||
normalized["next_id"] = max(next_id, 1)
|
||||
|
||||
playlists = normalized.get("playlists")
|
||||
normalized["playlists"] = playlists if isinstance(playlists, list) else []
|
||||
return normalized
|
||||
|
||||
|
||||
def _load_playlist_store(path: Path) -> Dict[str, Any]:
|
||||
global _PLAYLIST_STORE_CACHE, _PLAYLIST_STORE_MTIME_NS
|
||||
if not path.exists():
|
||||
_PLAYLIST_STORE_CACHE = _new_playlist_store()
|
||||
_PLAYLIST_STORE_MTIME_NS = None
|
||||
return _PLAYLIST_STORE_CACHE
|
||||
try:
|
||||
current_mtime_ns = path.stat().st_mtime_ns
|
||||
if (_PLAYLIST_STORE_CACHE is not None and
|
||||
_PLAYLIST_STORE_MTIME_NS == current_mtime_ns):
|
||||
return _PLAYLIST_STORE_CACHE
|
||||
|
||||
data = _normalize_playlist_store(json.loads(path.read_text(encoding="utf-8")))
|
||||
_PLAYLIST_STORE_CACHE = data
|
||||
_PLAYLIST_STORE_MTIME_NS = current_mtime_ns
|
||||
return data
|
||||
except Exception:
|
||||
return {"next_id": 1, "playlists": []}
|
||||
_PLAYLIST_STORE_CACHE = _new_playlist_store()
|
||||
_PLAYLIST_STORE_MTIME_NS = None
|
||||
return _PLAYLIST_STORE_CACHE
|
||||
|
||||
|
||||
def _save_playlist_store(path: Path, data: Dict[str, Any]) -> bool:
|
||||
global _PLAYLIST_STORE_CACHE, _PLAYLIST_STORE_MTIME_NS
|
||||
try:
|
||||
normalized = _normalize_playlist_store(data)
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
path.write_text(json.dumps(data, indent=2), encoding="utf-8")
|
||||
path.write_text(json.dumps(normalized, indent=2), encoding="utf-8")
|
||||
_PLAYLIST_STORE_CACHE = normalized
|
||||
_PLAYLIST_STORE_MTIME_NS = path.stat().st_mtime_ns
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
@@ -559,7 +598,7 @@ def _extract_store_and_hash(item: Any) -> tuple[Optional[str], Optional[str]]:
|
||||
else:
|
||||
text = getattr(item, "path", None) or getattr(item, "url", None)
|
||||
if text:
|
||||
m = re.search(r"[0-9a-f]{64}", str(text).lower())
|
||||
m = _SHA256_RE.search(str(text).lower())
|
||||
if m:
|
||||
file_hash = m.group(0)
|
||||
except Exception:
|
||||
@@ -707,7 +746,7 @@ def _extract_title_from_item(item: Dict[str, Any]) -> str:
|
||||
try:
|
||||
# Extract title from #EXTINF:-1,Title
|
||||
# Use regex to find title between #EXTINF:-1, and newline
|
||||
match = re.search(r"#EXTINF:-1,(.*?)(?:\n|\r|$)", filename)
|
||||
match = _EXTINF_TITLE_RE.search(filename)
|
||||
if match:
|
||||
extracted_title = match.group(1).strip()
|
||||
if not title or title == "memory://":
|
||||
@@ -817,7 +856,7 @@ def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
|
||||
return None
|
||||
# If it's already a bare hydrus hash, use it directly
|
||||
lower_real = real.lower()
|
||||
if re.fullmatch(r"[0-9a-f]{64}", lower_real):
|
||||
if _SHA256_FULL_RE.fullmatch(lower_real):
|
||||
return lower_real
|
||||
|
||||
# If it's a hydrus file URL, normalize to the hash for dedupe
|
||||
@@ -829,7 +868,7 @@ def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
|
||||
if parsed.path.endswith("/get_files/file"):
|
||||
qs = parse_qs(parsed.query)
|
||||
h = qs.get("hash", [None])[0]
|
||||
if h and re.fullmatch(r"[0-9a-f]{64}", h.lower()):
|
||||
if h and _SHA256_FULL_RE.fullmatch(h.lower()):
|
||||
return h.lower()
|
||||
except Exception:
|
||||
pass
|
||||
@@ -862,7 +901,7 @@ def _infer_store_from_playlist_item(
|
||||
target = memory_target
|
||||
|
||||
# Hydrus hashes: bare 64-hex entries
|
||||
if re.fullmatch(r"[0-9a-f]{64}", target.lower()):
|
||||
if _SHA256_FULL_RE.fullmatch(target.lower()):
|
||||
# If we have file_storage, query each Hydrus instance to find which one has this hash
|
||||
if file_storage:
|
||||
hash_str = target.lower()
|
||||
@@ -877,7 +916,7 @@ def _infer_store_from_playlist_item(
|
||||
if lower.startswith("hydrus://"):
|
||||
# Extract hash from hydrus:// URL if possible
|
||||
if file_storage:
|
||||
hash_match = re.search(r"[0-9a-f]{64}", target.lower())
|
||||
hash_match = _SHA256_RE.search(target.lower())
|
||||
if hash_match:
|
||||
hash_str = hash_match.group(0)
|
||||
hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
|
||||
@@ -886,9 +925,7 @@ def _infer_store_from_playlist_item(
|
||||
return "hydrus"
|
||||
|
||||
# Windows / UNC paths
|
||||
if re.match(r"^[a-z]:[\\/]",
|
||||
target,
|
||||
flags=re.IGNORECASE) or target.startswith("\\\\"):
|
||||
if _WINDOWS_PATH_RE.match(target) or target.startswith("\\\\"):
|
||||
return "local"
|
||||
|
||||
# file:// url
|
||||
@@ -918,7 +955,7 @@ def _infer_store_from_playlist_item(
|
||||
# Hydrus API URL - try to extract hash and find instance
|
||||
if file_storage:
|
||||
# Try to extract hash from URL parameters
|
||||
hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
|
||||
hash_match = _HASH_QUERY_RE.search(target.lower())
|
||||
if hash_match:
|
||||
hash_str = hash_match.group(1)
|
||||
hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
|
||||
@@ -929,10 +966,10 @@ def _infer_store_from_playlist_item(
|
||||
if hydrus_instance:
|
||||
return hydrus_instance
|
||||
return "hydrus"
|
||||
if re.match(r"^\d+\.\d+\.\d+\.\d+$", host_stripped) and "get_files" in path:
|
||||
if _IPV4_RE.match(host_stripped) and "get_files" in path:
|
||||
# IP-based Hydrus URL
|
||||
if file_storage:
|
||||
hash_match = re.search(r"hash=([0-9a-f]{64})", target.lower())
|
||||
hash_match = _HASH_QUERY_RE.search(target.lower())
|
||||
if hash_match:
|
||||
hash_str = hash_match.group(1)
|
||||
hydrus_instance = _find_hydrus_instance_for_hash(hash_str, file_storage)
|
||||
@@ -1002,7 +1039,7 @@ def _is_hydrus_path(path: str, hydrus_url: Optional[str]) -> bool:
|
||||
pass
|
||||
if "get_files" in path_part or "file?hash=" in path_part:
|
||||
return True
|
||||
if re.match(r"^\d+\.\d+\.\d+\.\d+$", host) and "get_files" in path_part:
|
||||
if _IPV4_RE.match(host) and "get_files" in path_part:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -1493,7 +1530,7 @@ def _queue_items(
|
||||
# Set it via IPC before loadfile so the currently running MPV can play the manifest.
|
||||
try:
|
||||
target_str = str(target or "")
|
||||
if re.search(r"\.mpd($|\?)", target_str.lower()):
|
||||
if _MPD_PATH_RE.search(target_str.lower()):
|
||||
_send_ipc_command(
|
||||
{
|
||||
"command": [
|
||||
@@ -1556,8 +1593,9 @@ def _queue_items(
|
||||
|
||||
if target:
|
||||
# If we just have a hydrus hash, build a direct file URL for MPV
|
||||
if re.fullmatch(r"[0-9a-f]{64}",
|
||||
str(target).strip().lower()) and effective_hydrus_url:
|
||||
if _SHA256_FULL_RE.fullmatch(
|
||||
str(target).strip().lower()
|
||||
) and effective_hydrus_url:
|
||||
target = (
|
||||
f"{effective_hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"
|
||||
)
|
||||
@@ -2337,7 +2375,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Check if it's a Hydrus URL
|
||||
if "get_files/file" in real_path or "hash=" in real_path:
|
||||
# Extract hash from Hydrus URL
|
||||
hash_match = re.search(r"hash=([0-9a-f]{64})", real_path.lower())
|
||||
hash_match = _HASH_QUERY_RE.search(real_path.lower())
|
||||
if hash_match:
|
||||
file_hash = hash_match.group(1)
|
||||
# Try to find which Hydrus instance has this file
|
||||
@@ -2576,7 +2614,7 @@ def _start_mpv(
|
||||
candidate = it.get("path") or it.get("url")
|
||||
else:
|
||||
candidate = getattr(it, "path", None) or getattr(it, "url", None)
|
||||
if candidate and re.search(r"\.mpd($|\?)", str(candidate).lower()):
|
||||
if candidate and _MPD_PATH_RE.search(str(candidate).lower()):
|
||||
needs_mpd_whitelist = True
|
||||
break
|
||||
if needs_mpd_whitelist:
|
||||
|
||||
@@ -7,6 +7,16 @@ from SYS.cmdlet_spec import Cmdlet
|
||||
from SYS import pipeline as ctx
|
||||
from SYS.result_table import Table
|
||||
from SYS.logger import set_debug, debug
|
||||
from cmdnat._status_shared import (
|
||||
add_startup_check as _add_startup_check,
|
||||
default_provider_ping_targets as _default_provider_ping_targets,
|
||||
has_provider as _has_provider,
|
||||
has_store_subtype as _has_store_subtype,
|
||||
has_tool as _has_tool,
|
||||
ping_first as _ping_first,
|
||||
ping_url as _ping_url,
|
||||
provider_display_name as _provider_display_name,
|
||||
)
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name=".status",
|
||||
@@ -15,91 +25,6 @@ CMDLET = Cmdlet(
|
||||
arg=[],
|
||||
)
|
||||
|
||||
def _upper(value: Any) -> str:
|
||||
text = "" if value is None else str(value)
|
||||
return text.upper()
|
||||
|
||||
def _add_startup_check(
|
||||
table: Table,
|
||||
status: str,
|
||||
name: str,
|
||||
*,
|
||||
provider: str = "",
|
||||
store: str = "",
|
||||
files: int | str | None = None,
|
||||
detail: str = "",
|
||||
) -> None:
|
||||
row = table.add_row()
|
||||
row.add_column("STATUS", _upper(status))
|
||||
row.add_column("NAME", _upper(name))
|
||||
row.add_column("PROVIDER", _upper(provider or ""))
|
||||
row.add_column("STORE", _upper(store or ""))
|
||||
row.add_column("FILES", "" if files is None else str(files))
|
||||
row.add_column("DETAIL", _upper(detail or ""))
|
||||
|
||||
def _has_store_subtype(cfg: dict, subtype: str) -> bool:
|
||||
store_cfg = cfg.get("store")
|
||||
if not isinstance(store_cfg, dict):
|
||||
return False
|
||||
bucket = store_cfg.get(subtype)
|
||||
if not isinstance(bucket, dict):
|
||||
return False
|
||||
return any(isinstance(v, dict) and bool(v) for v in bucket.values())
|
||||
|
||||
def _has_provider(cfg: dict, name: str) -> bool:
|
||||
provider_cfg = cfg.get("provider")
|
||||
if not isinstance(provider_cfg, dict):
|
||||
return False
|
||||
block = provider_cfg.get(str(name).strip().lower())
|
||||
return isinstance(block, dict) and bool(block)
|
||||
|
||||
def _has_tool(cfg: dict, name: str) -> bool:
|
||||
tool_cfg = cfg.get("tool")
|
||||
if not isinstance(tool_cfg, dict):
|
||||
return False
|
||||
block = tool_cfg.get(str(name).strip().lower())
|
||||
return isinstance(block, dict) and bool(block)
|
||||
|
||||
def _ping_url(url: str, timeout: float = 3.0) -> tuple[bool, str]:
|
||||
try:
|
||||
from API.HTTP import HTTPClient
|
||||
with HTTPClient(timeout=timeout, retries=1) as client:
|
||||
resp = client.get(url, allow_redirects=True)
|
||||
code = int(getattr(resp, "status_code", 0) or 0)
|
||||
ok = 200 <= code < 500
|
||||
return ok, f"{url} (HTTP {code})"
|
||||
except Exception as exc:
|
||||
return False, f"{url} ({type(exc).__name__})"
|
||||
|
||||
def _provider_display_name(key: str) -> str:
|
||||
k = (key or "").strip()
|
||||
low = k.lower()
|
||||
if low == "openlibrary": return "OpenLibrary"
|
||||
if low == "alldebrid": return "AllDebrid"
|
||||
if low == "youtube": return "YouTube"
|
||||
return k[:1].upper() + k[1:] if k else "Provider"
|
||||
|
||||
def _default_provider_ping_targets(provider_key: str) -> list[str]:
|
||||
prov = (provider_key or "").strip().lower()
|
||||
if prov == "openlibrary": return ["https://openlibrary.org"]
|
||||
if prov == "youtube": return ["https://www.youtube.com"]
|
||||
if prov == "bandcamp": return ["https://bandcamp.com"]
|
||||
if prov == "libgen":
|
||||
try:
|
||||
from Provider.libgen import MIRRORS
|
||||
return [str(x).rstrip("/") + "/json.php" for x in (MIRRORS or []) if str(x).strip()]
|
||||
except ImportError: return []
|
||||
return []
|
||||
|
||||
def _ping_first(urls: list[str]) -> tuple[bool, str]:
|
||||
for u in urls:
|
||||
ok, detail = _ping_url(u)
|
||||
if ok: return True, detail
|
||||
if urls:
|
||||
ok, detail = _ping_url(urls[0])
|
||||
return ok, detail
|
||||
return False, "No ping target"
|
||||
|
||||
def _run(result: Any, args: List[str], config: Dict[str, Any]) -> int:
|
||||
startup_table = Table(
|
||||
"*********<IGNITIO>*********<NOUSEMPEH>*********<RUGRAPOG>*********<OMEGHAU>*********"
|
||||
|
||||
@@ -8,28 +8,10 @@ from SYS.cmdlet_spec import Cmdlet, CmdletArg
|
||||
from SYS.logger import log
|
||||
from SYS.result_table import Table
|
||||
from SYS import pipeline as ctx
|
||||
from cmdnat._parsing import has_flag as _has_flag, normalize_to_list as _normalize_to_list
|
||||
|
||||
_TELEGRAM_PENDING_ITEMS_KEY = "telegram_pending_items"
|
||||
|
||||
|
||||
def _has_flag(args: Sequence[str], flag: str) -> bool:
|
||||
try:
|
||||
want = str(flag or "").strip().lower()
|
||||
if not want:
|
||||
return False
|
||||
return any(str(a).strip().lower() == want for a in (args or []))
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _normalize_to_list(value: Any) -> List[Any]:
|
||||
if value is None:
|
||||
return []
|
||||
if isinstance(value, list):
|
||||
return value
|
||||
return [value]
|
||||
|
||||
|
||||
def _extract_chat_id(chat_obj: Any) -> Optional[int]:
|
||||
try:
|
||||
if isinstance(chat_obj, dict):
|
||||
|
||||
@@ -4,6 +4,7 @@ from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Sequence, Tuple
|
||||
|
||||
from SYS.config import get_nested_config_value as _get_nested
|
||||
from SYS.logger import debug
|
||||
|
||||
|
||||
@@ -28,15 +29,6 @@ def _debug_repr(value: Any, max_chars: int = 12000) -> str:
|
||||
return _truncate_debug_text(s, max_chars=max_chars)
|
||||
|
||||
|
||||
def _get_nested(config: Dict[str, Any], *path: str) -> Any:
|
||||
cur: Any = config
|
||||
for key in path:
|
||||
if not isinstance(cur, dict):
|
||||
return None
|
||||
cur = cur.get(key)
|
||||
return cur
|
||||
|
||||
|
||||
def _as_bool(value: Any, default: bool = False) -> bool:
|
||||
if value is None:
|
||||
return default
|
||||
|
||||
@@ -10,6 +10,7 @@ from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Iterator, Optional, Union
|
||||
|
||||
from SYS.config import get_nested_config_value as _get_nested
|
||||
from SYS.logger import debug
|
||||
|
||||
from playwright.sync_api import TimeoutError as PlaywrightTimeoutError
|
||||
@@ -24,15 +25,6 @@ __all__ = [
|
||||
]
|
||||
|
||||
|
||||
def _get_nested(config: Dict[str, Any], *path: str) -> Any:
|
||||
cur: Any = config
|
||||
for key in path:
|
||||
if not isinstance(cur, dict):
|
||||
return None
|
||||
cur = cur.get(key)
|
||||
return cur
|
||||
|
||||
|
||||
def _resolve_out_dir(arg_outdir: Optional[Union[str, Path]]) -> Path:
|
||||
"""Resolve an output directory using config when possible."""
|
||||
if arg_outdir:
|
||||
|
||||
@@ -18,6 +18,7 @@ from typing import Any, Dict, Iterator, List, Optional, Sequence, cast
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from SYS import pipeline as pipeline_context
|
||||
from SYS.config import get_nested_config_value as _get_nested
|
||||
from SYS.logger import debug, log
|
||||
from SYS.models import (
|
||||
DebugLogger,
|
||||
@@ -137,15 +138,6 @@ def _build_supported_domains() -> set[str]:
|
||||
return _SUPPORTED_DOMAINS
|
||||
|
||||
|
||||
def _get_nested(config: Dict[str, Any], *path: str) -> Any:
|
||||
cur: Any = config
|
||||
for key in path:
|
||||
if not isinstance(cur, dict):
|
||||
return None
|
||||
cur = cur.get(key)
|
||||
return cur
|
||||
|
||||
|
||||
def _parse_csv_list(value: Any) -> Optional[List[str]]:
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
Reference in New Issue
Block a user