This commit is contained in:
2026-02-07 14:58:13 -08:00
parent c8230cbb42
commit 60c2cc062c
9 changed files with 69 additions and 78 deletions

View File

@@ -102,7 +102,7 @@ def maybe_show_formats_table(
return 1
base_args: List[str] = []
out_arg = parsed.get("path") or parsed.get("output")
out_arg = parsed.get("path")
if out_arg:
base_args.extend(["-path", str(out_arg)])

View File

@@ -309,15 +309,10 @@ def get_provider_credentials(config: Dict[str, Any], provider: str) -> Optional[
def resolve_cookies_path(
config: Dict[str, Any], script_dir: Optional[Path] = None
) -> Optional[Path]:
# Support both legacy top-level `cookies=...` and the modular conf style:
# Only support modular config style:
# [tool=ytdlp]
# cookies="C:\\path\\cookies.txt"
values: list[Any] = []
try:
values.append(config.get("cookies"))
except Exception as exc:
logger.debug("resolve_cookies_path: failed to read top-level cookies: %s", exc, exc_info=True)
try:
tool = config.get("tool")
if isinstance(tool, dict):
@@ -328,14 +323,6 @@ def resolve_cookies_path(
except Exception as exc:
logger.debug("resolve_cookies_path: failed to read tool.ytdlp cookies: %s", exc, exc_info=True)
try:
ytdlp_block = config.get("ytdlp")
if isinstance(ytdlp_block, dict):
values.append(ytdlp_block.get("cookies"))
values.append(ytdlp_block.get("cookiefile"))
except Exception as exc:
logger.debug("resolve_cookies_path: failed to read ytdlp cookies block: %s", exc, exc_info=True)
base_dir = script_dir or SCRIPT_DIR
for value in values:
if not value:

View File

@@ -373,9 +373,6 @@ def normalize_urls(value: Any) -> List[str]:
text = raw.strip()
if not text:
return
# Support legacy prefixes like "url:https://...".
if text.lower().startswith("url:"):
text = text.split(":", 1)[1].strip()
# Prefer extracting obvious URLs to avoid splitting inside query strings.
matches = re.findall(r"https?://[^\s,]+", text, flags=re.IGNORECASE)

View File

@@ -162,13 +162,13 @@ def _as_dict(item: Any) -> Optional[Dict[str, Any]]:
def extract_store_value(item: Any) -> str:
"""Extract storage backend name from item.
Searches item for store identifier using multiple field names:
store, table, source, storage (legacy).
Searches item for store identifier using field names:
store, table.
Args:
item: Object or dict with store information
Returns:
Store name as string (e.g., "hydrus", "local", "") if not found
"""
@@ -176,10 +176,8 @@ def extract_store_value(item: Any) -> str:
store = _get_first_dict_value(
data,
["store",
"table",
"source",
"storage"]
) # storage is legacy
"table"]
)
return str(store or "").strip()
@@ -1059,8 +1057,7 @@ class Table:
row.add_column("Tag", item.tag_name)
# Source/Store (where the tag values come from)
# Support both 'source' (legacy) and 'store' (new) attribute names
source_val = getattr(item, "source", None) or getattr(item, "store", None)
source_val = getattr(item, "store", None)
if source_val:
row.add_column("Store", source_val)

View File

@@ -14,6 +14,31 @@ import httpx
from SYS.logger import debug, log
from SYS.utils_constant import mime_maps
# Every extension string known to the MIME table, normalised to a bare
# suffix (no leading dot).  Built once at import time so membership tests
# stay O(1).
_KNOWN_EXTS = {
    str(entry.get("ext") or "").strip().lstrip(".")
    for table in mime_maps.values()
    for entry in table.values()
    if isinstance(entry, dict) and entry.get("ext")
}
def _resolve_ext_from_meta(meta: Dict[str, Any], mime_type: Optional[str]) -> str:
    """Pick a usable file extension for a Hydrus metadata record.

    Prefers the extension Hydrus reports in ``meta["ext"]``, but rejects it
    when it is not one of the extensions in ``_KNOWN_EXTS`` or when it is the
    pseudo-extension ``"ebook"`` (a category, not a real suffix).  When the
    reported extension is unusable, falls back to looking *mime_type* up in
    ``mime_maps``.

    Args:
        meta: Hydrus file-metadata dict (only the ``"ext"`` key is read).
        mime_type: MIME string used for the fallback lookup, or None.

    Returns:
        A bare extension such as ``"webm"`` (no leading dot), or ``""`` when
        nothing usable could be determined.
    """
    candidate = str(meta.get("ext") or "").strip().lstrip(".")
    if candidate and candidate not in _KNOWN_EXTS:
        candidate = ""
    if candidate.lower() == "ebook":
        candidate = ""
    if candidate or not mime_type:
        return candidate
    # Fallback: scan each MIME table; within a table only the first entry
    # matching mime_type is consulted.
    for table in mime_maps.values():
        hit = next(
            (entry for entry in table.values() if mime_type in entry.get("mimes", [])),
            None,
        )
        if hit is not None:
            candidate = str(hit.get("ext", "")).strip().lstrip(".")
            if candidate:
                break
    return candidate
from Store._base import Store
_HYDRUS_INIT_CHECK_CACHE: dict[tuple[str,
@@ -67,6 +92,16 @@ class HydrusNetwork(Store):
store_name = getattr(self, "NAME", None) or "unknown"
return f"[hydrusnetwork:{store_name}]"
def _append_access_key(self, url: str) -> str:
if not url:
return url
if "access_key=" in url:
return url
if not getattr(self, "API", None):
return url
separator = "&" if "?" in url else "?"
return f"{url}{separator}access_key={quote(str(self.API))}"
def __new__(cls, *args: Any, **kwargs: Any) -> "HydrusNetwork":
instance = super().__new__(cls)
name = kwargs.get("NAME")
@@ -1105,17 +1140,7 @@ class HydrusNetwork(Store):
if not isinstance(meta, dict):
continue
mime_type = meta.get("mime")
ext = str(meta.get("ext") or "").strip().lstrip(".")
if not ext and mime_type:
for category in mime_maps.values():
for _ext_key, info in category.items():
if mime_type in info.get("mimes", []):
ext = str(info.get("ext",
"")
).strip().lstrip(".")
break
if ext:
break
ext = _resolve_ext_from_meta(meta, mime_type)
if _normalize_ext_filter(ext) != ext_filter:
continue
@@ -1168,6 +1193,8 @@ class HydrusNetwork(Store):
item_url = meta.get("known_urls") or meta.get("urls") or meta.get("url") or []
if not item_url:
item_url = meta.get("file_url") or f"{self.URL.rstrip('/')}/view_file?hash={hash_hex}"
if isinstance(item_url, str) and "/view_file" in item_url:
item_url = self._append_access_key(item_url)
results.append(
{
@@ -1181,7 +1208,7 @@ class HydrusNetwork(Store):
"tag": all_tags,
"file_id": file_id,
"mime": mime_type,
"ext": ext,
"ext": _resolve_ext_from_meta(meta, mime_type),
}
)
@@ -1312,15 +1339,7 @@ class HydrusNetwork(Store):
# Prefer Hydrus-provided extension (e.g. ".webm"); fall back to MIME map.
mime_type = meta.get("mime")
ext = str(meta.get("ext") or "").strip().lstrip(".")
if not ext and mime_type:
for category in mime_maps.values():
for _ext_key, info in category.items():
if mime_type in info.get("mimes", []):
ext = str(info.get("ext", "")).strip().lstrip(".")
break
if ext:
break
ext = _resolve_ext_from_meta(meta, mime_type)
# Filter results based on query type
# If user provided explicit namespace (has ':'), don't do substring filtering
@@ -1331,6 +1350,8 @@ class HydrusNetwork(Store):
item_url = meta.get("known_urls") or meta.get("urls") or meta.get("url") or []
if not item_url:
item_url = meta.get("file_url") or f"{self.URL.rstrip('/')}/view_file?hash={hash_hex}"
if isinstance(item_url, str) and "/view_file" in item_url:
item_url = self._append_access_key(item_url)
if has_namespace:
# Explicit namespace search - already filtered by Hydrus tag search

View File

@@ -101,20 +101,7 @@ def _required_keys_for(store_cls: Type[BaseStore]) -> list[str]:
return keys
except Exception:
pass
# Legacy __new__.keys support
keys = getattr(store_cls.__new__, "keys", None)
if keys is None:
return []
if isinstance(keys, dict):
return [str(k) for k in keys.keys()]
if isinstance(keys, (list, tuple, set, frozenset)):
return [str(k) for k in keys]
if isinstance(keys, str):
return [keys]
raise TypeError(
f"Unsupported __new__.keys type for {store_cls.__name__}: {type(keys)}"
)
return []
# Store type names that have been converted to providers-only.

View File

@@ -340,7 +340,6 @@ class SharedArgs:
# Path/File arguments
PATH = CmdletArg("path", type="string", description="File or directory path.")
OUTPUT = CmdletArg("output", type="string", description="Output file path.")
# Generic arguments
QUERY = CmdletArg(
@@ -782,7 +781,7 @@ def resolve_target_dir(
*,
handle_creations: bool = True
) -> Optional[Path]:
"""Resolve a target directory from -path, -output, -storage, or config fallback.
"""Resolve a target directory from -path, -storage, or config fallback.
Args:
parsed: Parsed cmdlet arguments dict.
@@ -792,8 +791,8 @@ def resolve_target_dir(
Returns:
Path to the resolved directory, or None if invalid.
"""
# Priority 1: Explicit -path or -output
target = parsed.get("path") or parsed.get("output")
# Priority 1: Explicit -path
target = parsed.get("path")
if target:
try:
p = Path(str(target)).expanduser().resolve()

View File

@@ -72,13 +72,6 @@ class Download_File(Cmdlet):
SharedArgs.PROVIDER,
SharedArgs.PATH,
SharedArgs.QUERY,
# Prefer -path for output directory to match other cmdlets; keep -output for backwards compatibility.
CmdletArg(
name="-output",
type="string",
alias="o",
description="(deprecated) Output directory (use -path instead)",
),
QueryArg(
"clip",
key="clip",
@@ -2782,7 +2775,7 @@ class Download_File(Cmdlet):
# UX: In piped mode, allow a single positional arg to be the destination directory.
# Example: @1-4 | download-file "C:\\Users\\Me\\Downloads\\yoyo"
if (had_piped_input and raw_url and len(raw_url) == 1
and (not parsed.get("path")) and (not parsed.get("output"))):
and (not parsed.get("path"))):
candidate = str(raw_url[0] or "").strip()
low = candidate.lower()
looks_like_url = low.startswith((

View File

@@ -971,8 +971,18 @@ class search_file(Cmdlet):
p_str = str(p_val or "").strip()
if p_str:
if p_str.startswith(("http://", "https://", "magnet:", "torrent:")):
sel_args = ["-url", p_str]
sel_action = ["download-file", "-url", p_str]
h = normalized.get("hash") or normalized.get("file_hash") or normalized.get("hash_hex")
s_val = normalized.get("store")
if h and s_val and "/view_file" in p_str:
try:
h_norm = normalize_hash(h)
except Exception:
h_norm = str(h)
sel_args = ["-query", f"hash:{h_norm}", "-store", str(s_val)]
sel_action = ["get-metadata", "-query", f"hash:{h_norm}", "-store", str(s_val)]
else:
sel_args = ["-url", p_str]
sel_action = ["download-file", "-url", p_str]
else:
try:
from SYS.utils import expand_path