This commit is contained in:
2026-01-18 03:23:01 -08:00
parent aa675a625a
commit 66132811e0
8 changed files with 50 additions and 50 deletions

View File

@@ -1174,7 +1174,7 @@ def hydrus_export(args, _parser) -> int:
return 1
ffmpeg_metadata = prepare_ffmpeg_metadata(metadata_payload)
def _normalise_ext(value: Optional[str]) -> Optional[str]:
def _normalize_ext(value: Optional[str]) -> Optional[str]:
if not value:
return None
cleaned = value.strip()
@@ -1271,7 +1271,7 @@ def hydrus_export(args, _parser) -> int:
log(f"{exc}", file=sys.stderr)
return 1
source_suffix = _normalise_ext(getattr(args, "source_ext", None))
source_suffix = _normalize_ext(getattr(args, "source_ext", None))
if source_suffix and source_suffix.lower() == ".bin":
source_suffix = None
@@ -1313,7 +1313,7 @@ def hydrus_export(args, _parser) -> int:
) else None
if isinstance(entries, list) and entries:
entry = entries[0]
ext_value = _normalise_ext(
ext_value = _normalize_ext(
entry.get("ext") if isinstance(entry,
dict) else None
)
@@ -1328,7 +1328,7 @@ def hydrus_export(args, _parser) -> int:
if os.environ.get("DOWNLOW_DEBUG"):
log(f"hydrus metadata fetch failed: {exc}", file=sys.stderr)
if not resolved_suffix:
fallback_suffix = _normalise_ext(original_suffix)
fallback_suffix = _normalize_ext(original_suffix)
if fallback_suffix and fallback_suffix.lower() == ".bin":
fallback_suffix = None
resolved_suffix = fallback_suffix or ".hydrus"
@@ -2098,9 +2098,9 @@ def _generate_hydrus_url_variants(url: str) -> List[str]:
alternate_scheme = "https" if parsed.scheme == "http" else "http"
push(urlunsplit((alternate_scheme, parsed.netloc, parsed.path, parsed.query, parsed.fragment)))
normalised_netloc = parsed.netloc.lower()
if normalised_netloc and normalised_netloc != parsed.netloc:
push(urlunsplit((parsed.scheme, normalised_netloc, parsed.path, parsed.query, parsed.fragment)))
normalized_netloc = parsed.netloc.lower()
if normalized_netloc and normalized_netloc != parsed.netloc:
push(urlunsplit((parsed.scheme, normalized_netloc, parsed.path, parsed.query, parsed.fragment)))
if parsed.path:
trimmed_path = parsed.path.rstrip("/")
@@ -2377,7 +2377,7 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]:
hashes: Optional[List[str]] = None
file_ids: Optional[List[int]] = None
matched_url = None
normalised_reported = None
normalized_reported = None
seen: Set[str] = set()
queue = deque()
for variant in _generate_hydrus_url_variants(url):
@@ -2404,11 +2404,11 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]:
response_hashes_list: List[str] = []
response_file_ids_list: List[int] = []
if isinstance(response, dict):
normalised_value = response.get("normalised_url")
if isinstance(normalised_value, str):
trimmed = normalised_value.strip()
normalized_value = response.get("normalised_url")  # NOTE: Hydrus API response key keeps British spelling; do not rename
if isinstance(normalized_value, str):
trimmed = normalized_value.strip()
if trimmed:
normalised_reported = normalised_reported or trimmed
normalized_reported = normalized_reported or trimmed
if trimmed not in seen:
queue.append(trimmed)
for redirect_key in ("redirect_url", "url"):
@@ -2478,7 +2478,7 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]:
}
result = fetch_hydrus_metadata(followup_payload)
result["matched_url"] = matched_url or url
result["normalised_url"] = normalised_reported or matched_url or url
result["normalized_url"] = normalized_reported or matched_url or url
result["tried_urls"] = tried_variants
return result

View File

@@ -412,7 +412,7 @@ def normalize_urls(value: Any) -> List[str]:
return out
def _normalise_string_list(values: Optional[Iterable[Any]]) -> List[str]:
def _normalize_string_list(values: Optional[Iterable[Any]]) -> List[str]:
if not values:
return []
seen: Set[str] = set()
@@ -769,7 +769,7 @@ def sync_sidecar(payload: Dict[str, Any]) -> Dict[str, Any]:
else:
sidecar_path = _derive_sidecar_path(candidate)
tags = _normalise_string_list(payload.get("tag"))
tags = _normalize_string_list(payload.get("tag"))
if not tags and sidecar_path.exists():
tags = read_tags_from_file(sidecar_path)

View File

@@ -576,7 +576,7 @@ class UrlPolicy:
return tags
def _normalise_rule(rule: dict[str, Any]) -> dict[str, Any] | None:
def _normalize_rule(rule: dict[str, Any]) -> dict[str, Any] | None:
pattern = str(rule.get("pattern") or rule.get("host") or "").strip()
if not pattern:
return None
@@ -614,7 +614,7 @@ def resolve_url_policy(config: dict[str, Any], url: str) -> UrlPolicy:
for rule_raw in policies_raw:
if not isinstance(rule_raw, dict):
continue
rule = _normalise_rule(rule_raw)
rule = _normalize_rule(rule_raw)
if rule is None:
continue
pattern = rule["pattern"]

View File

@@ -1358,7 +1358,7 @@ def fmt_bytes(n: Optional[int]) -> str:
return f"{mb:.1f} MB"
def _normalise_tag_group_entry(value: Any) -> Optional[str]:
def _normalize_tag_group_entry(value: Any) -> Optional[str]:
"""Internal: Normalize a single tag group entry."""
if not isinstance(value, str):
value = str(value)
@@ -1428,14 +1428,14 @@ def _load_tag_groups() -> Dict[str, List[str]]:
members: List[str] = []
if isinstance(value, list):
for entry in value:
normalised = _normalise_tag_group_entry(entry)
if normalised:
members.append(normalised)
normalized = _normalize_tag_group_entry(entry)
if normalized:
members.append(normalized)
elif isinstance(value, str):
normalised = _normalise_tag_group_entry(value)
if normalised:
normalized = _normalize_tag_group_entry(value)
if normalized:
members.extend(
token.strip() for token in normalised.split(",")
token.strip() for token in normalized.split(",")
if token.strip()
)
if members:

View File

@@ -69,7 +69,7 @@ CMDLET = Cmdlet(
)
def _normalise_hash_hex(value: Optional[str]) -> Optional[str]:
def _normalize_hash_hex(value: Optional[str]) -> Optional[str]:
"""Normalize a hash hex string to lowercase 64-char format."""
if not value or not isinstance(value, str):
return None
@@ -99,7 +99,7 @@ def _extract_relationships_from_tag(tag_value: str) -> Dict[str, list[str]]:
if matches:
for rel_type, hash_value in matches:
normalized = _normalise_hash_hex(hash_value)
normalized = _normalize_hash_hex(hash_value)
if normalized:
if rel_type not in result:
result[rel_type] = []
@@ -111,13 +111,13 @@ def _extract_relationships_from_tag(tag_value: str) -> Dict[str, list[str]]:
hashes = [h.strip().lower() for h in hashes if isinstance(h, str)]
if not hashes:
return result
king = _normalise_hash_hex(hashes[0])
king = _normalize_hash_hex(hashes[0])
if not king:
return result
result["king"] = [king]
alts: list[str] = []
for h in hashes[1:]:
normalized = _normalise_hash_hex(h)
normalized = _normalize_hash_hex(h)
if normalized and normalized != king:
alts.append(normalized)
if alts:
@@ -155,13 +155,13 @@ def _apply_relationships_from_tags(
king = (rels.get("king") or [None])[0]
if not king:
continue
king_norm = _normalise_hash_hex(king)
king_norm = _normalize_hash_hex(king)
if not king_norm:
continue
for rel_type in ("alt", "related"):
for other in rels.get(rel_type, []) or []:
other_norm = _normalise_hash_hex(other)
other_norm = _normalize_hash_hex(other)
if not other_norm or other_norm == king_norm:
continue
key = (other_norm, king_norm, rel_type)
@@ -184,7 +184,7 @@ def _apply_relationships_from_tags(
king = (rels.get("king") or [None])[0]
if not king:
continue
king_norm = _normalise_hash_hex(king)
king_norm = _normalize_hash_hex(king)
if not king_norm:
continue
@@ -196,7 +196,7 @@ def _apply_relationships_from_tags(
)
for alt in alt_hashes:
alt_norm = _normalise_hash_hex(alt)
alt_norm = _normalize_hash_hex(alt)
if not alt_norm or alt_norm == king_norm:
continue
if (alt_norm, king_norm) in processed_pairs:
@@ -286,7 +286,7 @@ def _extract_hash_and_store(item: Any) -> tuple[Optional[str], Optional[str]]:
"file_hash")
s = get_field(item, "store")
hash_norm = _normalise_hash_hex(str(h) if h is not None else None)
hash_norm = _normalize_hash_hex(str(h) if h is not None else None)
store_norm: Optional[str]
if s is None:
@@ -330,7 +330,7 @@ def _resolve_king_reference(king_arg: str) -> Optional[str]:
return None
# Check if it's already a valid hash
normalized = _normalise_hash_hex(king_arg)
normalized = _normalize_hash_hex(king_arg)
if normalized:
return normalized
@@ -356,7 +356,7 @@ def _resolve_king_reference(king_arg: str) -> Optional[str]:
)
if item_hash:
normalized = _normalise_hash_hex(str(item_hash))
normalized = _normalize_hash_hex(str(item_hash))
if normalized:
return normalized
@@ -500,7 +500,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
parts = [
p.strip() for p in alt_text.replace(";", ",").split(",") if p.strip()
]
hashes = [h for h in (_normalise_hash_hex(p) for p in parts) if h]
hashes = [h for h in (_normalize_hash_hex(p) for p in parts) if h]
if not hashes:
log(
"Invalid -alt value (expected @ selection or 64-hex sha256 hash list)",
@@ -898,7 +898,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
return 1
if king_hash:
normalized_king = _normalise_hash_hex(str(king_hash))
normalized_king = _normalize_hash_hex(str(king_hash))
if not normalized_king:
log(f"King hash invalid: {king_hash}", file=sys.stderr)
return 1
@@ -973,7 +973,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
# PIPELINE MODE with Hydrus: Track relationships using hash
if file_hash and hydrus_client:
file_hash = _normalise_hash_hex(
file_hash = _normalize_hash_hex(
str(file_hash) if file_hash is not None else None
)
if not file_hash:
@@ -1106,7 +1106,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
log("File hash not available (run add-file first)", file=sys.stderr)
return 1
file_hash = _normalise_hash_hex(file_hash)
file_hash = _normalize_hash_hex(file_hash)
if not file_hash:
log("Invalid file hash format", file=sys.stderr)
return 1

View File

@@ -2446,7 +2446,7 @@ class Download_File(Cmdlet):
return out
@staticmethod
def _normalise_hash_hex(value: Optional[str]) -> Optional[str]:
def _normalize_hash_hex(value: Optional[str]) -> Optional[str]:
if not value or not isinstance(value, str):
return None
candidate = value.strip().lower()
@@ -2460,7 +2460,7 @@ class Download_File(Cmdlet):
return None
for key in ("hash", "hash_hex", "file_hash", "hydrus_hash"):
v = hit.get(key)
normalized = cls._normalise_hash_hex(str(v) if v is not None else None)
normalized = cls._normalize_hash_hex(str(v) if v is not None else None)
if normalized:
return normalized
return None
@@ -2553,10 +2553,10 @@ class Download_File(Cmdlet):
hashes: List[str] = []
for po in pipe_objects:
h_val = cls._normalise_hash_hex(str(po.get("hash") or ""))
h_val = cls._normalize_hash_hex(str(po.get("hash") or ""))
hashes.append(h_val or "")
king_hash = cls._normalise_hash_hex(source_king_hash) if source_king_hash else None
king_hash = cls._normalize_hash_hex(source_king_hash) if source_king_hash else None
if not king_hash:
king_hash = hashes[0] if hashes and hashes[0] else None
if not king_hash:

View File

@@ -395,7 +395,7 @@ class Get_Url(Cmdlet):
info = backend.get_url_info(raw_pattern) # type: ignore[attr-defined]
if isinstance(info, dict):
norm = (
info.get("normalized_url")
or info.get("normalised_url")  # fallback: Hydrus-style British-spelled key
)
if isinstance(norm, str) and norm.strip():

View File

@@ -243,7 +243,7 @@ def _title_from_url(url: str) -> str:
return ""
def _normalise_format(fmt: Optional[str]) -> str:
def _normalize_format(fmt: Optional[str]) -> str:
"""Normalize output format to valid values."""
if not fmt:
return "webp"
@@ -511,7 +511,7 @@ def _archive_url(url: str, timeout: float) -> Tuple[List[str], List[str]]:
def _prepare_output_path(options: ScreenshotOptions) -> Path:
"""Prepare and validate output path for screenshot."""
ensure_directory(options.output_dir)
explicit_format = _normalise_format(
explicit_format = _normalize_format(
options.output_format
) if options.output_format else None
inferred_format: Optional[str] = None
@@ -521,7 +521,7 @@ def _prepare_output_path(options: ScreenshotOptions) -> Path:
path = options.output_dir / path
suffix = path.suffix.lower()
if suffix:
inferred_format = _normalise_format(suffix[1:])
inferred_format = _normalize_format(suffix[1:])
else:
stamp = time.strftime("%Y%m%d_%H%M%S")
filename = f"{_slugify_url(options.url)}_{stamp}"
@@ -595,7 +595,7 @@ def _capture(
tool.debug_dump()
debug("Launching browser...")
format_name = _normalise_format(options.output_format)
format_name = _normalize_format(options.output_format)
headless = options.headless or format_name == "pdf"
debug(f"[_capture] Format: {format_name}, Headless: {headless}")
@@ -758,7 +758,7 @@ def _capture_screenshot(
) -> ScreenshotResult:
"""Capture a screenshot for the given options."""
debug(f"[_capture_screenshot] Preparing capture for {options.url}")
requested_format = _normalise_format(options.output_format)
requested_format = _normalize_format(options.output_format)
destination = _prepare_output_path(options)
warnings: List[str] = []
@@ -972,7 +972,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
# PREPARE SCREENSHOT OPTIONS
# ========================================================================
format_name = _normalise_format(format_value)
format_name = _normalize_format(format_value)
filtered_selectors = [str(s).strip() for s in selectors if str(s).strip()]
manual_target_selectors = filtered_selectors if filtered_selectors else None