From 66132811e08d39e9c2c98ca3994904910e1c1a84 Mon Sep 17 00:00:00 2001 From: Nose Date: Sun, 18 Jan 2026 03:23:01 -0800 Subject: [PATCH] f --- API/HydrusNetwork.py | 26 +++++++++++++------------- SYS/metadata.py | 4 ++-- SYS/utils.py | 4 ++-- cmdlet/_shared.py | 14 +++++++------- cmdlet/add_relationship.py | 30 +++++++++++++++--------------- cmdlet/download_file.py | 8 ++++---- cmdlet/get_url.py | 2 +- cmdlet/screen_shot.py | 12 ++++++------ 8 files changed, 50 insertions(+), 50 deletions(-) diff --git a/API/HydrusNetwork.py b/API/HydrusNetwork.py index 7165c94..6c600dc 100644 --- a/API/HydrusNetwork.py +++ b/API/HydrusNetwork.py @@ -1174,7 +1174,7 @@ def hydrus_export(args, _parser) -> int: return 1 ffmpeg_metadata = prepare_ffmpeg_metadata(metadata_payload) - def _normalise_ext(value: Optional[str]) -> Optional[str]: + def _normalize_ext(value: Optional[str]) -> Optional[str]: if not value: return None cleaned = value.strip() @@ -1271,7 +1271,7 @@ def hydrus_export(args, _parser) -> int: log(f"{exc}", file=sys.stderr) return 1 - source_suffix = _normalise_ext(getattr(args, "source_ext", None)) + source_suffix = _normalize_ext(getattr(args, "source_ext", None)) if source_suffix and source_suffix.lower() == ".bin": source_suffix = None @@ -1313,7 +1313,7 @@ def hydrus_export(args, _parser) -> int: ) else None if isinstance(entries, list) and entries: entry = entries[0] - ext_value = _normalise_ext( + ext_value = _normalize_ext( entry.get("ext") if isinstance(entry, dict) else None ) @@ -1328,7 +1328,7 @@ def hydrus_export(args, _parser) -> int: if os.environ.get("DOWNLOW_DEBUG"): log(f"hydrus metadata fetch failed: {exc}", file=sys.stderr) if not resolved_suffix: - fallback_suffix = _normalise_ext(original_suffix) + fallback_suffix = _normalize_ext(original_suffix) if fallback_suffix and fallback_suffix.lower() == ".bin": fallback_suffix = None resolved_suffix = fallback_suffix or ".hydrus" @@ -2098,9 +2098,9 @@ def _generate_hydrus_url_variants(url: str) -> 
List[str]: alternate_scheme = "https" if parsed.scheme == "http" else "http" push(urlunsplit((alternate_scheme, parsed.netloc, parsed.path, parsed.query, parsed.fragment))) - normalised_netloc = parsed.netloc.lower() - if normalised_netloc and normalised_netloc != parsed.netloc: - push(urlunsplit((parsed.scheme, normalised_netloc, parsed.path, parsed.query, parsed.fragment))) + normalized_netloc = parsed.netloc.lower() + if normalized_netloc and normalized_netloc != parsed.netloc: + push(urlunsplit((parsed.scheme, normalized_netloc, parsed.path, parsed.query, parsed.fragment))) if parsed.path: trimmed_path = parsed.path.rstrip("/") @@ -2377,7 +2377,7 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]: hashes: Optional[List[str]] = None file_ids: Optional[List[int]] = None matched_url = None - normalised_reported = None + normalized_reported = None seen: Set[str] = set() queue = deque() for variant in _generate_hydrus_url_variants(url): @@ -2404,11 +2404,11 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]: response_hashes_list: List[str] = [] response_file_ids_list: List[int] = [] if isinstance(response, dict): - normalised_value = response.get("normalised_url") - if isinstance(normalised_value, str): - trimmed = normalised_value.strip() + normalized_value = response.get("normalised_url") + if isinstance(normalized_value, str): + trimmed = normalized_value.strip() if trimmed: - normalised_reported = normalised_reported or trimmed + normalized_reported = normalized_reported or trimmed if trimmed not in seen: queue.append(trimmed) for redirect_key in ("redirect_url", "url"): @@ -2478,7 +2478,7 @@ def fetch_hydrus_metadata_by_url(payload: Dict[str, Any]) -> Dict[str, Any]: } result = fetch_hydrus_metadata(followup_payload) result["matched_url"] = matched_url or url - result["normalised_url"] = normalised_reported or matched_url or url + result["normalized_url"] = normalized_reported or matched_url or url
result["tried_urls"] = tried_variants return result diff --git a/SYS/metadata.py b/SYS/metadata.py index 3d9a646..a676aeb 100644 --- a/SYS/metadata.py +++ b/SYS/metadata.py @@ -412,7 +412,7 @@ def normalize_urls(value: Any) -> List[str]: return out -def _normalise_string_list(values: Optional[Iterable[Any]]) -> List[str]: +def _normalize_string_list(values: Optional[Iterable[Any]]) -> List[str]: if not values: return [] seen: Set[str] = set() @@ -769,7 +769,7 @@ def sync_sidecar(payload: Dict[str, Any]) -> Dict[str, Any]: else: sidecar_path = _derive_sidecar_path(candidate) - tags = _normalise_string_list(payload.get("tag")) + tags = _normalize_string_list(payload.get("tag")) if not tags and sidecar_path.exists(): tags = read_tags_from_file(sidecar_path) diff --git a/SYS/utils.py b/SYS/utils.py index b651588..fd86557 100644 --- a/SYS/utils.py +++ b/SYS/utils.py @@ -576,7 +576,7 @@ class UrlPolicy: return tags -def _normalise_rule(rule: dict[str, Any]) -> dict[str, Any] | None: +def _normalize_rule(rule: dict[str, Any]) -> dict[str, Any] | None: pattern = str(rule.get("pattern") or rule.get("host") or "").strip() if not pattern: return None @@ -614,7 +614,7 @@ def resolve_url_policy(config: dict[str, Any], url: str) -> UrlPolicy: for rule_raw in policies_raw: if not isinstance(rule_raw, dict): continue - rule = _normalise_rule(rule_raw) + rule = _normalize_rule(rule_raw) if rule is None: continue pattern = rule["pattern"] diff --git a/cmdlet/_shared.py b/cmdlet/_shared.py index b987643..e2d317e 100644 --- a/cmdlet/_shared.py +++ b/cmdlet/_shared.py @@ -1358,7 +1358,7 @@ def fmt_bytes(n: Optional[int]) -> str: return f"{mb:.1f} MB" -def _normalise_tag_group_entry(value: Any) -> Optional[str]: +def _normalize_tag_group_entry(value: Any) -> Optional[str]: """Internal: Normalize a single tag group entry.""" if not isinstance(value, str): value = str(value) @@ -1428,14 +1428,14 @@ def _load_tag_groups() -> Dict[str, List[str]]: members: List[str] = [] if 
isinstance(value, list): for entry in value: - normalised = _normalise_tag_group_entry(entry) - if normalised: - members.append(normalised) + normalized = _normalize_tag_group_entry(entry) + if normalized: + members.append(normalized) elif isinstance(value, str): - normalised = _normalise_tag_group_entry(value) - if normalised: + normalized = _normalize_tag_group_entry(value) + if normalized: members.extend( - token.strip() for token in normalised.split(",") + token.strip() for token in normalized.split(",") if token.strip() ) if members: diff --git a/cmdlet/add_relationship.py b/cmdlet/add_relationship.py index b0adc69..e6195de 100644 --- a/cmdlet/add_relationship.py +++ b/cmdlet/add_relationship.py @@ -69,7 +69,7 @@ CMDLET = Cmdlet( ) -def _normalise_hash_hex(value: Optional[str]) -> Optional[str]: +def _normalize_hash_hex(value: Optional[str]) -> Optional[str]: """Normalize a hash hex string to lowercase 64-char format.""" if not value or not isinstance(value, str): return None @@ -99,7 +99,7 @@ def _extract_relationships_from_tag(tag_value: str) -> Dict[str, list[str]]: if matches: for rel_type, hash_value in matches: - normalized = _normalise_hash_hex(hash_value) + normalized = _normalize_hash_hex(hash_value) if normalized: if rel_type not in result: result[rel_type] = [] @@ -111,13 +111,13 @@ def _extract_relationships_from_tag(tag_value: str) -> Dict[str, list[str]]: hashes = [h.strip().lower() for h in hashes if isinstance(h, str)] if not hashes: return result - king = _normalise_hash_hex(hashes[0]) + king = _normalize_hash_hex(hashes[0]) if not king: return result result["king"] = [king] alts: list[str] = [] for h in hashes[1:]: - normalized = _normalise_hash_hex(h) + normalized = _normalize_hash_hex(h) if normalized and normalized != king: alts.append(normalized) if alts: @@ -155,13 +155,13 @@ def _apply_relationships_from_tags( king = (rels.get("king") or [None])[0] if not king: continue - king_norm = _normalise_hash_hex(king) + king_norm = 
_normalize_hash_hex(king) if not king_norm: continue for rel_type in ("alt", "related"): for other in rels.get(rel_type, []) or []: - other_norm = _normalise_hash_hex(other) + other_norm = _normalize_hash_hex(other) if not other_norm or other_norm == king_norm: continue key = (other_norm, king_norm, rel_type) @@ -184,7 +184,7 @@ def _apply_relationships_from_tags( king = (rels.get("king") or [None])[0] if not king: continue - king_norm = _normalise_hash_hex(king) + king_norm = _normalize_hash_hex(king) if not king_norm: continue @@ -196,7 +196,7 @@ def _apply_relationships_from_tags( ) for alt in alt_hashes: - alt_norm = _normalise_hash_hex(alt) + alt_norm = _normalize_hash_hex(alt) if not alt_norm or alt_norm == king_norm: continue if (alt_norm, king_norm) in processed_pairs: @@ -286,7 +286,7 @@ def _extract_hash_and_store(item: Any) -> tuple[Optional[str], Optional[str]]: "file_hash") s = get_field(item, "store") - hash_norm = _normalise_hash_hex(str(h) if h is not None else None) + hash_norm = _normalize_hash_hex(str(h) if h is not None else None) store_norm: Optional[str] if s is None: @@ -330,7 +330,7 @@ def _resolve_king_reference(king_arg: str) -> Optional[str]: return None # Check if it's already a valid hash - normalized = _normalise_hash_hex(king_arg) + normalized = _normalize_hash_hex(king_arg) if normalized: return normalized @@ -356,7 +356,7 @@ def _resolve_king_reference(king_arg: str) -> Optional[str]: ) if item_hash: - normalized = _normalise_hash_hex(str(item_hash)) + normalized = _normalize_hash_hex(str(item_hash)) if normalized: return normalized @@ -500,7 +500,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int: parts = [ p.strip() for p in alt_text.replace(";", ",").split(",") if p.strip() ] - hashes = [h for h in (_normalise_hash_hex(p) for p in parts) if h] + hashes = [h for h in (_normalize_hash_hex(p) for p in parts) if h] if not hashes: log( "Invalid -alt value (expected @ selection or 64-hex sha256 hash list)", 
@@ -898,7 +898,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int: return 1 if king_hash: - normalized_king = _normalise_hash_hex(str(king_hash)) + normalized_king = _normalize_hash_hex(str(king_hash)) if not normalized_king: log(f"King hash invalid: {king_hash}", file=sys.stderr) return 1 @@ -973,7 +973,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int: # PIPELINE MODE with Hydrus: Track relationships using hash if file_hash and hydrus_client: - file_hash = _normalise_hash_hex( + file_hash = _normalize_hash_hex( str(file_hash) if file_hash is not None else None ) if not file_hash: @@ -1106,7 +1106,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int: log("File hash not available (run add-file first)", file=sys.stderr) return 1 - file_hash = _normalise_hash_hex(file_hash) + file_hash = _normalize_hash_hex(file_hash) if not file_hash: log("Invalid file hash format", file=sys.stderr) return 1 diff --git a/cmdlet/download_file.py b/cmdlet/download_file.py index 4f1cde5..0ed1079 100644 --- a/cmdlet/download_file.py +++ b/cmdlet/download_file.py @@ -2446,7 +2446,7 @@ class Download_File(Cmdlet): return out @staticmethod - def _normalise_hash_hex(value: Optional[str]) -> Optional[str]: + def _normalize_hash_hex(value: Optional[str]) -> Optional[str]: if not value or not isinstance(value, str): return None candidate = value.strip().lower() @@ -2460,7 +2460,7 @@ class Download_File(Cmdlet): return None for key in ("hash", "hash_hex", "file_hash", "hydrus_hash"): v = hit.get(key) - normalized = cls._normalise_hash_hex(str(v) if v is not None else None) + normalized = cls._normalize_hash_hex(str(v) if v is not None else None) if normalized: return normalized return None @@ -2553,10 +2553,10 @@ class Download_File(Cmdlet): hashes: List[str] = [] for po in pipe_objects: - h_val = cls._normalise_hash_hex(str(po.get("hash") or "")) + h_val = cls._normalize_hash_hex(str(po.get("hash") or "")) 
hashes.append(h_val or "") - king_hash = cls._normalise_hash_hex(source_king_hash) if source_king_hash else None + king_hash = cls._normalize_hash_hex(source_king_hash) if source_king_hash else None if not king_hash: king_hash = hashes[0] if hashes and hashes[0] else None if not king_hash: diff --git a/cmdlet/get_url.py b/cmdlet/get_url.py index 7300d76..fee913b 100644 --- a/cmdlet/get_url.py +++ b/cmdlet/get_url.py @@ -395,7 +395,7 @@ class Get_Url(Cmdlet): info = backend.get_url_info(raw_pattern) # type: ignore[attr-defined] if isinstance(info, dict): norm = ( - info.get("normalised_url") + info.get("normalised_url") or info.get("normalized_url") ) if isinstance(norm, str) and norm.strip(): diff --git a/cmdlet/screen_shot.py b/cmdlet/screen_shot.py index 681effb..76a1879 100644 --- a/cmdlet/screen_shot.py +++ b/cmdlet/screen_shot.py @@ -243,7 +243,7 @@ def _title_from_url(url: str) -> str: return "" -def _normalise_format(fmt: Optional[str]) -> str: +def _normalize_format(fmt: Optional[str]) -> str: """Normalize output format to valid values.""" if not fmt: return "webp" @@ -511,7 +511,7 @@ def _archive_url(url: str, timeout: float) -> Tuple[List[str], List[str]]: def _prepare_output_path(options: ScreenshotOptions) -> Path: """Prepare and validate output path for screenshot.""" ensure_directory(options.output_dir) - explicit_format = _normalise_format( + explicit_format = _normalize_format( options.output_format ) if options.output_format else None inferred_format: Optional[str] = None @@ -521,7 +521,7 @@ def _prepare_output_path(options: ScreenshotOptions) -> Path: path = options.output_dir / path suffix = path.suffix.lower() if suffix: - inferred_format = _normalise_format(suffix[1:]) + inferred_format = _normalize_format(suffix[1:]) else: stamp = time.strftime("%Y%m%d_%H%M%S") filename = f"{_slugify_url(options.url)}_{stamp}" @@ -595,7 +595,7 @@ def _capture( tool.debug_dump() debug("Launching browser...") - format_name =
_normalise_format(options.output_format) + format_name = _normalize_format(options.output_format) headless = options.headless or format_name == "pdf" debug(f"[_capture] Format: {format_name}, Headless: {headless}") @@ -758,7 +758,7 @@ def _capture_screenshot( ) -> ScreenshotResult: """Capture a screenshot for the given options.""" debug(f"[_capture_screenshot] Preparing capture for {options.url}") - requested_format = _normalise_format(options.output_format) + requested_format = _normalize_format(options.output_format) destination = _prepare_output_path(options) warnings: List[str] = [] @@ -972,7 +972,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int: # PREPARE SCREENSHOT OPTIONS # ======================================================================== - format_name = _normalise_format(format_value) + format_name = _normalize_format(format_value) filtered_selectors = [str(s).strip() for s in selectors if str(s).strip()] manual_target_selectors = filtered_selectors if filtered_selectors else None