Add YAPF style + ignore, and format tracked Python files

2025-12-29 18:42:02 -08:00
parent c019c00aed
commit 507946a3e4
108 changed files with 11664 additions and 6494 deletions
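The subject line refers to a YAPF style definition plus an ignore list (YAPF reads glob patterns from a .yapfignore file), neither of which is shown in this excerpt. The sketch below is a hedged illustration only: it drives YAPF through its public FormatCode API with assumed option values (based_on_style, column_limit and split_before_first_argument are real YAPF knobs, but the values are guesses, not the repository's actual .style.yapf), applied to one of the signatures touched in the hunks that follow.

# Hedged sketch: the style values below are assumptions, not the actual
# .style.yapf added by this commit.
from yapf.yapflib.yapf_api import FormatCode

SAMPLE = (
    "def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], "
    "output_dir: Path) -> Path | None:\n"
    "    return None\n"
)

formatted, changed = FormatCode(
    SAMPLE,
    style_config="{based_on_style: pep8, column_limit: 79, "
    "split_before_first_argument: true}",
)
print(formatted, end="")    # the reformatted source text
print("changed:", changed)  # True when YAPF altered the input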


@@ -15,7 +15,7 @@ from urllib.parse import parse_qs, urlparse
from SYS.logger import log
import pipeline as ctx
-from config import resolve_output_dir
+from SYS.config import resolve_output_dir
from . import _shared as sh
Cmdlet = sh.Cmdlet
@@ -26,7 +26,6 @@ create_pipe_object_result = sh.create_pipe_object_result
parse_cmdlet_args = sh.parse_cmdlet_args
should_show_help = sh.should_show_help
_SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$")
@@ -85,19 +84,27 @@ def _hydrus_instance_names(config: Dict[str, Any]) -> Set[str]:
if isinstance(store_cfg, dict):
hydrus_cfg = store_cfg.get("hydrusnetwork")
if isinstance(hydrus_cfg, dict):
-instances = {str(k).strip().lower() for k in hydrus_cfg.keys() if str(k).strip()}
+instances = {
+    str(k).strip().lower()
+    for k in hydrus_cfg.keys() if str(k).strip()
+}
except Exception:
instances = set()
return instances
-def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: Path) -> Path | None:
+def _maybe_download_hydrus_item(
+    item: Any,
+    config: Dict[str,
+                 Any],
+    output_dir: Path
+) -> Path | None:
"""Download a Hydrus-backed item to a local temp path (best-effort).
This is intentionally side-effect free except for writing the local temp file.
"""
try:
-from config import get_hydrus_access_key, get_hydrus_url
+from SYS.config import get_hydrus_access_key, get_hydrus_url
from API.HydrusNetwork import HydrusNetwork as HydrusClient, download_hydrus_file
except Exception:
return None
@@ -105,7 +112,8 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
store_name = _extract_store_name(item)
store_lower = store_name.lower()
hydrus_instances = _hydrus_instance_names(config)
-store_hint = store_lower in {"hydrus", "hydrusnetwork"} or (store_lower in hydrus_instances)
+store_hint = store_lower in {"hydrus",
+                             "hydrusnetwork"} or (store_lower in hydrus_instances)
url = _extract_url(item)
file_hash = _extract_sha256_hex(item) or (
@@ -149,8 +157,13 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
# Best-effort extension from Hydrus metadata.
suffix = ".hydrus"
try:
-meta_response = client.fetch_file_metadata(hashes=[file_hash], include_mime=True)
-entries = meta_response.get("metadata") if isinstance(meta_response, dict) else None
+meta_response = client.fetch_file_metadata(
+    hashes=[file_hash],
+    include_mime=True
+)
+entries = meta_response.get("metadata"
+                            ) if isinstance(meta_response,
+                                            dict) else None
if isinstance(entries, list) and entries:
entry = entries[0]
if isinstance(entry, dict):
@@ -173,7 +186,9 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
if dest.exists():
dest = output_dir / f"{file_hash}_{uuid.uuid4().hex[:10]}{suffix}"
-headers = {"Hydrus-Client-API-Access-Key": access_key}
+headers = {
+    "Hydrus-Client-API-Access-Key": access_key
+}
download_hydrus_file(file_url, headers, dest, timeout=60.0)
try:
if dest.exists() and dest.is_file():
@@ -183,17 +198,20 @@ def _maybe_download_hydrus_item(item: Any, config: Dict[str, Any], output_dir: P
return None
-def _resolve_existing_or_fetch_path(
-    item: Any, config: Dict[str, Any]
-) -> tuple[Path | None, Path | None]:
+def _resolve_existing_or_fetch_path(item: Any,
+                                    config: Dict[str,
+                                                 Any]) -> tuple[Path | None,
+                                                                Path | None]:
"""Return (path, temp_path) where temp_path is non-None only for files we downloaded."""
# 1) Direct local path
try:
po = coerce_to_pipe_object(item, None)
raw_path = (
-    getattr(po, "path", None)
-    or getattr(po, "target", None)
-    or sh.get_pipe_object_path(item)
+    getattr(po,
+            "path",
+            None) or getattr(po,
+                             "target",
+                             None) or sh.get_pipe_object_path(item)
)
if raw_path:
p = Path(str(raw_path)).expanduser()
@@ -223,17 +241,22 @@ def _resolve_existing_or_fetch_path(
if src.strip().lower().startswith(("http://", "https://")):
tmp_base = None
try:
-tmp_base = config.get("temp") if isinstance(config, dict) else None
+tmp_base = config.get("temp"
+                      ) if isinstance(config,
+                                      dict) else None
except Exception:
tmp_base = None
out_dir = (
-    Path(str(tmp_base)).expanduser()
-    if tmp_base
-    else (Path(tempfile.gettempdir()) / "Medios-Macina")
+    Path(str(tmp_base)).expanduser() if tmp_base else
+    (Path(tempfile.gettempdir()) / "Medios-Macina")
)
out_dir = out_dir / "archive" / "hydrus"
downloaded = _maybe_download_hydrus_item(
-    {"hash": file_hash, "store": store_name, "url": src.strip()},
+    {
+        "hash": file_hash,
+        "store": store_name,
+        "url": src.strip()
+    },
config,
out_dir,
)
@@ -248,9 +271,8 @@ def _resolve_existing_or_fetch_path(
except Exception:
tmp_base = None
out_dir = (
-    Path(str(tmp_base)).expanduser()
-    if tmp_base
-    else (Path(tempfile.gettempdir()) / "Medios-Macina")
+    Path(str(tmp_base)).expanduser() if tmp_base else
+    (Path(tempfile.gettempdir()) / "Medios-Macina")
)
out_dir = out_dir / "archive" / "hydrus"
downloaded = _maybe_download_hydrus_item(item, config, out_dir)
@@ -350,21 +372,29 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
stamp = time.strftime("%Y%m%d_%H%M%S")
out_path = out_dir / f"archive_{stamp}.tar.zst"
try:
-out_path = sh._unique_destination_path(out_path) # type: ignore[attr-defined]
+out_path = sh._unique_destination_path(
+    out_path
+) # type: ignore[attr-defined]
except Exception:
pass
try:
out_path.parent.mkdir(parents=True, exist_ok=True)
except Exception as exc:
-log(f"Failed to create output directory: {out_path.parent} ({exc})", file=sys.stderr)
+log(
+    f"Failed to create output directory: {out_path.parent} ({exc})",
+    file=sys.stderr
+)
return 1
# Import zstandard lazily so the rest of the CLI still runs without it.
try:
import zstandard as zstd # type: ignore
except Exception:
-log("Missing dependency: zstandard (pip install zstandard)", file=sys.stderr)
+log(
+    "Missing dependency: zstandard (pip install zstandard)",
+    file=sys.stderr
+)
return 1
# Write tar stream into zstd stream.
@@ -372,9 +402,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
with open(out_path, "wb") as out_handle:
cctx = zstd.ZstdCompressor(level=level)
with cctx.stream_writer(out_handle) as compressor:
-with tarfile.open(
-    fileobj=compressor, mode="w|", format=tarfile.PAX_FORMAT
-) as tf:
+with tarfile.open(fileobj=compressor,
+                  mode="w|",
+                  format=tarfile.PAX_FORMAT) as tf:
seen_names: Set[str] = set()
for p in paths:
arcname = _unique_arcname(p.name, seen_names)
@@ -382,7 +412,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
try:
tf.add(str(p), arcname=arcname, recursive=True)
except Exception as exc:
-log(f"Failed to add to archive: {p} ({exc})", file=sys.stderr)
+log(
+    f"Failed to add to archive: {p} ({exc})",
+    file=sys.stderr
+)
except Exception as exc:
log(f"Archive creation failed: {exc}", file=sys.stderr)
return 1
@@ -437,7 +470,9 @@ CMDLET = Cmdlet(
usage="@N | archive-file [-level <1-22>] [-path <path>]",
arg=[
CmdletArg(
-    "-level", type="integer", description="Zstandard compression level (default: 11)."
+    "-level",
+    type="integer",
+    description="Zstandard compression level (default: 11)."
),
SharedArgs.PATH,
],