@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import Any, Dict, Sequence
-
+from typing import Any, Dict, List, Optional, Sequence, Tuple
+import sys
 
 import pipeline as ctx
@@ -48,28 +48,37 @@ class Delete_Url(Cmdlet):
             log("Error: -query must be of the form hash:<sha256>")
             return 1
 
+        # Bulk input is common in pipelines; treat a list of PipeObjects as a batch.
+        results: List[Any] = result if isinstance(result, list) else ([result] if result is not None else [])
+
+        if query_hash and len(results) > 1:
+            log("Error: -query hash:<sha256> cannot be used with multiple piped items")
+            return 1
+
         # Extract hash and store from result or args
-        file_hash = query_hash or get_field(result, "hash")
-        store_name = parsed.get("store") or get_field(result, "store")
+        file_hash = query_hash or (get_field(result, "hash") if result is not None else None)
+        store_name = parsed.get("store") or (get_field(result, "store") if result is not None else None)
         url_arg = parsed.get("url")
 
-        if not file_hash:
-            log("Error: No file hash provided (pipe an item or use -query \"hash:<sha256>\")")
-            return 1
-
-        if not store_name:
-            log("Error: No store name provided")
-            return 1
+        # If we have multiple piped items, we will resolve hash/store per item below.
+        if not results:
+            if not file_hash:
+                log("Error: No file hash provided (pipe an item or use -query \"hash:<sha256>\")")
+                return 1
+            if not store_name:
+                log("Error: No store name provided")
+                return 1
 
         if not url_arg:
             log("Error: No URL provided")
             return 1
 
-        # Normalize hash
-        file_hash = normalize_hash(file_hash)
-        if not file_hash:
-            log("Error: Invalid hash format")
-            return 1
+        # Normalize hash (single-item mode)
+        if not results and file_hash:
+            file_hash = normalize_hash(file_hash)
+            if not file_hash:
+                log("Error: Invalid hash format")
+                return 1
 
         # Parse url (comma-separated)
         urls = [u.strip() for u in str(url_arg).split(',') if u.strip()]
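
(Note: the bulk-input coercion added in the hunk above drives the rest of this change. A minimal sketch of how that one expression behaves for the three possible pipe inputs; `coerce_results` is an illustrative name, the cmdlet inlines the expression directly.)

```python
from typing import Any, List

def coerce_results(result: Any) -> List[Any]:
    # Same expression the diff assigns to `results`: a piped list is the
    # batch, a single object is a one-element batch, None is an empty batch.
    return result if isinstance(result, list) else ([result] if result is not None else [])

assert coerce_results(None) == []                          # nothing piped
assert coerce_results({"hash": "ab"}) == [{"hash": "ab"}]  # single item
assert coerce_results(["x", "y"]) == ["x", "y"]            # bulk input
```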
@@ -80,12 +89,104 @@ class Delete_Url(Cmdlet):
         # Get backend and delete url
         try:
             storage = Store(config)
-            backend = storage[store_name]
 
-            backend.delete_url(file_hash, urls)
-            for u in urls:
-                ctx.emit(f"Deleted URL: {u}")
 
+            def _remove_urls(existing: Any, remove: List[str]) -> Any:
+                # Preserve prior shape: keep str when 1 url, list when multiple.
+                current: List[str] = []
+                try:
+                    if isinstance(existing, str):
+                        current = [p.strip() for p in existing.split(",") if p.strip()]
+                    elif isinstance(existing, (list, tuple)):
+                        current = [str(u).strip() for u in existing if str(u).strip()]
+                except Exception:
+                    current = []
+                remove_set = {u for u in (remove or []) if u}
+                new_urls = [u for u in current if u not in remove_set]
+                if len(new_urls) == 1:
+                    return new_urls[0]
+                return new_urls
+
+            def _set_item_url(item: Any, merged: Any) -> None:
+                try:
+                    if isinstance(item, dict):
+                        item["url"] = merged
+                        return
+                    if hasattr(item, "url"):
+                        setattr(item, "url", merged)
+                except Exception:
+                    return
+
+            store_override = parsed.get("store")
+            batch: Dict[str, List[Tuple[str, List[str]]]] = {}
+            pass_through: List[Any] = []
+
+            if results:
+                for item in results:
+                    pass_through.append(item)
+
+                    raw_hash = query_hash or get_field(item, "hash")
+                    raw_store = store_override or get_field(item, "store")
+                    if not raw_hash or not raw_store:
+                        ctx.print_if_visible("[delete-url] Warning: Item missing hash/store; skipping", file=sys.stderr)
+                        continue
+
+                    normalized = normalize_hash(raw_hash)
+                    if not normalized:
+                        ctx.print_if_visible("[delete-url] Warning: Item has invalid hash; skipping", file=sys.stderr)
+                        continue
+
+                    store_text = str(raw_store).strip()
+                    if not store_text:
+                        ctx.print_if_visible("[delete-url] Warning: Item has empty store; skipping", file=sys.stderr)
+                        continue
+                    if not storage.is_available(store_text):
+                        ctx.print_if_visible(
+                            f"[delete-url] Warning: Store '{store_text}' not configured; skipping", file=sys.stderr
+                        )
+                        continue
+
+                    batch.setdefault(store_text, []).append((normalized, list(urls)))
+
+                for store_text, pairs in batch.items():
+                    try:
+                        backend = storage[store_text]
+                    except Exception:
+                        continue
+
+                    merged: Dict[str, List[str]] = {}
+                    for h, ulist in pairs:
+                        merged.setdefault(h, [])
+                        for u in (ulist or []):
+                            if u and u not in merged[h]:
+                                merged[h].append(u)
+                    bulk_pairs = [(h, merged[h]) for h in merged.keys()]
+
+                    bulk_fn = getattr(backend, "delete_url_bulk", None)
+                    if callable(bulk_fn):
+                        bulk_fn(bulk_pairs, config=config)
+                    else:
+                        for h, ulist in bulk_pairs:
+                            backend.delete_url(h, ulist, config=config)
+
+                    ctx.print_if_visible(
+                        f"✓ delete-url: {len(urls)} url(s) for {len(bulk_pairs)} item(s) in '{store_text}'",
+                        file=sys.stderr,
+                    )
+
+                for item in pass_through:
+                    existing = get_field(item, "url")
+                    _set_item_url(item, _remove_urls(existing, list(urls)))
+                    ctx.emit(item)
+                return 0
+
+            # Single-item mode
+            backend = storage[str(store_name)]
+            backend.delete_url(str(file_hash), urls, config=config)
+            ctx.print_if_visible(f"✓ delete-url: {len(urls)} url(s) removed", file=sys.stderr)
+            if result is not None:
+                existing = get_field(result, "url")
+                _set_item_url(result, _remove_urls(existing, list(urls)))
+                ctx.emit(result)
+            return 0
 
         except KeyError:
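
(Note: `_remove_urls` exists to keep the emitted item's `url` field in its prior shape, a plain str when exactly one URL remains and a list otherwise. Restating the helper from the diff, that contract can be checked in isolation:)

```python
from typing import Any, List

def _remove_urls(existing: Any, remove: List[str]) -> Any:
    # Restated verbatim from the diff for illustration.
    current: List[str] = []
    try:
        if isinstance(existing, str):
            current = [p.strip() for p in existing.split(",") if p.strip()]
        elif isinstance(existing, (list, tuple)):
            current = [str(u).strip() for u in existing if str(u).strip()]
    except Exception:
        current = []
    remove_set = {u for u in (remove or []) if u}
    new_urls = [u for u in current if u not in remove_set]
    if len(new_urls) == 1:
        return new_urls[0]
    return new_urls

# One survivor collapses back to a plain str, matching the field's prior shape.
assert _remove_urls("a,b", ["b"]) == "a"
# Zero or two-plus survivors stay a list.
assert _remove_urls(["a", "b", "c"], ["c"]) == ["a", "b"]
assert _remove_urls("a", ["a"]) == []
```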
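(Note: the `getattr(backend, "delete_url_bulk", None)` probe makes the bulk entry point opt-in per backend, with a per-hash `delete_url` loop as the fallback. A hypothetical in-memory backend satisfying both call sites might look like the sketch below; the class name and storage layout are assumptions, only the two signatures are implied by the call sites in the diff.)

```python
from typing import Any, Dict, List, Tuple

class MyBackend:  # hypothetical backend, not part of this commit
    def __init__(self) -> None:
        self._urls: Dict[str, List[str]] = {}  # hash -> known URLs

    def delete_url(self, file_hash: str, urls: List[str], config: Any = None) -> None:
        # Single-item removal; the cmdlet falls back to calling this in a loop.
        drop = set(urls)
        known = self._urls.get(file_hash, [])
        self._urls[file_hash] = [u for u in known if u not in drop]

    def delete_url_bulk(self, pairs: List[Tuple[str, List[str]]], config: Any = None) -> None:
        # One entry point for a whole batch, e.g. to wrap it in a single transaction.
        for file_hash, urls in pairs:
            self.delete_url(file_hash, urls, config=config)
```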