nose
2025-12-22 02:11:53 -08:00
parent d0b821b5dd
commit 16316bb3fd
20 changed files with 4218 additions and 2422 deletions


@@ -29,7 +29,7 @@ class Delete_Url(Cmdlet):
         arg=[
             SharedArgs.QUERY,
             SharedArgs.STORE,
-            CmdletArg("url", required=True, description="URL to remove"),
+            CmdletArg("url", required=False, description="URL to remove (optional when piping url rows)"),
         ],
         detail=[
             "- Removes URL association from file identified by hash+store",
@@ -69,22 +69,24 @@ class Delete_Url(Cmdlet):
log("Error: No store name provided")
return 1
if not url_arg:
log("Error: No URL provided")
return 1
# Normalize hash (single-item mode)
if not results and file_hash:
file_hash = normalize_hash(file_hash)
if not file_hash:
log("Error: Invalid hash format")
return 1
# Parse url (comma-separated)
urls = [u.strip() for u in str(url_arg).split(',') if u.strip()]
if not urls:
log("Error: No valid url provided")
return 1
from metadata import normalize_urls
def _urls_from_arg(raw: Any) -> List[str]:
if raw is None:
return []
# Support comma-separated input for backwards compatibility
if isinstance(raw, str) and "," in raw:
return [u.strip() for u in raw.split(",") if u.strip()]
return [u.strip() for u in normalize_urls(raw) if str(u).strip()]
urls_from_cli = _urls_from_arg(url_arg)
# Get backend and delete url
try:
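For reference, a self-contained sketch of the parsing rule the new _urls_from_arg helper applies. The normalize_urls stand-in below is an assumption for illustration only; the real helper is imported from metadata and may behave differently.

# Sketch only: normalize_urls here is a stand-in assumption, not the helper from metadata.
from typing import Any, List

def normalize_urls(raw: Any) -> List[str]:
    # Assumed behaviour: accept None, a single string, or an iterable of strings.
    if raw is None:
        return []
    if isinstance(raw, str):
        return [raw]
    return [str(u) for u in raw]

def urls_from_arg(raw: Any) -> List[str]:
    if raw is None:
        return []
    # Comma-separated strings keep working for backwards compatibility.
    if isinstance(raw, str) and "," in raw:
        return [u.strip() for u in raw.split(",") if u.strip()]
    return [u.strip() for u in normalize_urls(raw) if str(u).strip()]

print(urls_from_arg("https://a.example, https://b.example"))  # ['https://a.example', 'https://b.example']
print(urls_from_arg(["https://a.example"]))                    # ['https://a.example']
print(urls_from_arg(None))                                     # []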
@@ -145,7 +147,17 @@ class Delete_Url(Cmdlet):
                    )
                    continue
-                batch.setdefault(store_text, []).append((normalized, list(urls)))
+                # Determine which URLs to delete.
+                # - If user passed an explicit <url>, apply it to all items.
+                # - Otherwise, when piping url rows from get-url, delete the url(s) from each item.
+                item_urls = list(urls_from_cli)
+                if not item_urls:
+                    item_urls = [u.strip() for u in normalize_urls(get_field(item, "url") or get_field(item, "source_url")) if str(u).strip()]
+                if not item_urls:
+                    ctx.print_if_visible("[delete-url] Warning: Item has no url field; skipping", file=sys.stderr)
+                    continue
+                batch.setdefault(store_text, []).append((normalized, item_urls))
            for store_text, pairs in batch.items():
                try:
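A minimal sketch of the per-item resolution that feeds the batch, assuming plain dicts in place of the real result rows and the get_field accessor: an explicit <url> argument applies to every piped item, otherwise the row's own url/source_url field is used, and rows with neither are skipped.

from typing import Dict, List, Tuple

def build_batch(items: List[dict], urls_from_cli: List[str]) -> Dict[str, List[Tuple[str, List[str]]]]:
    batch: Dict[str, List[Tuple[str, List[str]]]] = {}
    for item in items:
        store = item.get("store", "")
        file_hash = item.get("hash", "")
        # Explicit CLI urls win; otherwise fall back to the piped row's own url field.
        item_urls = list(urls_from_cli) or [u for u in [item.get("url") or item.get("source_url")] if u]
        if not item_urls:
            continue  # the cmdlet warns and skips rows with no url at all
        batch.setdefault(store, []).append((file_hash, item_urls))
    return batch

rows = [
    {"store": "local", "hash": "abc", "url": "https://a.example"},
    {"store": "local", "hash": "def", "source_url": "https://b.example"},
    {"store": "remote", "hash": "f00"},  # skipped: no url field and no explicit argument
]
print(build_batch(rows, []))
# {'local': [('abc', ['https://a.example']), ('def', ['https://b.example'])]}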
@@ -168,24 +180,39 @@ class Delete_Url(Cmdlet):
                     for h, ulist in bulk_pairs:
                         backend.delete_url(h, ulist, config=config)
+                    deleted_count = 0
+                    for _h, ulist in bulk_pairs:
+                        deleted_count += len(ulist or [])
                     ctx.print_if_visible(
-                        f"✓ delete-url: {len(urls)} url(s) for {len(bulk_pairs)} item(s) in '{store_text}'",
+                        f"✓ delete-url: {deleted_count} url(s) for {len(bulk_pairs)} item(s) in '{store_text}'",
                         file=sys.stderr,
                     )
             for item in pass_through:
                 existing = get_field(item, "url")
-                _set_item_url(item, _remove_urls(existing, list(urls)))
+                # In batch mode we removed the union of requested urls for the file.
+                # Using urls_from_cli (if present) matches the user's explicit intent; otherwise
+                # remove the piped url row(s).
+                remove_set = urls_from_cli
+                if not remove_set:
+                    remove_set = [u.strip() for u in normalize_urls(get_field(item, "url") or get_field(item, "source_url")) if str(u).strip()]
+                _set_item_url(item, _remove_urls(existing, list(remove_set)))
                 ctx.emit(item)
             return 0
         # Single-item mode
+        if not urls_from_cli:
+            urls_from_cli = [u.strip() for u in normalize_urls(get_field(result, "url") or get_field(result, "source_url")) if str(u).strip()]
+        if not urls_from_cli:
+            log("Error: No URL provided")
+            return 1
         backend = storage[str(store_name)]
-        backend.delete_url(str(file_hash), urls, config=config)
-        ctx.print_if_visible(f"✓ delete-url: {len(urls)} url(s) removed", file=sys.stderr)
+        backend.delete_url(str(file_hash), list(urls_from_cli), config=config)
+        ctx.print_if_visible(f"✓ delete-url: {len(urls_from_cli)} url(s) removed", file=sys.stderr)
         if result is not None:
             existing = get_field(result, "url")
-            _set_item_url(result, _remove_urls(existing, list(urls)))
+            _set_item_url(result, _remove_urls(existing, list(urls_from_cli)))
             ctx.emit(result)
         return 0
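Finally, a small sketch of the pass-through bookkeeping, with assumed _remove_urls semantics (strip the just-deleted urls from the emitted row so downstream cmdlets see the updated state); the real _remove_urls and _set_item_url helpers in this file may differ.

from typing import List, Union

def remove_urls(existing: Union[str, List[str], None], to_remove: List[str]) -> List[str]:
    # Treat a bare string as a one-element list; drop anything that was just deleted.
    current = [existing] if isinstance(existing, str) else list(existing or [])
    drop = set(to_remove)
    return [u for u in current if u not in drop]

item = {"hash": "abc", "url": ["https://a.example", "https://b.example"]}
deleted = ["https://a.example"]
item["url"] = remove_urls(item["url"], deleted)
print(item)          # {'hash': 'abc', 'url': ['https://b.example']}
print(len(deleted))  # each row's deleted urls add to the deleted_count summary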