@@ -95,39 +95,9 @@ class Get_Url(Cmdlet):
                 return item.strip()
         return None
 
-    @staticmethod
-    def _extract_url_from_result(result: Any) -> Optional[str]:
-        # Prefer explicit url field.
-        u = Get_Url._extract_first_url(get_field(result, "url"))
-        if u:
-            return u
-
-        # Fall back to ResultTable-style columns list.
-        cols = None
-        if isinstance(result, dict):
-            cols = result.get("columns")
-        else:
-            cols = getattr(result, "columns", None)
-        if isinstance(cols, list):
-            for pair in cols:
-                try:
-                    if isinstance(pair, (list, tuple)) and len(pair) == 2:
-                        k, v = pair
-                        if str(k or "").strip().lower() in {"url", "urls"}:
-                            u2 = Get_Url._extract_first_url(v)
-                            if u2:
-                                return u2
-                except Exception:
-                    continue
-        return None
 
     @staticmethod
     def _extract_title_from_result(result: Any) -> Optional[str]:
-        # Prefer explicit title field.
-        t = get_field(result, "title")
-        if isinstance(t, str) and t.strip():
-            return t.strip()
-
         # Fall back to ResultTable-style columns list.
         cols = None
         if isinstance(result, dict):
@@ -318,6 +288,13 @@ class Get_Url(Cmdlet):
                 for url in (urls or []):
                     if not self._match_url_pattern(str(url), raw_pattern):
                         continue
+
+                    # Double-check it looks like a URL to avoid data leakage from dirty DBs
+                    from SYS.metadata import normalize_urls
+                    valid = normalize_urls([str(url)])
+                    if not valid:
+                        continue
+
                     items.append(
                         UrlItem(
                             url=str(url),
@@ -328,7 +305,7 @@ class Get_Url(Cmdlet):
                             ext=str(ext or ""),
                         )
                     )
-                found_stores.add(str(store_name))
+                    found_stores.add(str(store_name))
             except Exception as exc:
                 debug(
                     f"Error searching store '{store_name}': {exc}",
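The normalize_urls guard added above is the project's own helper from SYS.metadata. As a rough sketch of the kind of filtering it implies (assuming it keeps only well-formed absolute http(s) URLs; the real helper may normalize further):

from urllib.parse import urlparse
from typing import List

def normalize_urls_sketch(urls: List[str]) -> List[str]:
    """Keep only entries that parse as absolute http(s) URLs."""
    valid: List[str] = []
    for raw in urls:
        candidate = str(raw or "").strip()
        parsed = urlparse(candidate)
        # Require a scheme and a host so stray DB values (titles, paths,
        # empty strings) are dropped instead of leaking into results.
        if parsed.scheme in ("http", "https") and parsed.netloc:
            valid.append(candidate)
    return valid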
@@ -358,10 +335,6 @@ class Get_Url(Cmdlet):
         # Check if user provided a URL pattern to search for
         search_pattern = parsed.get("url")
 
-        # Allow piping a URL row (or any result with a url field/column) into get-url.
-        if not search_pattern:
-            search_pattern = self._extract_url_from_result(result)
-
         if search_pattern:
             # URL search mode: find all files with matching URLs across stores
             items, stores_searched = self._search_urls_across_stores(search_pattern, config)
@@ -405,9 +378,13 @@ class Get_Url(Cmdlet):
             }
             display_items.append(payload)
             table.add_result(payload)
-            ctx.emit(payload)
 
         ctx.set_last_result_table(table if display_items else None, display_items, subject=result)
 
+        # Emit after table state is finalized to prevent side effects in TUI rendering
+        for d in display_items:
+            ctx.emit(d)
+
         log(
             f"Found {len(items)} matching url(s) in {len(stores_searched)} store(s)"
         )
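The reordering above is the point of this hunk: rows are appended to the table, the table state is finalized via set_last_result_table, and only then are items emitted. A schematic sketch of that ordering with a stand-in context object (PipelineCtx is hypothetical; only the method names come from the diff):

from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

@dataclass
class PipelineCtx:
    # Hypothetical stand-in for the cmdlet pipeline context in the diff.
    last_table: Optional[Any] = None
    emitted: List[Dict[str, Any]] = field(default_factory=list)

    def set_last_result_table(self, table: Any, items: List[Dict[str, Any]], subject: Any = None) -> None:
        self.last_table = table

    def emit(self, item: Dict[str, Any]) -> None:
        self.emitted.append(item)

def render_results(ctx: PipelineCtx, table: Any, payloads: List[Dict[str, Any]]) -> None:
    display_items = list(payloads)
    # Finalize table state first...
    ctx.set_last_result_table(table if display_items else None, display_items)
    # ...then emit, so a re-render triggered by emit sees a consistent table.
    for d in display_items:
        ctx.emit(d)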
@@ -433,18 +410,16 @@ class Get_Url(Cmdlet):
             log("Error: No store name provided")
             return 1
 
         # Normalize hash
         file_hash = normalize_hash(file_hash)
         if not file_hash:
             log("Error: Invalid hash format")
             return 1
 
         # Get backend and retrieve url
         try:
             storage = Store(config)
             backend = storage[store_name]
 
             urls = backend.get_url(file_hash)
 
+            # Filter URLs to avoid data leakage from dirty DBs
+            from SYS.metadata import normalize_urls
+            urls = normalize_urls(urls)
+
             title = str(get_field(result, "title") or "").strip()
             table_title = "Title"
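The retrieval path above indexes a Store wrapper by store name and asks that backend for the URLs recorded against a hash, filtering before anything is displayed. A condensed sketch of the same flow (Store and the backend API are the project's own; the dict-backed stand-ins here are illustrative only):

from typing import Dict, List

class DictBackend:
    # Toy backend mapping hash -> recorded URLs.
    def __init__(self, rows: Dict[str, List[str]]) -> None:
        self._rows = rows

    def get_url(self, file_hash: str) -> List[str]:
        return list(self._rows.get(file_hash, []))

def get_urls_for_hash(stores: Dict[str, DictBackend], store_name: str, file_hash: str) -> List[str]:
    backend = stores[store_name]        # mirrors storage[store_name]
    urls = backend.get_url(file_hash)   # mirrors backend.get_url(...)
    # Same defensive filter the commit adds before display.
    return [u for u in urls if u.startswith(("http://", "https://"))]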
@@ -468,10 +443,15 @@ class Get_Url(Cmdlet):
             row.add_column("Url", u)
             item = UrlItem(url=u, hash=file_hash, store=str(store_name))
             items.append(item)
-            ctx.emit(item)
 
-        # Make this a real result table so @.. / @,, can navigate it
-        ctx.set_last_result_table(table if items else None, items, subject=result)
+        # Use overlay mode to avoid "merging" with the previous status/table state.
+        # This is idiomatic for detail views and prevents the search table from being
+        # contaminated by partial re-renders.
+        ctx.set_last_result_table_overlay(table if items else None, items, subject=result)
+
+        # Emit items at the end for pipeline continuity
+        for item in items:
+            ctx.emit(item)
 
         if not items:
             log("No url found", file=sys.stderr)
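The switch to set_last_result_table_overlay is motivated by the comments in the hunk: a detail view should replace the previous table state outright rather than merge into it. A toy model of that distinction (the overlay semantics here are assumed from those comments, not taken from the real implementation):

from typing import Any, List

class TableState:
    def __init__(self) -> None:
        self.rows: List[Any] = []

    def set_last_result_table(self, rows: List[Any]) -> None:
        # Regular setter as modeled here: merges into what is displayed.
        self.rows.extend(rows)

    def set_last_result_table_overlay(self, rows: List[Any]) -> None:
        # Overlay: the detail view fully replaces the previous table, so a
        # partial re-render cannot contaminate the earlier search results.
        self.rows = list(rows)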