kl

API/Tidal.py (16)
@@ -3,6 +3,7 @@ from __future__ import annotations
 from typing import Any, Dict, List, Optional, Set
 
 from .base import API, ApiError
+from SYS.logger import debug
 
 DEFAULT_BASE_URL = "https://tidal-api.binimum.org"
 
@@ -241,21 +242,24 @@ class Tidal(API):
 
         # 1. Fetch info (metadata) - fetch raw to ensure all fields are available for merging
         info_resp = self._get_json("info/", params={"id": track_int})
+        debug(f"[API.Tidal] info_resp (len={len(str(info_resp))}): {info_resp}")
         info_data = info_resp.get("data") if isinstance(info_resp, dict) else info_resp
         if not isinstance(info_data, dict) or "id" not in info_data:
             info_data = info_resp if isinstance(info_resp, dict) and "id" in info_resp else {}
 
         # 2. Fetch track (manifest/bit depth)
         track_resp = self.track(track_id)
+        debug(f"[API.Tidal] track_resp (len={len(str(track_resp))}): {track_resp}")
         # Note: track() method in this class currently returns raw JSON, so we handle it similarly.
         track_data = track_resp.get("data") if isinstance(track_resp, dict) else track_resp
-        if not isinstance(track_data, dict) or "id" not in track_data:
-            track_data = track_resp if isinstance(track_resp, dict) and "id" in track_resp else {}
+        if not isinstance(track_data, dict):
+            track_data = track_resp if isinstance(track_resp, dict) else {}
 
         # 3. Fetch lyrics
         lyrics_data = {}
         try:
             lyr_resp = self.lyrics(track_id)
+            debug(f"[API.Tidal] lyrics_resp (len={len(str(lyr_resp))}): {lyr_resp}")
             lyrics_data = lyr_resp.get("lyrics") or lyr_resp if isinstance(lyr_resp, dict) else {}
         except Exception:
             pass
@@ -267,18 +271,24 @@ class Tidal(API):
         if isinstance(track_data, dict):
             merged_md.update(track_data)
 
+        debug(f"[API.Tidal] merged_md keys: {list(merged_md.keys())}")
+
         # Derived tags and normalized/parsed info
         tags = build_track_tags(merged_md)
+        debug(f"[API.Tidal] generated tags: {tags}")
         parsed_info = parse_track_item(merged_md)
 
         # Structure for return
-        return {
+        res = {
             "metadata": merged_md,
             "parsed": parsed_info,
             "tags": list(tags),
             "lyrics": lyrics_data,
         }
+        debug(f"[API.Tidal] returning full_track_metadata keys: {list(res.keys())}")
+        return res
 
 
 # Legacy alias for TidalApiClient
 TidalApiClient = Tidal
+HifiApiClient = Tidal
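
Note (illustrative sketch, not part of the commit): how a caller might consume the bundle assembled above. The accessor name full_track_metadata is inferred from the debug message, and the track id value is a placeholder; treat both as assumptions rather than the confirmed public API.

    client = Tidal(base_url=DEFAULT_BASE_URL, timeout=10.0)
    bundle = client.full_track_metadata("12345678")   # assumed accessor wrapping the code above
    merged = bundle["metadata"]                        # info/ and track/ fields merged
    tags = bundle["tags"]                              # flat list from build_track_tags()
    lyrics = bundle.get("lyrics") or {}                # may carry "subtitles" (synced lyrics)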

CLI.py (14)
@@ -3731,18 +3731,32 @@ class PipelineExecutor:
         if emits:
             try:
                 from cmdlet import _shared as sh
+                from SYS import models
 
+                # 1. Apply -path persistence (moves temp files to final destination)
                 emits = sh.apply_output_path_from_pipeobjects(
                     cmd_name=cmd_name,
                     args=list(stage_args),
                     emits=emits,
                 )
 
+                # 2. METADATA STICKINESS / PROPAGATION
+                # We normalize all emitted items and merge metadata/tags from the previous stage.
+                # This ensures info like track titles/lyrics survive downloads/conversions.
+                # See cmdlet._shared.propagate_metadata for the merge logic.
+                prev_items = piped_result
+                if not isinstance(prev_items, (list, tuple)):
+                    prev_items = [prev_items] if prev_items else []
+
+                emits = sh.propagate_metadata(prev_items, emits)
+
                 try:
                     pipeline_ctx.emits = list(emits)
                 except Exception:
                     pass
             except Exception:
                 pass
 
         if emits:
             # If the cmdlet already installed an overlay table (e.g. get-tag),
             # don't overwrite it: set_last_result_items_only() would clear the
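
Note (toy illustration, not part of the commit): the intended effect of the propagation step above, assuming coerce_to_pipe_object in cmdlet._shared accepts plain dicts, as its use further down suggests.

    prev = [{"hash": "abc", "tag": ["title:Song A"], "extra": {"notes": {"lyric": "..."}}}]
    emitted = [{"hash": "abc", "path": "/tmp/song_a.flac"}]
    merged = sh.propagate_metadata(prev, emitted)
    # merged[0] keeps the downloaded path but inherits the tag and the lyric note,
    # so a later add-file / add-note stage still sees them.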
@@ -13,13 +13,15 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple
 from urllib.parse import urlparse
 
 from API.Tidal import (
-    HifiApiClient,
+    Tidal as TidalApiClient,
     build_track_tags,
     coerce_duration_seconds,
     extract_artists,
     stringify,
 )
 from ProviderCore.base import Provider, SearchResult, parse_inline_query_arguments
+from ProviderCore.inline_utils import collect_choice
+from cmdlet._shared import get_field
 from SYS import pipeline as pipeline_context
 from SYS.logger import debug, log
 
@@ -64,7 +66,9 @@ def _format_total_seconds(seconds: Any) -> str:
     return f"{mins}:{secs:02d}"
 
 
-class Tidal(Provider):
+class HIFI(Provider):
 
+    PROVIDER_NAME = "hifi"
+
     TABLE_AUTO_STAGES = {
         "hifi.track": ["download-file"],
@@ -97,7 +101,7 @@ class Tidal(Provider):
             self.api_timeout = float(self.config.get("timeout", 10.0))
         except Exception:
             self.api_timeout = 10.0
-        self.api_clients = [HifiApiClient(base_url=url, timeout=self.api_timeout) for url in self.api_urls]
+        self.api_clients = [TidalApiClient(base_url=url, timeout=self.api_timeout) for url in self.api_urls]
 
     def extract_query_arguments(self, query: str) -> Tuple[str, Dict[str, Any]]:
         normalized, parsed = parse_inline_query_arguments(query)
@@ -281,7 +285,7 @@ class Tidal(Provider):
         if isinstance(detail, dict):
             title = self._stringify(detail.get("title")) or title
 
-        return SearchResult(
+        res = SearchResult(
             table="hifi.track",
             title=title,
             path=f"hifi://track/{track_id}",
@@ -291,6 +295,12 @@ class Tidal(Provider):
             full_metadata=dict(detail) if isinstance(detail, dict) else {},
             selection_args=["-url", f"hifi://track/{track_id}"],
         )
+        if isinstance(detail, dict):
+            try:
+                res.tag = self._build_track_tags(detail)
+            except Exception:
+                pass
+        return res
 
     def _extract_artist_selection_context(self, selected_items: List[Any]) -> List[Tuple[int, str]]:
         contexts: List[Tuple[int, str]] = []
@@ -1130,25 +1140,36 @@ class Tidal(Provider):
         md = dict(getattr(result, "full_metadata") or {})
 
         track_id = self._extract_track_id_from_result(result)
-        if track_id:
+        debug(f"[hifi] download: track_id={track_id}, manifest_present={bool(md.get('manifest'))}, tag_count={len(result.tag) if result.tag else 0}")
+
+        # Enrichment: fetch full metadata if manifest or detailed info (like tags/lyrics) is missing.
+        # We check for 'manifest' because it's required for DASH playback.
+        # We also check for lyrics/subtitles to ensure they are available for add-file.
+        has_lyrics = bool(md.get("_tidal_lyrics_subtitles")) or bool(md.get("lyrics"))
+
+        if track_id and (not md.get("manifest") or not md.get("artist") or len(result.tag or []) <= 1 or not has_lyrics):
+            debug(f"[hifi] Enriching track data (reason: manifest={not md.get('manifest')}, lyrics={not has_lyrics}, tags={len(result.tag or [])})")
             # Multi-part enrichment from API: metadata, tags, and lyrics.
             full_data = self._fetch_all_track_data(track_id)
+            debug(f"[hifi] download: enrichment full_data present={bool(full_data)}")
             if isinstance(full_data, dict):
                 # 1. Update metadata
                 api_md = full_data.get("metadata")
                 if isinstance(api_md, dict):
+                    debug(f"[hifi] download: updating metadata with {len(api_md)} keys")
                     md.update(api_md)
 
                 # 2. Update tags (re-sync result.tag so cmdlet sees them)
                 api_tags = full_data.get("tags")
+                debug(f"[hifi] download: enrichment tags={api_tags}")
                 if isinstance(api_tags, list) and api_tags:
                     result.tag = set(api_tags)
 
                 # 3. Handle lyrics
-                lyrics = full_data.get("lyrics")
-                if isinstance(lyrics, dict) and lyrics:
-                    md.setdefault("lyrics", lyrics)
-                    subtitles = lyrics.get("subtitles")
+                lyrics_dict = full_data.get("lyrics")
+                if isinstance(lyrics_dict, dict) and lyrics_dict:
+                    md.setdefault("lyrics", lyrics_dict)
+                    subtitles = lyrics_dict.get("subtitles")
                     if isinstance(subtitles, str) and subtitles.strip():
                         md["_tidal_lyrics_subtitles"] = subtitles.strip()
 
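
Note (sketch, not code from the commit): the enrichment trigger added above, restated as a standalone predicate for readability.

    def needs_enrichment(md: dict, tags: list) -> bool:
        has_lyrics = bool(md.get("_tidal_lyrics_subtitles")) or bool(md.get("lyrics"))
        return (not md.get("manifest")      # manifest is required for DASH playback
                or not md.get("artist")
                or len(tags or []) <= 1
                or not has_lyrics)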
@@ -1328,7 +1349,7 @@ class Tidal(Provider):
 
         return False, None
 
-    def _get_api_client_for_base(self, base_url: str) -> Optional[HifiApiClient]:
+    def _get_api_client_for_base(self, base_url: str) -> Optional[TidalApiClient]:
         base = base_url.rstrip("/")
         for client in self.api_clients:
             if getattr(client, "base_url", "").rstrip("/") == base:
@@ -1739,6 +1760,10 @@ class Tidal(Provider):
             or payload.get("path")
             or payload.get("url")
         )
+        # Guard against method binding (e.g. str.title) being returned by getattr(str, "title")
+        if callable(title):
+            title = None
+
         if not title:
             title = f"Track {track_id}"
         path = (
@@ -1983,12 +2008,6 @@ class Tidal(Provider):
 
         return True
 
-        # Optimization: If we are selecting tracks, do NOT force a "Detail View" (resolving manifest) here.
-        # This allows batch selection to flow immediately to `download-file` (via TABLE_AUTO_STAGES)
-        # or other downstream cmdlets. The download logic (HIFI.download) handles manifest resolution locally.
-        if table_type == "hifi.track" or (is_generic_hifi and any(str(get_field(i, "path")).startswith("hifi://track/") for i in selected_items)):
-            return False
-
         contexts = self._extract_track_selection_context(selected_items)
         try:
             debug(f"[hifi.selector] track contexts={len(contexts)}")
@@ -501,6 +501,26 @@ class InternetArchive(Provider):
         "internetarchive.formats": ["download-file"],
     }
 
+    def maybe_show_picker(
+        self,
+        *,
+        url: str,
+        item: Optional[Any] = None,
+        parsed: Dict[str, Any],
+        config: Dict[str, Any],
+        quiet_mode: bool,
+    ) -> Optional[int]:
+        """Generic hook for download-file to show a selection table for IA items."""
+        from cmdlet._shared import get_field as sh_get_field
+        return maybe_show_formats_table(
+            raw_urls=[url] if url else [],
+            piped_items=[item] if item else [],
+            parsed=parsed,
+            config=config,
+            quiet_mode=quiet_mode,
+            get_field=sh_get_field,
+        )
+
     def __init__(self, config: Optional[Dict[str, Any]] = None):
         super().__init__(config)
         conf = _pick_provider_config(self.config)
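
Note (hedged example, not part of the diff): a possible call site in download-file. The URL is a placeholder and the return-value handling is an assumption based on the Optional[int] signature.

    provider = InternetArchive(config)
    exit_code = provider.maybe_show_picker(
        url="https://archive.org/details/example-item",   # placeholder URL
        item=None,
        parsed={},
        config=config,
        quiet_mode=False,
    )
    if exit_code is not None:
        return exit_code   # a formats table was shown and handled the selection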
@@ -11,11 +11,16 @@ import sys
 import tempfile
 from collections.abc import Iterable as IterableABC
 
-from SYS.logger import log
+from SYS.logger import log, debug
 from pathlib import Path
 from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Set
 from dataclasses import dataclass, field
 from SYS import models
+from SYS import pipeline as pipeline_context
+from SYS.result_table import ResultTable
+from SYS.rich_display import stderr_console as get_stderr_console
+from rich.prompt import Confirm
+from contextlib import AbstractContextManager, nullcontext
 
 
 @dataclass
@@ -2405,6 +2410,117 @@ def coerce_to_pipe_object(
     return pipe_obj
 
 
+def propagate_metadata(
+    previous_items: Sequence[Any],
+    new_items: Sequence[Any]
+) -> List[Any]:
+    """Merge metadata/tags from previous pipeline stage into new items.
+
+    Implements "sticky metadata": items generated by a transformation (download, convert)
+    should inherit rich info (lyrics, art, tags) from their source.
+
+    Strategies:
+    A. Hash Match: If inputs/outputs share a hash, they are the same item.
+    B. Index Match: If lists are same length, assume 1:1 mapping (heuristic).
+    C. Explicit Parent: If output has `parent_hash`, link to input with that hash.
+    """
+    if not previous_items or not new_items:
+        return list(new_items)
+
+    try:
+        prev_normalized = [coerce_to_pipe_object(p) for p in previous_items]
+    except Exception:
+        return list(new_items)
+
+    prev_by_hash: Dict[str, models.PipeObject] = {}
+    for p_obj in prev_normalized:
+        if p_obj.hash and p_obj.hash != "unknown":
+            prev_by_hash[p_obj.hash] = p_obj
+
+    normalized: List[models.PipeObject] = []
+
+    # Pre-calculate length matching for heuristic
+    is_same_length = len(new_items) == len(prev_normalized)
+
+    for i, item in enumerate(new_items):
+        try:
+            obj = coerce_to_pipe_object(item)
+        except Exception:
+            normalized.append(item)  # Should not happen given coerce guards
+            continue
+
+        parent: Optional[models.PipeObject] = None
+
+        # Strategy A: Precise Hash Match
+        if obj.hash in prev_by_hash:
+            parent = prev_by_hash[obj.hash]
+
+        # Strategy B: Index Match (Heuristic)
+        if not parent and is_same_length:
+            parent = prev_normalized[i]
+
+        # Strategy C: Explicit Parent Hash
+        if not parent and obj.parent_hash and obj.parent_hash in prev_by_hash:
+            parent = prev_by_hash[obj.parent_hash]
+
+        if parent:
+            # 1. Tags: Merge unique tags
+            if parent.tag:
+                if not obj.tag:
+                    obj.tag = list(parent.tag)
+                else:
+                    curr_tags = {str(t).lower() for t in obj.tag}
+                    for pt in parent.tag:
+                        if str(pt).lower() not in curr_tags:
+                            obj.tag.append(pt)
+
+            # 2. Metadata: Merge missing keys
+            if parent.metadata:
+                if not obj.metadata:
+                    obj.metadata = parent.metadata.copy()
+                else:
+                    for mk, mv in parent.metadata.items():
+                        if mk not in obj.metadata:
+                            obj.metadata[mk] = mv
+
+            # 3. Source URL: Propagate if missing
+            if parent.source_url and not obj.source_url:
+                obj.source_url = parent.source_url
+            elif parent.url and not obj.source_url and not obj.url:
+                # If parent had a URL and child has none, it's likely the source
+                obj.source_url = parent.url
+
+            # 4. Relationships: Merge missing keys
+            if parent.relationships:
+                if not obj.relationships:
+                    obj.relationships = parent.relationships.copy()
+                else:
+                    for rk, rv in parent.relationships.items():
+                        if rk not in obj.relationships:
+                            obj.relationships[rk] = rv
+
+            # 5. Extra (Notes/etc): Merge missing keys
+            # Important for passing 'notes' payload (lyrics, captions)
+            if parent.extra:
+                if not obj.extra:
+                    obj.extra = parent.extra.copy()
+                else:
+                    # Recursive merge for 'notes' dict specifically?
+                    # For now just shallow merge keys, but handle 'notes' specially if valid.
+                    for ek, ev in parent.extra.items():
+                        if ek not in obj.extra:
+                            obj.extra[ek] = ev
+                        elif ek == "notes" and isinstance(ev, dict) and isinstance(obj.extra[ek], dict):
+                            # Merge notes dict
+                            for nk, nv in ev.items():
+                                if nk not in obj.extra[ek]:
+                                    obj.extra[ek][nk] = nv
+
+        normalized.append(obj)
+
+    return normalized
+
+
 def register_url_with_local_library(
     pipe_obj: models.PipeObject,
     config: Dict[str,
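
Note (toy demonstration, not part of the commit): the three parent-matching strategies described in the docstring, assuming coerce_to_pipe_object accepts plain dicts with these field names; values are placeholders.

    prev = [
        {"hash": "aaa", "tag": ["title:Song A"]},
        {"hash": "bbb", "tag": ["title:Song B"]},
    ]
    by_hash = propagate_metadata(prev, [{"hash": "aaa", "path": "a.flac"}])                            # Strategy A
    by_index = propagate_metadata(prev, [{"path": "a.flac"}, {"path": "b.flac"}])                      # Strategy B
    by_parent = propagate_metadata(prev, [{"hash": "zzz", "parent_hash": "bbb", "path": "b.flac"}])    # Strategy C
    # In each case the returned PipeObjects carry the matching parent's tags/metadata.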
@@ -2518,12 +2634,12 @@ def resolve_tidal_manifest_path(item: Any) -> Optional[str]:
 
     if candidate_path:
         m = re.search(
-            r"tidal:(?://)?track[\\/](\d+)",
+            r"(tidal|hifi):(?://)?track[\\/](\d+)",
            str(candidate_path),
            flags=re.IGNORECASE,
         )
         if m:
-            track_id = m.group(1)
+            track_id = m.group(2)
 
     if (not already) and track_id is not None:
         try:
@@ -2706,3 +2822,327 @@ def resolve_tidal_manifest_path(item: Any) -> Optional[str]:
             return None
 
     return str(target_path)
+
+
+def check_url_exists_in_storage(
+    urls: Sequence[str],
+    storage: Any,
+    hydrus_available: bool,
+    final_output_dir: Optional[Path] = None,
+) -> bool:
+    """Pre-flight check to see if URLs already exist in storage.
+
+    Args:
+        urls: List of URLs to check
+        storage: The storage interface
+        hydrus_available: Whether Hydrus is available
+        final_output_dir: Final output directory (to skip if same as storage)
+
+    Returns:
+        True if check passed (user said yes or no dups), False if user said no (stop).
+    """
+    if storage is None:
+        debug("Bulk URL preflight skipped: storage unavailable")
+        return True
+
+    try:
+        current_cmd_text = pipeline_context.get_current_command_text("")
+    except Exception:
+        current_cmd_text = ""
+
+    try:
+        stage_ctx = pipeline_context.get_stage_context()
+    except Exception:
+        stage_ctx = None
+
+    in_pipeline = bool(stage_ctx is not None or ("|" in str(current_cmd_text or "")))
+    if in_pipeline:
+        try:
+            cached_cmd = pipeline_context.load_value("preflight.url_duplicates.command", default="")
+            cached_decision = pipeline_context.load_value("preflight.url_duplicates.continue", default=None)
+        except Exception:
+            cached_cmd = ""
+            cached_decision = None
+
+        if cached_decision is not None and str(cached_cmd or "") == str(current_cmd_text or ""):
+            if bool(cached_decision):
+                return True
+            try:
+                pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
+            except Exception:
+                pass
+            return False
+
+    unique_urls: List[str] = []
+    for u in urls or []:
+        s = str(u or "").strip()
+        if s and s not in unique_urls:
+            unique_urls.append(s)
+    if len(unique_urls) == 0:
+        return True
+
+    try:
+        from SYS.metadata import normalize_urls
+    except Exception:
+        normalize_urls = None  # type: ignore[assignment]
+
+    def _httpish(value: str) -> bool:
+        try:
+            return bool(value) and (value.startswith("http://") or value.startswith("https://"))
+        except Exception:
+            return False
+
+    url_needles: Dict[str, List[str]] = {}
+    for u in unique_urls:
+        needles: List[str] = []
+        if normalize_urls is not None:
+            try:
+                needles.extend([n for n in (normalize_urls(u) or []) if isinstance(n, str)])
+            except Exception:
+                needles = []
+        if not needles:
+            needles = [u]
+        filtered: List[str] = []
+        for n in needles:
+            n2 = str(n or "").strip()
+            if not n2:
+                continue
+            if not _httpish(n2):
+                continue
+            if n2 not in filtered:
+                filtered.append(n2)
+        url_needles[u] = filtered if filtered else [u]
+
+    backend_names: List[str] = []
+    try:
+        backend_names_all = storage.list_searchable_backends()
+    except Exception:
+        backend_names_all = []
+
+    for backend_name in backend_names_all:
+        try:
+            backend = storage[backend_name]
+        except Exception:
+            continue
+
+        try:
+            if str(backend_name).strip().lower() == "temp":
+                continue
+        except Exception:
+            pass
+
+        try:
+            backend_location = getattr(backend, "_location", None)
+            if backend_location and final_output_dir:
+                backend_path = Path(str(backend_location)).expanduser().resolve()
+                temp_path = Path(str(final_output_dir)).expanduser().resolve()
+                if backend_path == temp_path:
+                    continue
+        except Exception:
+            pass
+
+        backend_names.append(backend_name)
+
+    if not backend_names:
+        debug("Bulk URL preflight skipped: no searchable backends")
+        return True
+
+    seen_pairs: set[tuple[str, str]] = set()
+    matched_urls: set[str] = set()
+    match_rows: List[Dict[str, Any]] = []
+    max_rows = 200
+
+    try:
+        from Store.HydrusNetwork import HydrusNetwork
+    except Exception:
+        HydrusNetwork = None  # type: ignore
+
+    for backend_name in backend_names:
+        if len(match_rows) >= max_rows:
+            break
+        try:
+            backend = storage[backend_name]
+        except Exception:
+            continue
+
+        if HydrusNetwork is not None and isinstance(backend, HydrusNetwork):
+            if not hydrus_available:
+                continue
+
+            client = getattr(backend, "_client", None)
+            if client is None:
+                continue
+
+            for original_url, needles in url_needles.items():
+                if len(match_rows) >= max_rows:
+                    break
+                if (original_url, str(backend_name)) in seen_pairs:
+                    continue
+
+                found_hash: Optional[str] = None
+                found = False
+                for needle in (needles or [])[:3]:
+                    if not _httpish(needle):
+                        continue
+                    try:
+                        from API.HydrusNetwork import HydrusRequestSpec
+
+                        spec = HydrusRequestSpec(
+                            method="GET",
+                            endpoint="/add_urls/get_url_files",
+                            query={"url": needle},
+                        )
+                        # Access internal client safely if possible, else skip check
+                        if hasattr(client, "_perform_request"):
+                            response = client._perform_request(spec)
+                            raw_hashes = None
+                            if isinstance(response, dict):
+                                raw_hashes = response.get("hashes") or response.get("file_hashes")
+                                raw_ids = response.get("file_ids")
+                            has_ids = isinstance(raw_ids, list) and len(raw_ids) > 0
+                            has_hashes = isinstance(raw_hashes, list) and len(raw_hashes) > 0
+                            if has_hashes:
+                                try:
+                                    found_hash = str(raw_hashes[0]).strip()
+                                except Exception:
+                                    found_hash = None
+                            if has_ids or has_hashes:
+                                found = True
+                                break
+                    except Exception:
+                        continue
+
+                if not found:
+                    continue
+
+                seen_pairs.add((original_url, str(backend_name)))
+                matched_urls.add(original_url)
+                display_row = {
+                    "title": "(exists)",
+                    "store": str(backend_name),
+                    "hash": found_hash or "",
+                    "url": original_url,
+                    "columns": [
+                        ("Title", "(exists)"),
+                        ("Store", str(backend_name)),
+                        ("Hash", found_hash or ""),
+                        ("URL", original_url),
+                    ],
+                }
+                match_rows.append(display_row)
+            continue
+
+        for original_url, needles in url_needles.items():
+            if len(match_rows) >= max_rows:
+                break
+            if (original_url, str(backend_name)) in seen_pairs:
+                continue
+
+            backend_hits: List[Dict[str, Any]] = []
+            for needle in (needles or [])[:3]:
+                try:
+                    backend_hits = backend.search(f"url:{needle}", limit=1) or []
+                    if backend_hits:
+                        break
+                except Exception:
+                    continue
+
+            if not backend_hits:
+                continue
+
+            seen_pairs.add((original_url, str(backend_name)))
+            matched_urls.add(original_url)
+            hit = backend_hits[0]
+            title = hit.get("title") or hit.get("name") or hit.get("target") or hit.get("path") or "(exists)"
+            file_hash = hit.get("hash") or hit.get("file_hash") or hit.get("sha256") or ""
+
+            try:
+                from SYS.result_table import build_display_row
+                extracted = build_display_row(hit, keys=["title", "store", "hash", "ext", "size"])
+            except Exception:
+                extracted = {}
+
+            extracted["title"] = str(title)
+            extracted["store"] = str(hit.get("store") or backend_name)
+            extracted["hash"] = str(file_hash or "")
+
+            ext = extracted.get("ext")
+            size_val = extracted.get("size")
+
+            display_row = {
+                "title": str(title),
+                "store": str(hit.get("store") or backend_name),
+                "hash": str(file_hash or ""),
+                "ext": str(ext or ""),
+                "size": size_val,
+                "url": original_url,
+                "columns": [
+                    ("Title", str(title)),
+                    ("Store", str(hit.get("store") or backend_name)),
+                    ("Hash", str(file_hash or "")),
+                    ("Ext", str(ext or "")),
+                    ("Size", size_val),
+                    ("URL", original_url),
+                ],
+            }
+            match_rows.append(display_row)
+
+    if not match_rows:
+        debug("Bulk URL preflight: no matches")
+        return True
+
+    table = ResultTable(f"URL already exists ({len(matched_urls)} url(s))", max_columns=10)
+    table.set_no_choice(True)
+    try:
+        table.set_preserve_order(True)
+    except Exception:
+        pass
+
+    for row in match_rows:
+        table.add_result(row)
+
+    try:
+        pipeline_context.set_last_result_table_overlay(table, match_rows)
+    except Exception:
+        pass
+
+    suspend = getattr(pipeline_context, "suspend_live_progress", None)
+    cm: AbstractContextManager[Any] = nullcontext()
+    if callable(suspend):
+        try:
+            maybe_cm = suspend()
+            if maybe_cm is not None:
+                cm = maybe_cm  # type: ignore[assignment]
+        except Exception:
+            cm = nullcontext()
+
+    with cm:
+        get_stderr_console().print(table)
+        setattr(table, "_rendered_by_cmdlet", True)
+        answered_yes = bool(Confirm.ask("Continue anyway?", default=False, console=get_stderr_console()))
+
+    if in_pipeline:
+        try:
+            existing = pipeline_context.load_value("preflight", default=None)
+        except Exception:
+            existing = None
+        preflight_cache: Dict[str, Any] = existing if isinstance(existing, dict) else {}
+        url_dup_cache = preflight_cache.get("url_duplicates")
+        if not isinstance(url_dup_cache, dict):
+            url_dup_cache = {}
+        url_dup_cache["command"] = str(current_cmd_text or "")
+        url_dup_cache["continue"] = bool(answered_yes)
+        preflight_cache["url_duplicates"] = url_dup_cache
+        try:
+            pipeline_context.store_value("preflight", preflight_cache)
+        except Exception:
+            pass
+
+    if not answered_yes:
+        if in_pipeline:
+            try:
+                pipeline_context.request_pipeline_stop(reason="duplicate-url declined", exit_code=0)
+            except Exception:
+                pass
+        return False
+    return True
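
Note (hedged usage sketch, not part of the commit): how a download cmdlet might call the new preflight helper. The storage object and config plumbing are application objects not shown here; values are placeholders.

    proceed = check_url_exists_in_storage(
        urls=["https://example.com/audio/track.flac"],
        storage=storage,                     # registry exposing list_searchable_backends() / __getitem__
        hydrus_available=True,
        final_output_dir=Path("/tmp/downloads"),
    )
    if not proceed:
        return 0   # user declined at the duplicate prompt; a pipeline stop was already requested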
@@ -209,11 +209,8 @@ class Add_Note(Cmdlet):
         note_name = str(note_name or "").strip()
         note_text = str(note_text or "").strip()
         if not note_name or not note_text:
-            log(
-                "[add_note] Error: -query must include title:<title> and text:<text>",
-                file=sys.stderr,
-            )
-            return 1
+            pass  # We now support implicit pipeline notes if -query is missing
+            # But if explicit targeting (store+hash) is used, we still demand args below.
 
         if hash_override and not store_override:
             log(
@@ -224,6 +221,14 @@ class Add_Note(Cmdlet):
 
         explicit_target = bool(hash_override and store_override)
         results = normalize_result_input(result)
+
+        if explicit_target and (not note_name or not note_text):
+            log(
+                "[add_note] Error: Explicit target (store+hash) requires -query with title/text",
+                file=sys.stderr,
+            )
+            return 1
+
         if results and explicit_target:
             # Direct targeting mode: apply note once to the explicit target and
             # pass through any piped items unchanged.
@@ -287,7 +292,36 @@ class Add_Note(Cmdlet):
                 ctx.emit(res)
                 continue
 
-            item_note_text = note_text
+            # Determine notes to write for this item
+            notes_to_write: List[Tuple[str, str]] = []
+
+            # 1. Explicit arguments always take precedence
+            if note_name and note_text:
+                notes_to_write.append((note_name, note_text))
+
+            # 2. Pipeline notes auto-ingestion
+            # Look for 'notes' dictionary in the item (propagated by pipeline/download-file)
+            # Structure: {'notes': {'lyric': '...', 'sub': '...'}}
+            # Check both root and nested 'extra'
+
+            # Check root 'notes' (dict or extra.notes)
+            pipeline_notes = res.get("notes")
+            if not isinstance(pipeline_notes, dict):
+                extra = res.get("extra")
+                if isinstance(extra, dict):
+                    pipeline_notes = extra.get("notes")
+
+            if isinstance(pipeline_notes, dict):
+                for k, v in pipeline_notes.items():
+                    # If arg-provided note conflicts effectively with pipeline note?
+                    # We just append both.
+                    if v and str(v).strip():
+                        notes_to_write.append((str(k), str(v)))
+
+            if not notes_to_write:
+                # Pass through items that have nothing to add
+                ctx.emit(res)
+                continue
 
             store_name = str(store_override or res.get("store") or "").strip()
             raw_hash = res.get("hash")
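
Note (data-shape illustration, not part of the diff): the pipeline-notes payload the loop above consumes. The 'lyric'/'sub' keys are the examples named in the comment; any non-empty string values are written as notes.

    res = {
        "store": "local",
        "hash": "abc123...",
        "extra": {"notes": {"lyric": "synced lyrics text", "sub": "caption text"}},
    }
    # With no -query arguments, notes_to_write becomes:
    #   [("lyric", "synced lyrics text"), ("sub", "caption text")]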
@@ -298,7 +332,7 @@ class Add_Note(Cmdlet):
                     "[add_note] Error: Missing -store and item has no store field",
                     file=sys.stderr
                 )
-                return 1
+                continue
 
             resolved_hash = self._resolve_hash(
                 raw_hash=str(raw_hash) if raw_hash else None,
@@ -312,80 +346,43 @@ class Add_Note(Cmdlet):
                 )
                 ctx.emit(res)
                 continue
 
-            try:
-                backend = store_registry[store_name]
-            except Exception as exc:
-                log(
-                    f"[add_note] Error: Unknown store '{store_name}': {exc}",
-                    file=sys.stderr
-                )
-                return 1
-
-            # Queue for bulk write per store. We still emit items immediately;
-            # the pipeline only advances after this cmdlet returns.
-            note_ops.setdefault(store_name,
-                []).append((resolved_hash,
-                note_name,
-                item_note_text))
-            planned_ops += 1
+            # Queue operations
+            if store_name not in note_ops:
+                note_ops[store_name] = []
+
+            for (n_name, n_text) in notes_to_write:
+                note_ops[store_name].append((resolved_hash, n_name, n_text))
+                planned_ops += 1
 
             ctx.emit(res)
 
-        # Execute bulk writes per store.
-        successful_writes = 0
+        # Execute batch operations
+        success_count = 0
         for store_name, ops in note_ops.items():
-            if not ops:
-                continue
             try:
                 backend = store_registry[store_name]
-            except Exception:
-                continue
-
-            store_success = 0
-            bulk_fn = getattr(backend, "set_note_bulk", None)
-            if callable(bulk_fn):
-                try:
-                    ok = bool(bulk_fn(list(ops), config=config))
-                    if ok:
-                        store_success += len(ops)
-                        ctx.print_if_visible(
-                            f"✓ add-note: {len(ops)} item(s) in '{store_name}'",
-                            file=sys.stderr
-                        )
-                        successful_writes += store_success
-                        continue
-                    log(
-                        f"[add_note] Warning: bulk set_note returned False for '{store_name}'",
-                        file=sys.stderr,
-                    )
-                except Exception as exc:
-                    log(
-                        f"[add_note] Warning: bulk set_note failed for '{store_name}': {exc}; falling back",
-                        file=sys.stderr,
-                    )
-
-            # Fallback: per-item writes
-            for file_hash, name, text in ops:
-                try:
-                    ok = bool(backend.set_note(file_hash, name, text, config=config))
-                    if ok:
-                        store_success += 1
-                except Exception:
-                    continue
-
-            if store_success:
-                successful_writes += store_success
-                ctx.print_if_visible(
-                    f"✓ add-note: {store_success} item(s) in '{store_name}'",
-                    file=sys.stderr
-                )
-
-        log(
-            f"[add_note] Updated {successful_writes}/{planned_ops} item(s)",
-            file=sys.stderr
-        )
-        return 0 if successful_writes > 0 else 1
+                if not hasattr(backend, "set_note"):
+                    log(f"[add_note] Store '{store_name}' does not support notes", file=sys.stderr)
+                    continue
+
+                for (h, name, text) in ops:
+                    try:
+                        if backend.set_note(h, name, text, config=config):
+                            success_count += 1
+                    except Exception as e:
+                        log(f"[add_note] Write failed {store_name}:{h} ({name}): {e}", file=sys.stderr)
+
+            except Exception as e:
+                log(f"[add_note] Store access failed '{store_name}': {e}", file=sys.stderr)
+
+        if planned_ops > 0:
+            msg = f"✓ add-note: Updated {success_count}/{planned_ops} notes across {len(note_ops)} stores"
+            ctx.print_if_visible(msg, file=sys.stderr)
+
+        return 0
 
 
 CMDLET = Add_Note()
 
File diff suppressed because it is too large