2025-12-11 12:47:30 -08:00
from __future__ import annotations
from typing import Any , Dict , List , Sequence , Optional
from pathlib import Path
import sys
2025-12-11 19:04:02 -08:00
from SYS . logger import log
2025-12-11 12:47:30 -08:00
import models
import pipeline as ctx
from . _shared import normalize_result_input , filter_results_by_temp
2025-12-11 23:21:45 -08:00
from . _shared import (
Cmdlet ,
CmdletArg ,
SharedArgs ,
normalize_hash ,
parse_tag_arguments ,
expand_tag_groups ,
parse_cmdlet_args ,
collapse_namespace_tag ,
should_show_help ,
get_field ,
)
from Store import Store
from SYS . utils import sha256_file
def _extract_title_tag ( tags : List [ str ] ) - > Optional [ str ] :
""" Return the value of the first title: tag if present. """
for t in tags :
if t . lower ( ) . startswith ( " title: " ) :
value = t . split ( " : " , 1 ) [ 1 ] . strip ( )
return value or None
return None
def _extract_item_tags(res: Any) -> List[str]:
    """Return the item's ``tag`` payload as a list of strings ([] when absent).

    Accepts either a PipeObject (attribute access) or a plain dict; any other
    shape yields an empty list. A bare non-blank string becomes a one-item list.
    """
    if isinstance(res, models.PipeObject):
        raw = getattr(res, "tag", None)
    else:
        raw = res.get("tag") if isinstance(res, dict) else None
    if isinstance(raw, list):
        # Stringify entries, dropping explicit None placeholders.
        return [str(t) for t in raw if t is not None]
    return [raw] if isinstance(raw, str) and raw.strip() else []
def _set_item_tags(res: Any, tags: List[str]) -> None:
    """Write *tags* back onto the item in place; unknown item shapes are ignored."""
    if isinstance(res, models.PipeObject):
        res.tag = tags
        return
    if isinstance(res, dict):
        res["tag"] = tags
def _apply_title_to_result(res: Any, title_value: Optional[str]) -> None:
    """Update result object/dict title fields and columns in-place.

    No-op when *title_value* is falsy (None/empty). Otherwise this defers to
    ``_update_item_title_fields``, which holds the single copy of the
    title/column mutation logic; the two functions previously duplicated the
    same body verbatim, which risked the copies drifting apart.
    """
    if not title_value:
        return
    _update_item_title_fields(res, title_value)
def _matches_target (
item : Any ,
target_hash : Optional [ str ] ,
target_path : Optional [ str ] ,
target_store : Optional [ str ] = None ,
) - > bool :
""" Determine whether a result item refers to the given target.
Important : hashes can collide across backends in this app ' s UX (same media in
multiple stores ) . When target_store is provided , it must match too .
"""
2025-12-11 23:21:45 -08:00
def norm ( val : Any ) - > Optional [ str ] :
return str ( val ) . lower ( ) if val is not None else None
target_hash_l = target_hash . lower ( ) if target_hash else None
target_path_l = target_path . lower ( ) if target_path else None
2025-12-12 21:55:38 -08:00
target_store_l = target_store . lower ( ) if target_store else None
2025-12-11 23:21:45 -08:00
if isinstance ( item , dict ) :
hashes = [ norm ( item . get ( " hash " ) ) ]
paths = [ norm ( item . get ( " path " ) ) ]
2025-12-12 21:55:38 -08:00
stores = [ norm ( item . get ( " store " ) ) ]
2025-12-11 23:21:45 -08:00
else :
hashes = [ norm ( get_field ( item , " hash " ) ) ]
paths = [ norm ( get_field ( item , " path " ) ) ]
2025-12-12 21:55:38 -08:00
stores = [ norm ( get_field ( item , " store " ) ) ]
if target_store_l :
if target_store_l not in stores :
return False
2025-12-11 23:21:45 -08:00
if target_hash_l and target_hash_l in hashes :
return True
if target_path_l and target_path_l in paths :
return True
return False
def _update_item_title_fields(item: Any, new_title: str) -> None:
    """Mutate an item in place so its title field and any 'Title' column show *new_title*."""
    if isinstance(item, models.PipeObject):
        item.title = new_title
        columns = getattr(item, "columns", None)
        # Title column is assumed to be the first column, if present at all.
        if isinstance(columns, list) and columns:
            label, *_ = columns[0]
            if str(label).lower() == "title":
                columns[0] = (label, new_title)
        return
    if not isinstance(item, dict):
        return
    item["title"] = new_title
    cols = item.get("columns")
    if not isinstance(cols, list):
        return
    rebuilt = []
    touched = False
    for col in cols:
        if isinstance(col, tuple) and len(col) == 2 and str(col[0]).lower() == "title":
            rebuilt.append((col[0], new_title))
            touched = True
        else:
            rebuilt.append(col)
    # Only replace the list when something actually changed.
    if touched:
        item["columns"] = rebuilt
def _refresh_result_table_title(
    new_title: str,
    target_hash: Optional[str],
    target_store: Optional[str],
    target_path: Optional[str],
) -> None:
    """Refresh the cached result table with an updated title and redisplay it."""
    try:
        last_table = ctx.get_last_result_table()
        items = ctx.get_last_result_items()
        if not last_table or not items:
            return
        any_match = False
        for entry in items:
            try:
                if _matches_target(entry, target_hash, target_path, target_store):
                    _update_item_title_fields(entry, new_title)
                    any_match = True
            except Exception:
                pass
        if not any_match:
            return
        rebuilt = last_table.copy_with_title(getattr(last_table, "title", ""))
        for entry in items:
            rebuilt.add_result(entry)
        # Keep the underlying history intact; update only the overlay so @.. can
        # clear the overlay then continue back to prior tables (e.g., the search list).
        ctx.set_last_result_table_overlay(rebuilt, items)
    except Exception:
        # Best-effort UI refresh; never let display plumbing break the cmdlet.
        pass
def _refresh_tag_view(res: Any, target_hash: Optional[str], store_name: Optional[str], target_path: Optional[str], config: Dict[str, Any]) -> None:
    """Refresh tag display via get-tag. Prefer current subject; fall back to direct hash refresh."""
    try:
        from cmdlet import get as get_cmdlet  # type: ignore
    except Exception:
        return
    if not (target_hash and store_name):
        return
    try:
        get_tag = get_cmdlet("get-tag")
    except Exception:
        return
    if not callable(get_tag):
        return
    refresh_args: List[str] = ["-hash", target_hash, "-store", store_name]
    # First choice: refresh through the currently displayed subject, so the
    # visible tag view stays attached to what the user is looking at.
    try:
        subject = ctx.get_last_result_subject()
        if subject and _matches_target(subject, target_hash, target_path, store_name):
            get_tag(subject, refresh_args, config)
            return
    except Exception:
        pass
    # Fallback: refresh directly against the item we just tagged.
    try:
        get_tag(res, refresh_args, config)
    except Exception:
        pass
class Add_Tag(Cmdlet):
    """Class-based add-tag cmdlet with Cmdlet metadata inheritance.

    Tags either a store-backed file (through the backend's tag API) or, for
    items not yet in a configured store (store=PATH / "local") that have a real
    local file, mutates the pipeline item's tags directly so a later add-file
    can persist them.
    """

    def __init__(self) -> None:
        super().__init__(
            name="add-tag",
            summary="Add tag to a file in a store.",
            usage="add-tag -store <store> [-hash <sha256>] [-duplicate <format>] [-list <list>[,<list>...]] [--all] <tag>[,<tag>...]",
            arg=[
                CmdletArg("tag", type="string", required=False, description="One or more tag to add. Comma- or space-separated. Can also use {list_name} syntax. If omitted, uses tag from pipeline payload.", variadic=True),
                SharedArgs.HASH,
                SharedArgs.STORE,
                CmdletArg("-duplicate", type="string", description="Copy existing tag values to new namespaces. Formats: title:album,artist (explicit) or title,album,artist (inferred)"),
                CmdletArg("-list", type="string", description="Load predefined tag lists from adjective.json. Comma-separated list names (e.g., -list philosophy,occult)."),
                CmdletArg("--all", type="flag", description="Include temporary files in tagging (by default, only tag non-temporary files)."),
            ],
            detail=[
                "- By default, only tag non-temporary files (from pipelines). Use --all to tag everything.",
                "- Requires a store backend: use -store or pipe items that include store.",
                "- If -hash is not provided, uses the piped item's hash (or derives from its path when possible).",
                "- Multiple tag can be comma-separated or space-separated.",
                "- Use -list to include predefined tag lists from adjective.json: -list philosophy,occult",
                "- tag can also reference lists with curly braces: add-tag {philosophy} \"other:tag\"",
                "- Use -duplicate to copy EXISTING tag values to new namespaces:",
                "  Explicit format: -duplicate title:album,artist (copies title: to album: and artist:)",
                "  Inferred format: -duplicate title,album,artist (first is source, rest are targets)",
                "- The source namespace must already exist in the file being tagged.",
                "- Target namespaces that already have a value are skipped (not overwritten).",
                "- You can also pass the target hash as a tag token: hash:<sha256>. This overrides -hash and is removed from the tag list.",
            ],
            exec=self.run,
        )
        self.register()

    # ------------------------------------------------------------------ #
    # Helpers (shared by local-mode and store-mode tagging; previously    #
    # this logic was duplicated inline in both branches of run()).        #
    # ------------------------------------------------------------------ #

    @staticmethod
    def _parse_duplicate_spec(duplicate_arg: Any) -> tuple[str, List[str]]:
        """Parse a -duplicate spec into (source_namespace, target_namespaces).

        Accepted shapes:
          * explicit: 'src:a,b'  -> ('src', ['a', 'b'])
          * inferred: 'src,a,b'  -> ('src', ['a', 'b'])
        Returns ('', []) when the spec names no targets.
        """
        parts = str(duplicate_arg).split(':')
        if len(parts) > 1:
            # Explicit form: only the segment right after the first ':' lists targets.
            return parts[0], [t.strip() for t in parts[1].split(',') if t.strip()]
        flat = str(duplicate_arg).split(',')
        if len(flat) > 1:
            return flat[0], [t.strip() for t in flat[1:] if t.strip()]
        return "", []

    @staticmethod
    def _expand_duplicates(
        duplicate_arg: Any,
        existing_tag_list: List[str],
        existing_lower: set,
        item_tag_to_add: List[str],
    ) -> None:
        """Append copies of existing source-namespace tags into target namespaces.

        Mutates *item_tag_to_add* in place. Targets that already carry the same
        tag (case-insensitive) are skipped rather than overwritten.
        """
        source_ns, targets = Add_Tag._parse_duplicate_spec(duplicate_arg)
        if not source_ns or not targets:
            return
        source_prefix = source_ns.lower() + ":"
        for t in existing_tag_list:
            if not t.lower().startswith(source_prefix):
                continue
            value = t.split(":", 1)[1]
            for target_ns in targets:
                new_tag = f"{target_ns}:{value}"
                if new_tag.lower() not in existing_lower:
                    item_tag_to_add.append(new_tag)

    @staticmethod
    def _namespace_replacements(existing_tag_list: List[str], item_tag_to_add: List[str]) -> List[str]:
        """Return existing 'ns:value' tags displaced by incoming tags in the same namespace.

        Adding 'ns:new' replaces every existing 'ns:*' tag except an exact
        case-insensitive match. Result is sorted and de-duplicated.
        """
        removed: List[str] = []
        for new_tag in item_tag_to_add:
            if not isinstance(new_tag, str) or ":" not in new_tag:
                continue
            ns = new_tag.split(":", 1)[0].strip()
            if not ns:
                continue
            ns_prefix = ns.lower() + ":"
            for t in existing_tag_list:
                if t.lower().startswith(ns_prefix) and t.lower() != new_tag.lower():
                    removed.append(t)
        return sorted(set(removed))

    @staticmethod
    def _payload_tags(first: Any) -> Any:
        """Best-effort read of a 'tag' payload from a result item (attr, then dict key)."""
        for lookup in (
            lambda x: getattr(x, "tag", None),
            lambda x: x.get("tag") if isinstance(x, dict) else None,
        ):
            try:
                value = lookup(first)
            except (AttributeError, TypeError, KeyError):
                continue
            if value:
                return value
        return None

    @staticmethod
    def _collect_tags(parsed: Dict[str, Any], results: List[Any]) -> tuple[List[str], Optional[str]]:
        """Resolve the final tag list from args, payload fallback and -list.

        Returns (tag_to_add, extracted_hash) where extracted_hash comes from a
        'hash:<sha256>' tag token (or None). The token itself is removed from
        the returned tag list.
        """
        raw_tag = parsed.get("tag", [])
        if isinstance(raw_tag, str):
            raw_tag = [raw_tag]
        # Fallback: if no tag provided explicitly, pull from the first result payload.
        if not raw_tag and results:
            payload_tag = Add_Tag._payload_tags(results[0])
            if isinstance(payload_tag, str):
                raw_tag = [payload_tag]
            elif isinstance(payload_tag, list):
                # Copy so the -list append below cannot mutate the payload's own list.
                raw_tag = list(payload_tag)
        # -list names are rewritten into {list} group syntax and expanded below.
        list_arg = parsed.get("list")
        if list_arg:
            for name in str(list_arg).split(','):
                name = name.strip()
                if name:
                    raw_tag.append(f"{{{name}}}")
        tags = expand_tag_groups(parse_tag_arguments(raw_tag))
        extracted_hash: Optional[str] = None
        kept: List[str] = []
        for tag in tags:
            if isinstance(tag, str) and tag.lower().startswith("hash:"):
                _, _, hash_val = tag.partition(":")
                if hash_val:
                    extracted_hash = normalize_hash(hash_val.strip())
                continue
            kept.append(tag)
        return kept, extracted_hash

    @staticmethod
    def _resolve_hash(hash_override: Optional[str], raw_hash: Any, raw_path: Any) -> Optional[str]:
        """Resolve the target sha256: override > item hash > 64-hex path stem > file digest."""
        resolved = normalize_hash(hash_override) if hash_override else normalize_hash(raw_hash)
        if resolved or not raw_path:
            return resolved
        try:
            p = Path(str(raw_path))
            stem = p.stem
            if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
                return stem.lower()
            if p.exists() and p.is_file():
                return sha256_file(p)
        except Exception:
            return None
        return None

    @staticmethod
    def _apply_tags_locally(res: Any, tag_to_add: List[str], duplicate_arg: Any) -> tuple[int, int]:
        """Apply tag changes directly to a pipeline item (no store write).

        Used for items not yet in a configured store backend. Mutates the
        item's tag list and title in place. Returns (tags_added, modified)
        where modified is 0 or 1.
        """
        existing_tag_list = _extract_item_tags(res)
        existing_lower = {t.lower() for t in existing_tag_list if isinstance(t, str)}
        # Per-item tag list; keep only the last title: tag when several were supplied.
        item_tag_to_add = collapse_namespace_tag(list(tag_to_add), "title", prefer="last")
        if duplicate_arg:
            Add_Tag._expand_duplicates(duplicate_arg, existing_tag_list, existing_lower, item_tag_to_add)
        removed = Add_Tag._namespace_replacements(existing_tag_list, item_tag_to_add)
        to_add = [t for t in item_tag_to_add if isinstance(t, str) and t.lower() not in existing_lower]
        updated = [t for t in existing_tag_list if t not in removed]
        updated.extend(to_add)
        _set_item_tags(res, updated)
        _apply_title_to_result(res, _extract_title_tag(updated))
        return len(to_add), 1 if (removed or to_add) else 0

    # ------------------------------------------------------------------ #
    # Entry point                                                         #
    # ------------------------------------------------------------------ #

    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        """Add tag to a file with smart filtering for pipeline results.

        Returns 0 on success (including help display), 1 on a fatal
        argument/store error. Per-item tag failures are logged as warnings and
        the item is still emitted downstream.
        """
        if should_show_help(args):
            log(f"Cmdlet: {self.name}\nSummary: {self.summary}\nUsage: {self.usage}")
            return 0

        parsed = parse_cmdlet_args(args, self)
        include_temp = parsed.get("all", False)

        results = normalize_result_input(result)
        # Temporary (intermediate) files are skipped unless --all was given.
        if not include_temp:
            results = filter_results_by_temp(results, include_temp=False)
            if not results:
                log("No valid files to tag (all results were temporary; use --all to include temporary files)", file=sys.stderr)
                return 1

        tag_to_add, extracted_hash = self._collect_tags(parsed, results)
        if not tag_to_add:
            log("No tag provided to add", file=sys.stderr)
            return 1

        # -hash wins; otherwise a hash:<sha256> token extracted from the tags.
        hash_override = normalize_hash(parsed.get("hash")) or extracted_hash
        duplicate_arg = parsed.get("duplicate")
        store_override = parsed.get("store")
        store_registry = Store(config)

        total_added = 0
        total_modified = 0
        for res in results:
            store_name: Optional[str]
            raw_hash: Optional[str]
            raw_path: Optional[str]
            if isinstance(res, models.PipeObject):
                store_name = store_override or res.store
                raw_hash = res.hash
                raw_path = res.path
            elif isinstance(res, dict):
                store_name = store_override or res.get("store")
                raw_hash = res.get("hash")
                raw_path = res.get("path")
            else:
                # Unknown item shapes pass through untouched.
                ctx.emit(res)
                continue

            if not store_override:
                # If the item isn't in a configured store backend yet (e.g., store=PATH)
                # but has a local file, treat add-tag as a pipeline mutation (carry tags
                # forward for add-file) instead of a store write.
                store_name_str = str(store_name) if store_name is not None else ""
                local_mode_requested = (
                    not store_name_str
                    or store_name_str.upper() == "PATH"
                    or store_name_str.lower() == "local"
                )
                is_known_backend = bool(store_name_str) and store_registry.is_available(store_name_str)
                if local_mode_requested and raw_path:
                    try:
                        if Path(str(raw_path)).expanduser().exists():
                            added, modified = self._apply_tags_locally(res, tag_to_add, duplicate_arg)
                            total_added += added
                            total_modified += modified
                            ctx.emit(res)
                            continue
                    except Exception:
                        # Fall through to the local-mode error below.
                        pass
                if local_mode_requested:
                    log("[add_tag] Error: Missing usable local path for tagging (or provide -store)", file=sys.stderr)
                    return 1
                if store_name_str and not is_known_backend:
                    log(f"[add_tag] Error: Unknown store '{store_name_str}'. Available: {store_registry.list_backends()}", file=sys.stderr)
                    return 1

            resolved_hash = self._resolve_hash(hash_override, raw_hash, raw_path)
            if not resolved_hash:
                log("[add_tag] Warning: Item missing usable hash (and could not derive from path); skipping", file=sys.stderr)
                ctx.emit(res)
                continue

            try:
                backend = store_registry[str(store_name)]
            except Exception as exc:
                log(f"[add_tag] Error: Unknown store '{store_name}': {exc}", file=sys.stderr)
                return 1

            try:
                existing_tag, _src = backend.get_tag(resolved_hash, config=config)
            except Exception:
                existing_tag = []
            existing_tag_list = [t for t in (existing_tag or []) if isinstance(t, str)]
            existing_lower = {t.lower() for t in existing_tag_list}
            original_title = _extract_title_tag(existing_tag_list)

            # Per-item tag list (do not mutate the shared list); keep only the
            # last title: tag when several were supplied.
            item_tag_to_add = collapse_namespace_tag(list(tag_to_add), "title", prefer="last")
            if duplicate_arg:
                self._expand_duplicates(duplicate_arg, existing_tag_list, existing_lower, item_tag_to_add)

            removed_namespace_tag = self._namespace_replacements(existing_tag_list, item_tag_to_add)
            actual_tag_to_add = [
                t for t in item_tag_to_add if isinstance(t, str) and t.lower() not in existing_lower
            ]

            changed = False
            if removed_namespace_tag:
                try:
                    if backend.delete_tag(resolved_hash, removed_namespace_tag, config=config):
                        changed = True
                except Exception as exc:
                    log(f"[add_tag] Warning: Failed deleting namespace tag: {exc}", file=sys.stderr)
            if actual_tag_to_add:
                try:
                    if backend.add_tag(resolved_hash, actual_tag_to_add, config=config):
                        changed = True
                    else:
                        log("[add_tag] Warning: Store rejected tag update", file=sys.stderr)
                except Exception as exc:
                    log(f"[add_tag] Warning: Failed adding tag: {exc}", file=sys.stderr)
            if changed:
                total_added += len(actual_tag_to_add)
                total_modified += 1

            # Re-read so the emitted item reflects the store's canonical tags.
            try:
                refreshed_tag, _src2 = backend.get_tag(resolved_hash, config=config)
                refreshed_list = [t for t in (refreshed_tag or []) if isinstance(t, str)]
            except Exception:
                refreshed_list = existing_tag_list
            _set_item_tags(res, refreshed_list)
            final_title = _extract_title_tag(refreshed_list)
            _apply_title_to_result(res, final_title)

            # Refresh cached UI state only when the title actually changed /
            # the store contents were modified.
            if final_title and (not original_title or final_title.lower() != original_title.lower()):
                _refresh_result_table_title(final_title, resolved_hash, str(store_name), raw_path)
            if changed:
                _refresh_tag_view(res, resolved_hash, str(store_name), raw_path, config)
            ctx.emit(res)

        log(
            f"[add_tag] Added {total_added} new tag(s) across {len(results)} item(s); modified {total_modified} item(s)",
            file=sys.stderr,
        )
        return 0
# Module-level singleton; instantiating Add_Tag registers the cmdlet (see __init__'s self.register()).
CMDLET = Add_Tag()