dfd
@@ -16,6 +16,7 @@ from ._shared import (
     extract_tags_from_result, extract_title_from_result, extract_known_urls_from_result,
     merge_sequences, extract_relationships, extract_duration
 )
+from ._shared import collapse_namespace_tags
 from helper.local_library import read_sidecar, find_sidecar, write_sidecar, LocalLibraryDB
 from helper.utils import sha256_file
 from metadata import embed_metadata_in_file
@@ -133,6 +134,31 @@ def _cleanup_sidecar_files(media_path: Path, *extra_paths: Optional[Path]) -> None:
             continue
 
 
+def _show_local_result_table(file_hash: Optional[str], config: Dict[str, Any]) -> None:
+    """Run search-file by hash to display the newly added local file in a table."""
+    if not file_hash:
+        return
+    try:
+        from cmdlets import search_file as search_cmd
+        temp_ctx = models.PipelineStageContext(0, 1)
+        saved_ctx = ctx.get_stage_context()
+        ctx.set_stage_context(temp_ctx)
+        try:
+            # Call the cmdlet exactly like the user would type: search-file "hash:...,store:local"
+            search_cmd._run(None, [f"hash:{file_hash},store:local"], config)
+            try:
+                table = ctx.get_last_result_table()
+                if table is not None:
+                    log("")
+                    log(table.format_plain())
+            except Exception:
+                pass
+        finally:
+            ctx.set_stage_context(saved_ctx)
+    except Exception as exc:
+        debug(f"[add-file] Skipped search-file display: {exc}")
+
+
 def _persist_local_metadata(
     library_root: Path,
     dest_path: Path,
@@ -209,7 +235,7 @@ def _handle_local_transfer(
     try:
         destination_root.mkdir(parents=True, exist_ok=True)
     except Exception as exc:
-        log(f"❌ Cannot prepare destination directory {destination_root}: {exc}", file=sys.stderr)
+        log(f"Cannot prepare destination directory {destination_root}: {exc}", file=sys.stderr)
         return 1, None
 
 
@@ -234,8 +260,8 @@ def _handle_local_transfer(
            return f"title:{value}"
        return tag

-    tags_from_result = [normalize_title_tag(t) for t in tags_from_result]
-    sidecar_tags = [normalize_title_tag(t) for t in sidecar_tags]
+    tags_from_result = collapse_namespace_tags([normalize_title_tag(t) for t in tags_from_result], "title", prefer="last")
+    sidecar_tags = collapse_namespace_tags([normalize_title_tag(t) for t in sidecar_tags], "title", prefer="last")

     # Merge tags carefully: if URL has title tag, don't include sidecar title tags
     # This prevents duplicate title: tags when URL provides a title
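Note on the rewritten assignments: `collapse_namespace_tags` (imported from `._shared` in the first hunk) is not shown in this commit. Judging from its call sites, it collapses duplicate tags within one namespace, keeping the last occurrence when `prefer="last"`. A minimal sketch of that assumed contract, not the real helper:

    def collapse_namespace_tags(tags, namespace, prefer="last"):
        # Hypothetical stand-in; the real implementation lives in ._shared.
        prefix = f"{namespace}:"
        idxs = [i for i, t in enumerate(tags) if str(t).lower().startswith(prefix)]
        if len(idxs) <= 1:
            return list(tags)
        keep = idxs[-1] if prefer == "last" else idxs[0]
        # Drop every other tag in the namespace, preserving overall order.
        return [t for i, t in enumerate(tags) if i == keep or i not in idxs]

    # collapse_namespace_tags(["title:Old", "artist:X", "title:New"], "title")
    # -> ["artist:X", "title:New"]

Under this reading, each tag source is reduced to at most one `title:` tag before the merge below.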
@@ -295,6 +321,7 @@ def _handle_local_transfer(
         else:
             # Ensure filename is the hash when adding to local storage
             resolved_hash = _resolve_file_hash(result, sidecar_hash, media_path)
+            hashed_move_done = False
             if resolved_hash:
                 hashed_name = resolved_hash + media_path.suffix
                 target_path = destination_root / hashed_name
@@ -305,7 +332,13 @@ def _handle_local_transfer(
                     pass
                 if media_path != target_path:
                     media_path = media_path.rename(target_path)
-            dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
+                hashed_move_done = True
+
+            if hashed_move_done and media_path.parent.samefile(destination_root):
+                # Already placed at final destination with hash name; skip extra upload/move
+                dest_file = str(media_path)
+            else:
+                dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
    except Exception as exc:
        log(f"❌ Failed to move file into {destination_root}: {exc}", file=sys.stderr)
        return 1, None
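A behavioral note on the new guard: `Path.samefile` stats both paths and raises `OSError` when either is missing, so it can only succeed once the rename above has landed; any such error falls through to the `except Exception` handler and is logged as a move failure. For illustration:

    from pathlib import Path

    missing = Path("/tmp/does-not-exist-demo")
    try:
        missing.samefile(Path("/tmp"))
    except OSError:
        # Raised because `missing` does not exist; in the hunk above the
        # surrounding `except Exception` turns this into the failure log.
        pass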
@@ -316,7 +349,7 @@ def _handle_local_transfer(
 
     # If we have a title tag, keep it. Otherwise, derive from filename.
     has_title = any(str(t).strip().lower().startswith("title:") for t in merged_tags)
-    final_tags = merged_tags
+    final_tags = collapse_namespace_tags(merged_tags, "title", prefer="last")
 
     if not has_title:
         filename_title = dest_path.stem.replace("_", " ").strip()
@@ -326,7 +359,7 @@ def _handle_local_transfer(
     if not export_mode:
         _persist_local_metadata(destination_root, dest_path, final_tags, merged_urls, file_hash, relationships, duration, media_kind)
         _cleanup_sidecar_files(media_path, sidecar_path)
-        debug(f"✅ Moved to local library: {dest_path}")
+        _show_local_result_table(file_hash, config or {})
     else:
         debug(f"✅ Exported to destination: {dest_path}")
     return 0, dest_path
@@ -390,9 +423,17 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
         location = str(path_value)
 
     # Get location from parsed args - now uses SharedArgs.STORAGE so key is "storage"
-    location = parsed.get("storage")
-    if location:
-        location = str(location).lower().strip()
+    storage_arg = parsed.get("storage")
+    if location is None:
+        location = storage_arg
+        if location:
+            location = str(location).lower().strip()
+    elif storage_arg:
+        # User provided both -path (as destination) and -storage; prefer explicit storage only if it matches
+        storage_str = str(storage_arg).lower().strip()
+        if storage_str != str(location).lower():
+            log(f"❌ Conflicting destinations: -path '{location}' vs -storage '{storage_str}'", file=sys.stderr)
+            return 1
 
     # Get file provider from parsed args
     provider_name = parsed.get("provider")
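The `-path`/`-storage` precedence introduced here reads as a small standalone rule: a resolved `-path` wins, and `-storage` is consulted only when `-path` is absent, or rejected when the two disagree. A hedged distillation (the helper name and exception are illustrative, not part of the commit):

    def resolve_destination(path_arg, storage_arg):
        # -path, already resolved into path_arg, takes precedence.
        if path_arg is None:
            return str(storage_arg).lower().strip() if storage_arg else None
        if storage_arg and str(storage_arg).lower().strip() != str(path_arg).lower():
            raise ValueError(f"conflicting destinations: {path_arg!r} vs {storage_arg!r}")
        return path_arg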
@@ -973,8 +1014,14 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
        except OSError as exc:
            log(f"Failed to delete sidecar: {exc}", file=sys.stderr)
 
-    log(f"✅ Successfully completed: {media_path.name} (hash={file_hash})", file=sys.stderr)
+    # Decide whether to surface search-file results at end of pipeline
+    stage_ctx = ctx.get_stage_context()
+    is_storage_target = location is not None
+    should_display = is_storage_target and (stage_ctx is None or stage_ctx.is_last_stage)
+
+    if (not should_display) or not file_hash:
+        log(f"Successfully completed: {media_path.name} (hash={file_hash})", file=sys.stderr)
 
     # Emit result for Hydrus uploads so downstream commands know about it
     if location == 'hydrus':
         # Extract title from original result, fallback to filename if not available
@@ -999,6 +1046,17 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
        # Clear the stage table so downstream @N doesn't try to re-run download-data
        # Next stage will use these Hydrus file results, not format objects
        ctx.set_current_stage_table(None)
 
+    # If this is the last stage (or not in a pipeline), show the file via search-file
+    if should_display and file_hash:
+        try:
+            from cmdlets import search_file as search_cmdlet
+            search_cmdlet._run(None, [f"hash:{file_hash}"], config)
+        except Exception:
+            debug("search-file lookup after add-file failed", file=sys.stderr)
+    elif file_hash:
+        # Not displaying search results here, so report completion normally
+        log(f"Successfully completed: {media_path.name} (hash={file_hash})", file=sys.stderr)
+
     return 0
 
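The end-of-run gating in the last two hunks hinges on the pipeline-stage contract: `ctx.get_stage_context()` is assumed to return None outside a pipeline and an object exposing `is_last_stage` inside one. A minimal model consistent with the `models.PipelineStageContext(0, 1)` call in `_show_local_result_table` (the field names and property are assumptions, not the real `models` class):

    from dataclasses import dataclass

    @dataclass
    class PipelineStageContext:
        stage_index: int
        total_stages: int

        @property
        def is_last_stage(self) -> bool:
            # Zero-indexed stages: the final stage is total_stages - 1.
            return self.stage_index == self.total_stages - 1

    # PipelineStageContext(0, 1).is_last_stage -> True, so the temporary
    # context used by _show_local_result_table always behaves like a final
    # stage, letting the nested search-file call render its result table.

Under that model, `should_display` is True exactly when a storage target was given and add-file is either standalone or the final pipeline stage, so the search-file table replaces the plain completion log.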