nose
2025-12-06 00:10:19 -08:00
parent 5482ee5586
commit f29709d951
20 changed files with 1353 additions and 419 deletions

@@ -185,7 +185,13 @@ def _persist_local_metadata(
         log(traceback.format_exc(), file=sys.stderr)
-def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any, config: Optional[Dict[str, Any]] = None) -> Tuple[int, Optional[Path]]:
+def _handle_local_transfer(
+    media_path: Path,
+    destination_root: Path,
+    result: Any,
+    config: Optional[Dict[str, Any]] = None,
+    export_mode: bool = False,
+) -> Tuple[int, Optional[Path]]:
     """Transfer a file to local storage and return (exit_code, destination_path).
     Args:
@@ -246,34 +252,60 @@ def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any
     relationships = extract_relationships(result)
     duration = extract_duration(result)
-    # Rename source file if title tag is present (to ensure destination has correct name)
-    title_tag = next((t for t in merged_tags if str(t).strip().lower().startswith("title:")), None)
-    if title_tag:
-        try:
-            from helper.utils import unique_path
-            title_val = title_tag.split(":", 1)[1].strip()
-            # Sanitize filename (keep spaces, but remove illegal chars)
-            safe_title = "".join(c for c in title_val if c.isalnum() or c in " ._-()[]").strip()
-            if safe_title:
-                new_name = safe_title + media_path.suffix
-                new_path = media_path.parent / new_name
-                if new_path != media_path:
-                    # Ensure we don't overwrite existing files
-                    new_path = unique_path(new_path)
-                    media_path.rename(new_path)
-                    media_path = new_path
-                    debug(f"Renamed source file to match title: {media_path.name}")
-        except Exception as e:
-            log(f"Warning: Failed to rename file to match title: {e}", file=sys.stderr)
+    # Skip title-based renaming for library mode (hash-based) but allow for export mode below
     try:
-        # Ensure filename is the hash when adding to local storage
-        resolved_hash = _resolve_file_hash(result, sidecar_hash, media_path)
-        if resolved_hash:
-            hashed_name = resolved_hash + media_path.suffix
-            target_path = destination_root / hashed_name
-            media_path = media_path.rename(target_path) if media_path != target_path else media_path
-        dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
+        if export_mode:
+            title_tag = next((t for t in merged_tags if str(t).strip().lower().startswith("title:")), None)
+            title_value = ""
+            if title_tag:
+                title_value = title_tag.split(":", 1)[1].strip()
+            if not title_value:
+                title_value = media_path.stem.replace("_", " ").strip()
+            # Sanitize filename
+            safe_title = "".join(c for c in title_value if c.isalnum() or c in " ._-()[]{}'`").strip()
+            base_name = safe_title or media_path.stem
+            new_name = base_name + media_path.suffix
+            target_path = destination_root / new_name
+            destination_root.mkdir(parents=True, exist_ok=True)
+            if target_path.exists():
+                from helper.utils import unique_path
+                target_path = unique_path(target_path)
+            shutil.move(str(media_path), target_path)
+            # Move/copy sidecar files alongside
+            possible_sidecars = [
+                media_path.with_suffix(media_path.suffix + ".json"),
+                media_path.with_name(media_path.name + ".tags"),
+                media_path.with_name(media_path.name + ".tags.txt"),
+                media_path.with_name(media_path.name + ".metadata"),
+                media_path.with_name(media_path.name + ".notes"),
+            ]
+            for sc in possible_sidecars:
+                try:
+                    if sc.exists():
+                        suffix_part = sc.name.replace(media_path.name, "", 1)
+                        dest_sidecar = target_path.parent / f"{target_path.name}{suffix_part}"
+                        dest_sidecar.parent.mkdir(parents=True, exist_ok=True)
+                        shutil.move(str(sc), dest_sidecar)
+                except Exception:
+                    pass
+            media_path = target_path
+            dest_file = str(target_path)
+        else:
+            # Ensure filename is the hash when adding to local storage
+            resolved_hash = _resolve_file_hash(result, sidecar_hash, media_path)
+            if resolved_hash:
+                hashed_name = resolved_hash + media_path.suffix
+                target_path = destination_root / hashed_name
+                try:
+                    if target_path.exists():
+                        target_path.unlink()
+                except Exception:
+                    pass
+                if media_path != target_path:
+                    media_path = media_path.rename(target_path)
+            dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
     except Exception as exc:
         log(f"❌ Failed to move file into {destination_root}: {exc}", file=sys.stderr)
         return 1, None
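
Note on the new export branch above: it derives the destination name from the title tag (falling back to the source stem), whitelists filename characters, defers collisions to helper.utils.unique_path, and re-attaches each sidecar's trailing suffix to the new name. A minimal standalone sketch of those three steps; sanitize_title, dedupe_path, and sidecar_destination are hypothetical stand-ins for illustration, not helpers from this repo:

from pathlib import Path


def sanitize_title(title: str) -> str:
    # Keep alphanumerics plus the same small punctuation whitelist used in the export branch.
    allowed = " ._-()[]{}'`"
    return "".join(c for c in title if c.isalnum() or c in allowed).strip()


def dedupe_path(candidate: Path) -> Path:
    # Stand-in for helper.utils.unique_path: append " (1)", " (2)", ... until the name is free.
    if not candidate.exists():
        return candidate
    stem, suffix = candidate.stem, candidate.suffix
    n = 1
    while True:
        alt = candidate.with_name(f"{stem} ({n}){suffix}")
        if not alt.exists():
            return alt
        n += 1


def sidecar_destination(src_media: Path, sidecar: Path, dest_media: Path) -> Path:
    # Keep whatever trailed the media filename (".tags", ".metadata", ...) and
    # re-attach it to the exported name, mirroring the loop above.
    suffix_part = sidecar.name.replace(src_media.name, "", 1)
    return dest_media.parent / f"{dest_media.name}{suffix_part}"

# Example: exporting "abc123.mp4" with tag "title: My Clip" and sidecar "abc123.mp4.tags"
# yields "My Clip.mp4" plus "My Clip.mp4.tags" in the destination directory.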
@@ -291,9 +323,12 @@ def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any
     if filename_title:
         final_tags.insert(0, f"title:{filename_title}")
-    _persist_local_metadata(destination_root, dest_path, final_tags, merged_urls, file_hash, relationships, duration, media_kind)
-    _cleanup_sidecar_files(media_path, sidecar_path)
-    debug(f"✅ Moved to local library: {dest_path}")
+    if not export_mode:
+        _persist_local_metadata(destination_root, dest_path, final_tags, merged_urls, file_hash, relationships, duration, media_kind)
+        _cleanup_sidecar_files(media_path, sidecar_path)
+        debug(f"✅ Moved to local library: {dest_path}")
+    else:
+        debug(f"✅ Exported to destination: {dest_path}")
     return 0, dest_path
@@ -333,17 +368,26 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
     provider_name: Optional[str] = None
     delete_after_upload = False
-    # Check if -path argument was provided to use direct file path instead of piped result
+    # Check if -path argument was provided
     path_arg = parsed.get("path")
     if path_arg:
-        # Create a pseudo-result object from the file path
-        media_path = Path(str(path_arg).strip())
-        if not media_path.exists():
-            log(f"❌ File not found: {media_path}")
-            return 1
-        # Create result dict with the file path and origin 'wild' for direct path inputs
-        result = {"target": str(media_path), "origin": "wild"}
-        log(f"Using direct file path: {media_path}")
+        path_value = Path(str(path_arg).strip())
+        # If there is no piped result, treat -path as the source file (existing behavior)
+        if result is None:
+            if not path_value.exists():
+                log(f"❌ File not found: {path_value}")
+                return 1
+            result = {"target": str(path_value), "origin": "wild"}
+            log(f"Using direct file path: {path_value}")
+        else:
+            # Piped result present: treat -path as destination (export)
+            if not path_value.exists():
+                try:
+                    path_value.mkdir(parents=True, exist_ok=True)
+                except Exception as exc:
+                    log(f"❌ Cannot create destination directory {path_value}: {exc}", file=sys.stderr)
+                    return 1
+            location = str(path_value)
     # Get location from parsed args - now uses SharedArgs.STORAGE so key is "storage"
     location = parsed.get("storage")
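
The effect of this hunk is that -path is now overloaded: with no piped result it still names the source file, while with a piped result it names the export destination, created on demand. A compressed sketch of that dispatch, assuming a hypothetical resolve_path_arg helper rather than the inline code above:

from pathlib import Path
from typing import Any, Optional, Tuple


def resolve_path_arg(path_arg: str, piped_result: Optional[Any]) -> Tuple[Optional[dict], Optional[str]]:
    """Return (pseudo_result, export_location); exactly one of the two is set."""
    path_value = Path(str(path_arg).strip())
    if piped_result is None:
        # Standalone invocation: -path is the source file and must already exist.
        if not path_value.exists():
            raise FileNotFoundError(path_value)
        return {"target": str(path_value), "origin": "wild"}, None
    # Pipeline invocation: -path is the export destination; create it if missing.
    path_value.mkdir(parents=True, exist_ok=True)
    return None, str(path_value)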
@@ -714,7 +758,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
             return 1
         log(f"Moving to local path: {destination_root}", file=sys.stderr)
-        exit_code, dest_path = _handle_local_transfer(media_path, destination_root, result, config)
+        exit_code, dest_path = _handle_local_transfer(media_path, destination_root, result, config, export_mode=True)
         # After successful local transfer, emit result for pipeline continuation
         if exit_code == 0 and dest_path:
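
Since export_mode defaults to False in the new signature (first hunk), existing four-argument call sites keep their hash-based library behavior; only this call site opts in. A toy illustration of that default, using a stub in place of the real function (the stub and its paths are illustrative only):

from pathlib import Path
from typing import Any, Dict, Optional, Tuple


def _handle_local_transfer_stub(
    media_path: Path,
    destination_root: Path,
    result: Any,
    config: Optional[Dict[str, Any]] = None,
    export_mode: bool = False,
) -> Tuple[int, Optional[Path]]:
    # Stub: report which branch would run instead of moving files.
    mode = "export" if export_mode else "library"
    print(f"{media_path.name} -> {destination_root} ({mode})")
    return 0, destination_root / media_path.name


# Old-style call: falls back to the hash-based library path.
_handle_local_transfer_stub(Path("clip.mp4"), Path("library"), result=None)
# New call site from this commit: opts in to export naming.
_handle_local_transfer_stub(Path("clip.mp4"), Path("exports"), result=None, export_mode=True)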