Rename known_urls/source_urls to url/source_url in merge-file and adopt the shared get_field and should_show_help helpers
@@ -42,16 +42,14 @@ from ._shared import (
     normalize_result_input,
     get_pipe_object_path,
     get_pipe_object_hash,
+    should_show_help,
+    get_field,
 )
 import models
 import pipeline as ctx


 def _get_item_value(item: Any, key: str, default: Any = None) -> Any:
     """Helper to read either dict keys or attributes."""
     if isinstance(item, dict):
         return item.get(key, default)
     return getattr(item, key, default)
@@ -60,12 +58,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     """Merge multiple files into one."""

     # Parse help
-    try:
-        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
-            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
-            return 0
-    except Exception:
-        pass
+    if should_show_help(args):
+        log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
+        return 0

     # Parse arguments
     parsed = parse_cmdlet_args(args, CMDLET)
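The hand-rolled help scan removed above is replaced by the shared should_show_help helper imported in the first hunk. Its implementation lives in ._shared and is not part of this diff; a minimal sketch of what it presumably does, inferred from the alias set the old code checked:

    from typing import Any, Sequence

    # Hypothetical sketch of ._shared.should_show_help; the real helper may
    # accept more aliases or normalize arguments differently.
    _HELP_ALIASES = {"-?", "/?", "--help", "-h", "help", "--cmdlet"}

    def should_show_help(args: Sequence[Any]) -> bool:
        """Return True when any argument matches a known help alias."""
        return any(str(a).lower() in _HELP_ALIASES for a in args)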
@@ -102,7 +97,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     source_files: List[Path] = []
     source_tags_files: List[Path] = []
     source_hashes: List[str] = []
-    source_urls: List[str] = []
+    source_url: List[str] = []
     source_tags: List[str] = []  # NEW: collect tags from source files
     source_relationships: List[str] = []  # NEW: collect relationships from source files

@@ -146,7 +141,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         if tags_file.exists():
             source_tags_files.append(tags_file)

-            # Try to read hash, tags, urls, and relationships from .tags sidecar file
+            # Try to read hash, tags, url, and relationships from .tags sidecar file
             try:
                 tags_content = tags_file.read_text(encoding='utf-8')
                 for line in tags_content.split('\n'):
@@ -157,18 +152,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                         hash_value = line[5:].strip()
                         if hash_value:
                             source_hashes.append(hash_value)
-                    elif line.startswith('known_url:') or line.startswith('url:'):
-                        # Extract URLs from tags file
+                    elif line.startswith('url:'):
+                        # Extract url from tags file
                         url_value = line.split(':', 1)[1].strip() if ':' in line else ''
-                        if url_value and url_value not in source_urls:
-                            source_urls.append(url_value)
+                        if url_value and url_value not in source_url:
+                            source_url.append(url_value)
                     elif line.startswith('relationship:'):
                         # Extract relationships from tags file
                         rel_value = line.split(':', 1)[1].strip() if ':' in line else ''
                         if rel_value and rel_value not in source_relationships:
                             source_relationships.append(rel_value)
                     else:
-                        # Collect actual tags (not metadata like hash: or known_url:)
+                        # Collect actual tags (not metadata like hash: or url:)
                         source_tags.append(line)
             except Exception:
                 pass
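For reference, the reader above expects a .tags sidecar with one entry per line; a hypothetical file it would accept after this change (hash, URL, and tag values invented):

    hash:3a7bd3e2360a3d29eea436fcfb7e44c735d117c42d1c1835420b6b9942dd4f1b
    url:https://example.com/source-a
    relationship:<value>
    guitar
    live-recording

hash: lines feed source_hashes (line[5:] relies on that prefix being exactly five characters), url: lines feed source_url, relationship: lines feed source_relationships, and any other line falls through to the else branch as a plain tag.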
@@ -178,14 +173,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             if hash_value and hash_value not in source_hashes:
                 source_hashes.append(str(hash_value))

-            # Extract known URLs if available
-            known_urls = _get_item_value(item, 'known_urls', [])
-            if isinstance(known_urls, str):
-                source_urls.append(known_urls)
-            elif isinstance(known_urls, list):
-                source_urls.extend(known_urls)
+            # Extract known url if available
+            url = get_field(item, 'url', [])
+            if isinstance(url, str):
+                source_url.append(url)
+            elif isinstance(url, list):
+                source_url.extend(url)
         else:
-            title = _get_item_value(item, 'title', 'unknown') or _get_item_value(item, 'id', 'unknown')
+            title = get_field(item, 'title', 'unknown') or get_field(item, 'id', 'unknown')
             log(f"Warning: Could not locate file for item: {title}", file=sys.stderr)

     if len(source_files) < 2:
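get_field, newly imported from ._shared, takes over every call site of the local _get_item_value helper shown in the first hunk. Its actual implementation is not part of this diff; presumably it covers at least the same dict-or-attribute lookup, roughly:

    from typing import Any

    # Hypothetical sketch of ._shared.get_field, inferred from the retired
    # _get_item_value helper; the shared version may handle more cases.
    def get_field(item: Any, key: str, default: Any = None) -> Any:
        """Read a key from a dict entry or an object attribute, with a default."""
        if isinstance(item, dict):
            return item.get(key, default)
        return getattr(item, key, default)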
@@ -279,8 +274,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     if HAS_METADATA_API and write_tags_to_file:
         # Use unified API for file writing
         source_hashes_list = source_hashes if source_hashes else None
-        source_urls_list = source_urls if source_urls else None
-        write_tags_to_file(tags_path, merged_tags, source_hashes_list, source_urls_list)
+        source_url_list = source_url if source_url else None
+        write_tags_to_file(tags_path, merged_tags, source_hashes_list, source_url_list)
     else:
         # Fallback: manual file writing
         tags_lines = []
@@ -292,10 +287,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         # Add regular tags
         tags_lines.extend(merged_tags)

-        # Add known URLs
-        if source_urls:
-            for url in source_urls:
-                tags_lines.append(f"known_url:{url}")
+        # Add known url
+        if source_url:
+            for url in source_url:
+                tags_lines.append(f"url:{url}")

         # Add relationships (if available)
         if source_relationships:
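With this rename the fallback writer emits the same url: prefix the sidecar reader above consumes. A hypothetical sidecar produced for a two-source merge (tags and URLs invented; any hash: line is written outside the displayed hunk):

    guitar
    live-recording
    url:https://example.com/source-a
    url:https://example.com/source-b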
@@ -309,7 +304,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

     # Also create .metadata file using centralized function
     try:
-        write_metadata(output_path, source_hashes[0] if source_hashes else None, source_urls, source_relationships)
+        write_metadata(output_path, source_hashes[0] if source_hashes else None, source_url, source_relationships)
         log(f"Created metadata: {output_path.name}.metadata", file=sys.stderr)
     except Exception as e:
         log(f"Warning: Could not create metadata file: {e}", file=sys.stderr)
@@ -325,12 +320,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     except ImportError:
         # Fallback: create a simple object with the required attributes
         class SimpleItem:
-            def __init__(self, target, title, media_kind, tags=None, known_urls=None):
+            def __init__(self, target, title, media_kind, tags=None, url=None):
                 self.target = target
                 self.title = title
                 self.media_kind = media_kind
                 self.tags = tags or []
-                self.known_urls = known_urls or []
+                self.url = url or []
                 self.origin = "local"  # Ensure origin is set for add-file
         PipelineItem = SimpleItem

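When the models import fails, the fallback class is constructed the same way the real PipelineItem is in the next hunk; a quick illustration of the renamed keyword (all values invented):

    # Mirrors the call site in the next hunk; path, tags, and URL are hypothetical.
    item = SimpleItem(
        target="/tmp/merged.mp3",
        title="merged",
        media_kind="audio",
        tags=["guitar", "live-recording"],
        url=["https://example.com/source-a"],  # was known_urls= before this commit
    )
    assert item.origin == "local"  # preset so add-file treats the result as local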
@@ -339,7 +334,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         title=output_path.stem,
         media_kind=file_kind,
         tags=merged_tags,  # Include merged tags
-        known_urls=source_urls  # Include known URLs
+        url=source_url  # Include known url
     )
     # Clear previous results to ensure only the merged file is passed down
     ctx.clear_last_result()
@@ -904,12 +899,12 @@ CMDLET = Cmdlet(
     name="merge-file",
     summary="Merge multiple files into a single output file. Supports audio, video, PDF, and text merging with optional cleanup.",
     usage="merge-file [-delete] [-output <path>] [-format <auto|mp3|aac|opus|mp4|mkv|pdf|txt>]",
-    args=[
+    arg=[
         CmdletArg("-delete", type="flag", description="Delete source files after successful merge."),
         CmdletArg("-output", description="Override output file path."),
         CmdletArg("-format", description="Output format (auto/mp3/aac/opus/mp4/mkv/pdf/txt). Default: auto-detect from first file."),
     ],
-    details=[
+    detail=[
         "- Pipe multiple files: search-file query | [1,2,3] | merge-file",
         "- Audio files merge with minimal quality loss using specified codec.",
         "- Video files merge into MP4 or MKV containers.",
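The plural args=/details= keywords become singular arg=/detail=, which implies the corresponding Cmdlet constructor fields were renamed in the same change. Per the usage string and the piped-files detail above, a typical invocation looks like (selection indices hypothetical):

    search-file query | [1,2,3] | merge-file -format mp3 -delete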