sssssss
This commit is contained in:
@@ -1182,9 +1182,11 @@ def extract_known_urls_from_result(result: Any) -> list[str]:
|
||||
|
||||
if isinstance(result, models.PipeObject):
|
||||
_extend(result.extra.get('known_urls'))
|
||||
_extend(result.extra.get('url')) # Also check singular url
|
||||
if isinstance(result.metadata, dict):
|
||||
_extend(result.metadata.get('known_urls'))
|
||||
_extend(result.metadata.get('urls'))
|
||||
_extend(result.metadata.get('url'))
|
||||
elif hasattr(result, 'known_urls') or hasattr(result, 'urls'):
|
||||
# Handle objects with known_urls/urls attribute
|
||||
_extend(getattr(result, 'known_urls', None))
|
||||
@@ -1193,10 +1195,12 @@ def extract_known_urls_from_result(result: Any) -> list[str]:
|
||||
if isinstance(result, dict):
|
||||
_extend(result.get('known_urls'))
|
||||
_extend(result.get('urls'))
|
||||
_extend(result.get('url'))
|
||||
extra = result.get('extra')
|
||||
if isinstance(extra, dict):
|
||||
_extend(extra.get('known_urls'))
|
||||
_extend(extra.get('urls'))
|
||||
_extend(extra.get('url'))
|
||||
|
||||
return merge_sequences(urls, case_sensitive=True)
|
||||
|
||||
|
||||
@@ -169,26 +169,14 @@ def _persist_local_metadata(
|
||||
debug(f"[_persist_local_metadata] Absolute dest_path: {dest_path.resolve()}")
|
||||
|
||||
with LocalLibraryDB(library_root) as db:
|
||||
# Save metadata FIRST to ensure file entry is created in DB
|
||||
if any(payload.values()):
|
||||
debug(f"[_persist_local_metadata] Saving metadata payload first")
|
||||
try:
|
||||
db.save_metadata(dest_path, payload)
|
||||
debug(f"[_persist_local_metadata] ✅ Metadata saved")
|
||||
except Exception as meta_exc:
|
||||
log(f"[_persist_local_metadata] ❌ Failed to save metadata: {meta_exc}", file=sys.stderr)
|
||||
raise
|
||||
|
||||
# Save tags to DB synchronously in same transaction
|
||||
# For local storage, DB is the primary source of truth
|
||||
if tags:
|
||||
try:
|
||||
debug(f"[_persist_local_metadata] Saving {len(tags)} tags to DB")
|
||||
db.save_tags(dest_path, tags)
|
||||
debug(f"[_persist_local_metadata] ✅ Tags saved to DB")
|
||||
except Exception as tag_exc:
|
||||
log(f"[_persist_local_metadata] ⚠️ Failed to save tags to DB: {tag_exc}", file=sys.stderr)
|
||||
raise
|
||||
# Use optimized single-transaction save
|
||||
debug(f"[_persist_local_metadata] Saving metadata and {len(tags)} tags to DB")
|
||||
try:
|
||||
db.save_file_info(dest_path, payload, tags)
|
||||
debug(f"[_persist_local_metadata] ✅ File info saved to DB")
|
||||
except Exception as exc:
|
||||
log(f"[_persist_local_metadata] ❌ Failed to save file info: {exc}", file=sys.stderr)
|
||||
raise
|
||||
|
||||
# NOTE: Sidecar files are intentionally NOT created for local storage
|
||||
# Local storage uses database as primary source, not sidecar files
|
||||
@@ -261,6 +249,26 @@ def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any
|
||||
relationships = extract_relationships(result)
|
||||
duration = extract_duration(result)
|
||||
|
||||
# Rename source file if title tag is present (to ensure destination has correct name)
|
||||
title_tag = next((t for t in merged_tags if str(t).strip().lower().startswith("title:")), None)
|
||||
if title_tag:
|
||||
try:
|
||||
from helper.utils import unique_path
|
||||
title_val = title_tag.split(":", 1)[1].strip()
|
||||
# Sanitize filename (keep spaces, but remove illegal chars)
|
||||
safe_title = "".join(c for c in title_val if c.isalnum() or c in " ._-()[]").strip()
|
||||
if safe_title:
|
||||
new_name = safe_title + media_path.suffix
|
||||
new_path = media_path.parent / new_name
|
||||
if new_path != media_path:
|
||||
# Ensure we don't overwrite existing files
|
||||
new_path = unique_path(new_path)
|
||||
media_path.rename(new_path)
|
||||
media_path = new_path
|
||||
debug(f"Renamed source file to match title: {media_path.name}")
|
||||
except Exception as e:
|
||||
log(f"Warning: Failed to rename file to match title: {e}", file=sys.stderr)
|
||||
|
||||
try:
|
||||
dest_file = storage["local"].upload(media_path, location=str(destination_root), move=True)
|
||||
except Exception as exc:
|
||||
@@ -271,14 +279,16 @@ def _handle_local_transfer(media_path: Path, destination_root: Path, result: Any
|
||||
file_hash = _resolve_file_hash(result, sidecar_hash, dest_path)
|
||||
media_kind = _resolve_media_kind(result, dest_path)
|
||||
|
||||
# Ensure only ONE title tag that matches the actual filename
|
||||
# Remove all existing title tags and add one based on the saved filename
|
||||
merged_tags_no_titles = [t for t in merged_tags if not str(t).strip().lower().startswith("title:")]
|
||||
filename_title = dest_path.stem.replace("_", " ").strip()
|
||||
if filename_title:
|
||||
merged_tags_no_titles.insert(0, f"title:{filename_title}")
|
||||
# If we have a title tag, keep it. Otherwise, derive from filename.
|
||||
has_title = any(str(t).strip().lower().startswith("title:") for t in merged_tags)
|
||||
final_tags = merged_tags
|
||||
|
||||
_persist_local_metadata(destination_root, dest_path, merged_tags_no_titles, merged_urls, file_hash, relationships, duration, media_kind)
|
||||
if not has_title:
|
||||
filename_title = dest_path.stem.replace("_", " ").strip()
|
||||
if filename_title:
|
||||
final_tags.insert(0, f"title:{filename_title}")
|
||||
|
||||
_persist_local_metadata(destination_root, dest_path, final_tags, merged_urls, file_hash, relationships, duration, media_kind)
|
||||
_cleanup_sidecar_files(media_path, sidecar_path)
|
||||
debug(f"✅ Moved to local library: {dest_path}")
|
||||
return 0, dest_path
|
||||
@@ -897,8 +907,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
pass
|
||||
|
||||
# If -delete flag is set, delete the file and .tags after successful upload
|
||||
if delete_after_upload:
|
||||
log(f"Deleting local files (as requested)...", file=sys.stderr)
|
||||
# Also delete if the file is a temporary file from merge-file (contains .dlhx_ or (merged))
|
||||
is_temp_merge = "(merged)" in media_path.name or ".dlhx_" in media_path.name
|
||||
|
||||
if delete_after_upload or is_temp_merge:
|
||||
log(f"Deleting local files (as requested or temp file)...", file=sys.stderr)
|
||||
try:
|
||||
media_path.unlink()
|
||||
log(f"✅ Deleted: {media_path.name}", file=sys.stderr)
|
||||
|
||||
@@ -2,6 +2,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from . import register
|
||||
import models
|
||||
@@ -9,17 +11,19 @@ import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash
|
||||
from helper.logger import log
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="add-url",
|
||||
summary="Associate a URL with a Hydrus file.",
|
||||
summary="Associate a URL with a file (Hydrus or Local).",
|
||||
usage="add-url [-hash <sha256>] <url>",
|
||||
args=[
|
||||
CmdletArg("-hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
|
||||
CmdletArg("url", required=True, description="The URL to associate with the file."),
|
||||
],
|
||||
details=[
|
||||
"- Adds the URL to the Hydrus file's known URL list.",
|
||||
"- Adds the URL to the file's known URL list.",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -37,42 +41,96 @@ def add(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
from ._shared import parse_cmdlet_args
|
||||
parsed = parse_cmdlet_args(args, CMDLET)
|
||||
override_hash = parsed.get("hash")
|
||||
url = parsed.get("url")
|
||||
url_arg = parsed.get("url")
|
||||
|
||||
if not url:
|
||||
if not url_arg:
|
||||
log("Requires a URL argument")
|
||||
return 1
|
||||
|
||||
url = str(url).strip()
|
||||
if not url:
|
||||
url_arg = str(url_arg).strip()
|
||||
if not url_arg:
|
||||
log("Requires a non-empty URL")
|
||||
return 1
|
||||
|
||||
# Split by comma to handle multiple URLs
|
||||
urls_to_add = [u.strip() for u in url_arg.split(',') if u.strip()]
|
||||
|
||||
# Handle @N selection which creates a list - extract the first item
|
||||
if isinstance(result, list) and len(result) > 0:
|
||||
result = result[0]
|
||||
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
|
||||
if not hash_hex:
|
||||
log("Selected result does not include a Hydrus hash")
|
||||
return 1
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus client unavailable: {exc}")
|
||||
return 1
|
||||
# Helper to get field from both dict and object
|
||||
def get_field(obj: Any, field: str, default: Any = None) -> Any:
|
||||
if isinstance(obj, dict):
|
||||
return obj.get(field, default)
|
||||
else:
|
||||
return getattr(obj, field, default)
|
||||
|
||||
success = False
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable")
|
||||
# 1. Try Local Library
|
||||
file_path = get_field(result, "file_path") or get_field(result, "path")
|
||||
if file_path and not override_hash:
|
||||
try:
|
||||
path_obj = Path(file_path)
|
||||
if path_obj.exists():
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
metadata = db.get_metadata(path_obj) or {}
|
||||
known_urls = metadata.get("known_urls") or []
|
||||
|
||||
local_changed = False
|
||||
for url in urls_to_add:
|
||||
if url not in known_urls:
|
||||
known_urls.append(url)
|
||||
local_changed = True
|
||||
ctx.emit(f"Associated URL with local file {path_obj.name}: {url}")
|
||||
else:
|
||||
ctx.emit(f"URL already exists for local file {path_obj.name}: {url}")
|
||||
|
||||
if local_changed:
|
||||
metadata["known_urls"] = known_urls
|
||||
# Ensure we have a hash if possible, but don't fail if not
|
||||
if not metadata.get("hash"):
|
||||
try:
|
||||
from helper.utils import sha256_file
|
||||
metadata["hash"] = sha256_file(path_obj)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
db.save_metadata(path_obj, metadata)
|
||||
|
||||
success = True
|
||||
except Exception as e:
|
||||
log(f"Error updating local library: {e}", file=sys.stderr)
|
||||
|
||||
# 2. Try Hydrus
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(result, "hash_hex", None))
|
||||
|
||||
if hash_hex:
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
if client:
|
||||
for url in urls_to_add:
|
||||
client.associate_url(hash_hex, url)
|
||||
preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
|
||||
ctx.emit(f"Associated URL with Hydrus file {preview}: {url}")
|
||||
success = True
|
||||
except Exception as exc:
|
||||
# Only log error if we didn't succeed locally either
|
||||
if not success:
|
||||
log(f"Hydrus add-url failed: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if success:
|
||||
return 0
|
||||
|
||||
if not hash_hex and not file_path:
|
||||
log("Selected result does not include a file path or Hydrus hash", file=sys.stderr)
|
||||
return 1
|
||||
try:
|
||||
client.associate_url(hash_hex, url)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus add-url failed: {exc}")
|
||||
return 1
|
||||
preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
|
||||
ctx.emit(f"Associated URL with {preview}: {url}")
|
||||
return 0
|
||||
|
||||
return 1
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -11,11 +11,50 @@ from pathlib import Path
|
||||
import models
|
||||
import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
|
||||
|
||||
|
||||
|
||||
def _cleanup_relationships(db_path: Path, file_hash: str) -> int:
|
||||
"""Remove references to file_hash from other files' relationships."""
|
||||
try:
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Find all metadata entries that contain this hash in relationships
|
||||
cursor.execute("SELECT file_id, relationships FROM metadata WHERE relationships LIKE ?", (f'%{file_hash}%',))
|
||||
rows = cursor.fetchall()
|
||||
|
||||
rel_update_count = 0
|
||||
for row_fid, rel_json in rows:
|
||||
try:
|
||||
rels = json.loads(rel_json)
|
||||
changed = False
|
||||
if isinstance(rels, dict):
|
||||
for r_type, hashes in rels.items():
|
||||
if isinstance(hashes, list) and file_hash in hashes:
|
||||
hashes.remove(file_hash)
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
cursor.execute("UPDATE metadata SET relationships = ? WHERE file_id = ?", (json.dumps(rels), row_fid))
|
||||
rel_update_count += 1
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
if rel_update_count > 0:
|
||||
debug(f"Removed relationship references from {rel_update_count} other files", file=sys.stderr)
|
||||
return rel_update_count
|
||||
except Exception as e:
|
||||
debug(f"Error cleaning up relationships: {e}", file=sys.stderr)
|
||||
return 0
|
||||
|
||||
|
||||
def _delete_database_entry(db_path: Path, file_path: str) -> bool:
|
||||
"""Delete file and related entries from local library database.
|
||||
|
||||
@@ -28,35 +67,31 @@ def _delete_database_entry(db_path: Path, file_path: str) -> bool:
|
||||
"""
|
||||
try:
|
||||
if not db_path.exists():
|
||||
log(f"Database not found at {db_path}", file=sys.stderr)
|
||||
debug(f"Database not found at {db_path}", file=sys.stderr)
|
||||
return False
|
||||
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
|
||||
log(f"Searching database for file_path: {file_path}", file=sys.stderr)
|
||||
debug(f"Searching database for file_path: {file_path}", file=sys.stderr)
|
||||
|
||||
# Find the file_id using the exact file_path
|
||||
cursor.execute('SELECT id FROM files WHERE file_path = ?', (file_path,))
|
||||
result = cursor.fetchone()
|
||||
|
||||
if not result:
|
||||
log(f"ERROR: File path not found in database", file=sys.stderr)
|
||||
log(f"Expected: {file_path}", file=sys.stderr)
|
||||
|
||||
# Debug: show sample entries
|
||||
cursor.execute('SELECT id, file_path FROM files LIMIT 3')
|
||||
samples = cursor.fetchall()
|
||||
if samples:
|
||||
log(f"Sample DB entries:", file=sys.stderr)
|
||||
for fid, fpath in samples:
|
||||
log(f"{fid}: {fpath}", file=sys.stderr)
|
||||
|
||||
debug(f"File path not found in database: {file_path}", file=sys.stderr)
|
||||
conn.close()
|
||||
return False
|
||||
|
||||
file_id = result[0]
|
||||
log(f"Found file_id={file_id}, deleting all related records", file=sys.stderr)
|
||||
|
||||
# Get file hash before deletion to clean up relationships
|
||||
cursor.execute('SELECT file_hash FROM files WHERE id = ?', (file_id,))
|
||||
hash_result = cursor.fetchone()
|
||||
file_hash = hash_result[0] if hash_result else None
|
||||
|
||||
debug(f"Found file_id={file_id}, deleting all related records", file=sys.stderr)
|
||||
|
||||
# Delete related records
|
||||
cursor.execute('DELETE FROM metadata WHERE file_id = ?', (file_id,))
|
||||
@@ -74,7 +109,11 @@ def _delete_database_entry(db_path: Path, file_path: str) -> bool:
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
log(f"Deleted: metadata={meta_count}, tags={tags_count}, notes={notes_count}, files={files_count}", file=sys.stderr)
|
||||
# Clean up relationships in other files
|
||||
if file_hash:
|
||||
_cleanup_relationships(db_path, file_hash)
|
||||
|
||||
debug(f"Deleted: metadata={meta_count}, tags={tags_count}, notes={notes_count}, files={files_count}", file=sys.stderr)
|
||||
return True
|
||||
|
||||
except Exception as exc:
|
||||
@@ -106,6 +145,24 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
|
||||
local_deleted = False
|
||||
local_target = isinstance(target, str) and target.strip() and not str(target).lower().startswith(("http://", "https://"))
|
||||
|
||||
# Try to resolve local path if target looks like a hash and we have a library root
|
||||
if local_target and looks_like_hash(str(target)) and lib_root:
|
||||
try:
|
||||
db_path = Path(lib_root) / ".downlow_library.db"
|
||||
if db_path.exists():
|
||||
# We can't use LocalLibraryDB context manager easily here without importing it,
|
||||
# but we can use a quick sqlite connection or just use the class if imported.
|
||||
# We imported LocalLibraryDB, so let's use it.
|
||||
with LocalLibraryDB(Path(lib_root)) as db:
|
||||
resolved = db.search_by_hash(str(target))
|
||||
if resolved:
|
||||
target = str(resolved)
|
||||
# Also ensure we have the hash set for Hydrus deletion if needed
|
||||
if not hash_hex:
|
||||
hash_hex = normalize_hash(str(target))
|
||||
except Exception as e:
|
||||
debug(f"Failed to resolve hash to local path: {e}", file=sys.stderr)
|
||||
|
||||
if conserve != "local" and local_target:
|
||||
path = Path(str(target))
|
||||
file_path_str = str(target) # Keep the original string for DB matching
|
||||
@@ -132,18 +189,59 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
|
||||
if lib_root:
|
||||
lib_root_path = Path(lib_root)
|
||||
db_path = lib_root_path / ".downlow_library.db"
|
||||
if _delete_database_entry(db_path, file_path_str):
|
||||
|
||||
# If file_path_str is a hash (because file was already deleted or target was hash),
|
||||
# we need to find the path by hash in the DB first
|
||||
if looks_like_hash(file_path_str):
|
||||
try:
|
||||
with LocalLibraryDB(lib_root_path) as db:
|
||||
resolved = db.search_by_hash(file_path_str)
|
||||
if resolved:
|
||||
file_path_str = str(resolved)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
db_success = _delete_database_entry(db_path, file_path_str)
|
||||
|
||||
if not db_success:
|
||||
# If deletion failed (e.g. not found), but we have a hash, try to clean up relationships anyway
|
||||
effective_hash = None
|
||||
if looks_like_hash(file_path_str):
|
||||
effective_hash = file_path_str
|
||||
elif hash_hex:
|
||||
effective_hash = hash_hex
|
||||
|
||||
if effective_hash:
|
||||
debug(f"Entry not found, but attempting to clean up relationships for hash: {effective_hash}", file=sys.stderr)
|
||||
if _cleanup_relationships(db_path, effective_hash) > 0:
|
||||
db_success = True
|
||||
|
||||
if db_success:
|
||||
if ctx._PIPE_ACTIVE:
|
||||
ctx.emit(f"Removed database entry: {path.name}")
|
||||
log(f"Database entry cleaned up", file=sys.stderr)
|
||||
local_deleted = True # Mark as deleted if DB cleanup succeeded
|
||||
debug(f"Database entry cleaned up", file=sys.stderr)
|
||||
local_deleted = True
|
||||
else:
|
||||
log(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
|
||||
debug(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
|
||||
else:
|
||||
debug(f"No lib_root provided, skipping database cleanup", file=sys.stderr)
|
||||
|
||||
hydrus_deleted = False
|
||||
if conserve != "hydrus" and hash_hex:
|
||||
# Only attempt Hydrus deletion if origin is explicitly Hydrus or if we failed to delete locally
|
||||
# and we suspect it might be in Hydrus.
|
||||
# If origin is local, we should default to NOT deleting from Hydrus unless requested?
|
||||
# Or maybe we should check if it exists in Hydrus first?
|
||||
# The user complaint is "its still trying to delete hydrus, this is a local file".
|
||||
|
||||
should_try_hydrus = True
|
||||
if origin and origin.lower() == "local":
|
||||
should_try_hydrus = False
|
||||
|
||||
# If conserve is set to hydrus, definitely don't delete
|
||||
if conserve == "hydrus":
|
||||
should_try_hydrus = False
|
||||
|
||||
if should_try_hydrus and hash_hex:
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
@@ -153,6 +251,9 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
|
||||
else:
|
||||
if client is None:
|
||||
if not local_deleted:
|
||||
# If we deleted locally, we don't care if Hydrus is unavailable
|
||||
pass
|
||||
else:
|
||||
log("Hydrus client unavailable", file=sys.stderr)
|
||||
return False
|
||||
else:
|
||||
@@ -165,7 +266,8 @@ def _process_single_item(item: Any, override_hash: str | None, conserve: str | N
|
||||
preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
|
||||
debug(f"Deleted from Hydrus: {preview}…", file=sys.stderr)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus delete failed: {exc}", file=sys.stderr)
|
||||
# If it's not in Hydrus (e.g. 404 or similar), that's fine
|
||||
# log(f"Hydrus delete failed: {exc}", file=sys.stderr)
|
||||
if not local_deleted:
|
||||
return False
|
||||
|
||||
@@ -218,6 +320,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
reason_tokens.append(token)
|
||||
i += 1
|
||||
|
||||
if not lib_root:
|
||||
# Try to get from config
|
||||
p = get_local_storage_path(config)
|
||||
if p:
|
||||
lib_root = str(p)
|
||||
|
||||
reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()
|
||||
|
||||
items = []
|
||||
|
||||
@@ -2,22 +2,27 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from . import register
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash
|
||||
from helper.logger import log
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
import pipeline as ctx
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="delete-url",
|
||||
summary="Remove a URL association from a Hydrus file.",
|
||||
summary="Remove a URL association from a file (Hydrus or Local).",
|
||||
usage="delete-url [-hash <sha256>] <url>",
|
||||
args=[
|
||||
CmdletArg("-hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
|
||||
CmdletArg("<url>", required=True, description="The URL to remove from the file."),
|
||||
CmdletArg("url", required=True, description="The URL to remove from the file."),
|
||||
],
|
||||
details=[
|
||||
"- Removes the URL from the Hydrus file's known URL list.",
|
||||
"- Removes the URL from the file's known URL list.",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -47,36 +52,105 @@ def delete(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
return 0
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
override_hash, rest = _parse_hash_and_rest(args)
|
||||
if not rest:
|
||||
log("Requires a URL argument")
|
||||
return 1
|
||||
url = str(rest[0] or '').strip()
|
||||
if not url:
|
||||
log("Requires a non-empty URL")
|
||||
return 1
|
||||
|
||||
# Handle @N selection which creates a list - extract the first item
|
||||
if isinstance(result, list) and len(result) > 0:
|
||||
result = result[0]
|
||||
url_arg = None
|
||||
if rest:
|
||||
url_arg = str(rest[0] or '').strip()
|
||||
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
|
||||
if not hash_hex:
|
||||
log("Selected result does not include a Hydrus hash")
|
||||
return 1
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus client unavailable: {exc}")
|
||||
# Normalize result to a list
|
||||
items = result if isinstance(result, list) else [result]
|
||||
if not items:
|
||||
log("No input provided.")
|
||||
return 1
|
||||
|
||||
success_count = 0
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable")
|
||||
for item in items:
|
||||
target_url = url_arg
|
||||
target_file = item
|
||||
|
||||
# Check for rich URL object from get-url
|
||||
if isinstance(item, dict) and "url" in item and "source_file" in item:
|
||||
if not target_url:
|
||||
target_url = item["url"]
|
||||
target_file = item["source_file"]
|
||||
|
||||
if not target_url:
|
||||
continue
|
||||
|
||||
if _delete_single(target_file, target_url, override_hash, config):
|
||||
success_count += 1
|
||||
|
||||
if success_count == 0:
|
||||
if not url_arg:
|
||||
log("Requires a URL argument or valid selection.")
|
||||
else:
|
||||
log("Failed to delete URL(s).")
|
||||
return 1
|
||||
try:
|
||||
client.delete_url(hash_hex, url)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus del-url failed: {exc}")
|
||||
return 1
|
||||
log(f"Deleted URL: {url}")
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def _delete_single(result: Any, url: str, override_hash: str | None, config: Dict[str, Any]) -> bool:
|
||||
# Helper to get field from both dict and object
|
||||
def get_field(obj: Any, field: str, default: Any = None) -> Any:
|
||||
if isinstance(obj, dict):
|
||||
return obj.get(field, default)
|
||||
else:
|
||||
return getattr(obj, field, default)
|
||||
|
||||
success = False
|
||||
|
||||
# 1. Try Local Library
|
||||
file_path = get_field(result, "file_path") or get_field(result, "path")
|
||||
if file_path and not override_hash:
|
||||
try:
|
||||
path_obj = Path(file_path)
|
||||
if path_obj.exists():
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
metadata = db.get_metadata(path_obj) or {}
|
||||
known_urls = metadata.get("known_urls") or []
|
||||
|
||||
# Handle comma-separated URLs if passed as arg
|
||||
# But first check if the exact url string exists (e.g. if it contains commas itself)
|
||||
urls_to_process = []
|
||||
if url in known_urls:
|
||||
urls_to_process = [url]
|
||||
else:
|
||||
urls_to_process = [u.strip() for u in url.split(',') if u.strip()]
|
||||
|
||||
local_changed = False
|
||||
for u in urls_to_process:
|
||||
if u in known_urls:
|
||||
known_urls.remove(u)
|
||||
local_changed = True
|
||||
ctx.emit(f"Deleted URL from local file {path_obj.name}: {u}")
|
||||
|
||||
if local_changed:
|
||||
metadata["known_urls"] = known_urls
|
||||
db.save_metadata(path_obj, metadata)
|
||||
success = True
|
||||
except Exception as e:
|
||||
log(f"Error updating local library: {e}", file=sys.stderr)
|
||||
|
||||
# 2. Try Hydrus
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(result, "hash_hex", None))
|
||||
|
||||
if hash_hex:
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
if client:
|
||||
urls_to_delete = [u.strip() for u in url.split(',') if u.strip()]
|
||||
for u in urls_to_delete:
|
||||
client.delete_url(hash_hex, u)
|
||||
preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
|
||||
ctx.emit(f"Deleted URL from Hydrus file {preview}: {u}")
|
||||
success = True
|
||||
except Exception as exc:
|
||||
log(f"Hydrus del-url failed: {exc}", file=sys.stderr)
|
||||
|
||||
return success
|
||||
|
||||
@@ -372,6 +372,18 @@ def _handle_search_result(result: Any, args: Sequence[str], config: Dict[str, An
|
||||
log("Error: No magnet ID in debrid result", file=sys.stderr)
|
||||
return 1
|
||||
return _handle_debrid_file(magnet_id, file_title, config, args)
|
||||
elif storage_name.lower() in {'bandcamp', 'youtube'}:
|
||||
# Handle Bandcamp/YouTube via yt-dlp
|
||||
url = get_field(result, 'target', None)
|
||||
if not url:
|
||||
# Try to find URL in other fields
|
||||
url = get_field(result, 'url', None)
|
||||
|
||||
if not url:
|
||||
log(f"Error: No URL found for {storage_name} result", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
return _handle_ytdlp_download(url, file_title, config, args)
|
||||
else:
|
||||
log(f"Unknown storage backend: {storage_name}", file=sys.stderr)
|
||||
return 1
|
||||
@@ -507,8 +519,28 @@ def _handle_local_file(file_path: Optional[str], file_title: str, config: Dict[s
|
||||
try:
|
||||
source = Path(file_path)
|
||||
if not source.exists():
|
||||
log(f"Error: File not found: {file_path}", file=sys.stderr)
|
||||
return 1
|
||||
# Try to resolve by hash if the path looks like a hash
|
||||
resolved_local = False
|
||||
if looks_like_hash(str(file_path)):
|
||||
try:
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
resolved_path = db.search_by_hash(str(file_path))
|
||||
if resolved_path and resolved_path.exists():
|
||||
source = resolved_path
|
||||
file_path = str(resolved_path)
|
||||
resolved_local = True
|
||||
# Also set file_hash since we know it
|
||||
file_hash = str(file_path)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not resolved_local:
|
||||
log(f"Error: File not found: {file_path}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Check for explicit user flags
|
||||
force_mpv = any(str(a).lower() in {'-mpv', '--mpv', 'mpv'} for a in args)
|
||||
@@ -741,7 +773,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Also check for 'source' field (from add-file and other cmdlets)
|
||||
if not origin:
|
||||
origin = get_field(actual_result, 'source', None)
|
||||
if origin and origin.lower() in {'hydrus', 'local', 'debrid', 'alldebrid'}:
|
||||
if origin and origin.lower() in {'hydrus', 'local', 'debrid', 'alldebrid', 'bandcamp', 'youtube'}:
|
||||
# This is a search result with explicit origin - handle it via _handle_search_result
|
||||
return _handle_search_result(actual_result, args, config)
|
||||
|
||||
@@ -1023,8 +1055,28 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if isinstance(local_target, str) and not is_url and not (hash_spec and file_hash):
|
||||
p = Path(local_target)
|
||||
if not p.exists():
|
||||
log(f"File missing: {p}")
|
||||
return 1
|
||||
# Check if it's a hash and try to resolve locally
|
||||
resolved_local = False
|
||||
if looks_like_hash(local_target):
|
||||
try:
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
resolved_path = db.search_by_hash(local_target)
|
||||
if resolved_path and resolved_path.exists():
|
||||
p = resolved_path
|
||||
resolved_local = True
|
||||
# Also set file_hash since we know it
|
||||
file_hash = local_target
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not resolved_local:
|
||||
log(f"File missing: {p}")
|
||||
return 1
|
||||
|
||||
source_path = p
|
||||
try:
|
||||
source_size = p.stat().st_size
|
||||
@@ -1046,127 +1098,158 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
except OSError:
|
||||
pass
|
||||
elif file_hash:
|
||||
# Try local resolution first if origin is local or just in case
|
||||
resolved_local = False
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus client unavailable: {exc}")
|
||||
return 1
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable")
|
||||
return 1
|
||||
|
||||
# Fetch metadata and tags (needed for both -metadata flag and audio tagging)
|
||||
# Fetch tags
|
||||
try:
|
||||
tags_payload = client.fetch_file_metadata(hashes=[file_hash], include_service_keys_to_tags=True)
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
resolved_path = db.search_by_hash(file_hash)
|
||||
if resolved_path and resolved_path.exists():
|
||||
source_path = resolved_path
|
||||
resolved_local = True
|
||||
try:
|
||||
source_size = source_path.stat().st_size
|
||||
except OSError:
|
||||
source_size = None
|
||||
duration_sec = _ffprobe_duration_seconds(source_path)
|
||||
except Exception:
|
||||
tags_payload = {}
|
||||
|
||||
# Fetch URLs
|
||||
try:
|
||||
urls_payload = client.fetch_file_metadata(hashes=[file_hash], include_file_urls=True)
|
||||
except Exception:
|
||||
urls_payload = {}
|
||||
|
||||
# Extract title from metadata if base_name is still 'export'
|
||||
if base_name == 'export' and tags_payload:
|
||||
pass
|
||||
|
||||
if not resolved_local:
|
||||
try:
|
||||
file_metadata = tags_payload.get('file_metadata', [])
|
||||
if file_metadata and isinstance(file_metadata, list) and len(file_metadata) > 0:
|
||||
meta = file_metadata[0]
|
||||
if isinstance(meta, dict):
|
||||
tags_dict = meta.get('tags', {})
|
||||
if isinstance(tags_dict, dict):
|
||||
# Look for title in storage tags
|
||||
for service in tags_dict.values():
|
||||
if isinstance(service, dict):
|
||||
storage = service.get('storage_tags', {})
|
||||
if isinstance(storage, dict):
|
||||
for tag_list in storage.values():
|
||||
if isinstance(tag_list, list):
|
||||
for tag in tag_list:
|
||||
if isinstance(tag, str) and tag.lower().startswith('title:'):
|
||||
title_val = tag.split(':', 1)[1].strip()
|
||||
if title_val:
|
||||
base_name = _sanitize_name(title_val)
|
||||
break
|
||||
if base_name != 'export':
|
||||
break
|
||||
if base_name != 'export':
|
||||
break
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus client unavailable: {exc}")
|
||||
return 1
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable")
|
||||
return 1
|
||||
|
||||
# Fetch metadata and tags (needed for both -metadata flag and audio tagging)
|
||||
# Fetch tags
|
||||
try:
|
||||
tags_payload = client.fetch_file_metadata(hashes=[file_hash], include_service_keys_to_tags=True)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Normal file export (happens regardless of -metadata flag)
|
||||
try:
|
||||
from helper.hydrus import hydrus_export as _hydrus_export
|
||||
except Exception:
|
||||
_hydrus_export = None # type: ignore
|
||||
if _hydrus_export is None:
|
||||
log("Hydrus export helper unavailable")
|
||||
return 1
|
||||
download_dir = out_override if (out_override and out_override.is_dir()) else default_dir
|
||||
try:
|
||||
download_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception:
|
||||
# If mkdir fails, fall back to default_dir
|
||||
download_dir = default_dir
|
||||
|
||||
# Verify the directory is writable; if not, fall back to default
|
||||
try:
|
||||
test_file = download_dir / f".downlow_write_test_{_uuid.uuid4().hex[:8]}"
|
||||
test_file.touch()
|
||||
test_file.unlink()
|
||||
except (OSError, PermissionError):
|
||||
# Directory is not writable, use default_dir instead
|
||||
download_dir = default_dir
|
||||
tags_payload = {}
|
||||
|
||||
# Fetch URLs
|
||||
try:
|
||||
urls_payload = client.fetch_file_metadata(hashes=[file_hash], include_file_urls=True)
|
||||
except Exception:
|
||||
urls_payload = {}
|
||||
|
||||
# Extract title from metadata if base_name is still 'export'
|
||||
if base_name == 'export' and tags_payload:
|
||||
try:
|
||||
file_metadata = tags_payload.get('file_metadata', [])
|
||||
if file_metadata and isinstance(file_metadata, list) and len(file_metadata) > 0:
|
||||
meta = file_metadata[0]
|
||||
if isinstance(meta, dict):
|
||||
tags_dict = meta.get('tags', {})
|
||||
if isinstance(tags_dict, dict):
|
||||
# Look for title in storage tags
|
||||
for service in tags_dict.values():
|
||||
if isinstance(service, dict):
|
||||
storage = service.get('storage_tags', {})
|
||||
if isinstance(storage, dict):
|
||||
for tag_list in storage.values():
|
||||
if isinstance(tag_list, list):
|
||||
for tag in tag_list:
|
||||
if isinstance(tag, str) and tag.lower().startswith('title:'):
|
||||
title_val = tag.split(':', 1)[1].strip()
|
||||
if title_val:
|
||||
base_name = _sanitize_name(title_val)
|
||||
break
|
||||
if base_name != 'export':
|
||||
break
|
||||
if base_name != 'export':
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Normal file export (happens regardless of -metadata flag)
|
||||
try:
|
||||
from helper.hydrus import hydrus_export as _hydrus_export
|
||||
except Exception:
|
||||
_hydrus_export = None # type: ignore
|
||||
if _hydrus_export is None:
|
||||
log("Hydrus export helper unavailable")
|
||||
return 1
|
||||
download_dir = out_override if (out_override and out_override.is_dir()) else default_dir
|
||||
try:
|
||||
download_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception:
|
||||
# If mkdir fails, fall back to default_dir
|
||||
download_dir = default_dir
|
||||
|
||||
# Verify the directory is writable; if not, fall back to default
|
||||
try:
|
||||
test_file = download_dir / f".downlow_write_test_{_uuid.uuid4().hex[:8]}"
|
||||
test_file.touch()
|
||||
test_file.unlink()
|
||||
except (OSError, PermissionError):
|
||||
# Directory is not writable, use default_dir instead
|
||||
download_dir = default_dir
|
||||
try:
|
||||
download_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception:
|
||||
pass
|
||||
token = (_uuid.uuid4().hex[:8])
|
||||
provisional_stem = f"{base_name}.dlhx_{token}"
|
||||
provisional = download_dir / f"{provisional_stem}.bin"
|
||||
class _Args:
|
||||
pass
|
||||
token = (_uuid.uuid4().hex[:8])
|
||||
provisional_stem = f"{base_name}.dlhx_{token}"
|
||||
provisional = download_dir / f"{provisional_stem}.bin"
|
||||
class _Args:
|
||||
pass
|
||||
args_obj = _Args()
|
||||
setattr(args_obj, 'output', provisional)
|
||||
setattr(args_obj, 'format', 'copy')
|
||||
setattr(args_obj, 'tmp_dir', str(download_dir))
|
||||
setattr(args_obj, 'metadata_json', None)
|
||||
setattr(args_obj, 'hydrus_url', get_hydrus_url(config, "home") or "http://localhost:45869")
|
||||
setattr(args_obj, 'access_key', get_hydrus_access_key(config, "home") or "")
|
||||
setattr(args_obj, 'timeout', float(config.get('HydrusNetwork_Request_Timeout') or 60.0))
|
||||
try:
|
||||
file_url = client.file_url(file_hash)
|
||||
except Exception:
|
||||
file_url = None
|
||||
setattr(args_obj, 'file_url', file_url)
|
||||
setattr(args_obj, 'file_hash', file_hash)
|
||||
import io as _io, contextlib as _contextlib
|
||||
_buf = _io.StringIO()
|
||||
status = 1
|
||||
with _contextlib.redirect_stdout(_buf):
|
||||
status = _hydrus_export(args_obj, None)
|
||||
if status != 0:
|
||||
stderr_text = _buf.getvalue().strip()
|
||||
if stderr_text:
|
||||
log(stderr_text)
|
||||
return status
|
||||
json_text = _buf.getvalue().strip().splitlines()[-1] if _buf.getvalue() else ''
|
||||
final_from_json: Optional[Path] = None
|
||||
try:
|
||||
payload = json.loads(json_text) if json_text else None
|
||||
if isinstance(payload, dict):
|
||||
outp = payload.get('output')
|
||||
if isinstance(outp, str) and outp:
|
||||
final_from_json = Path(outp)
|
||||
except Exception:
|
||||
final_from_json = None
|
||||
if final_from_json and final_from_json.exists():
|
||||
source_path = final_from_json
|
||||
else:
|
||||
args_obj = _Args()
|
||||
setattr(args_obj, 'output', provisional)
|
||||
setattr(args_obj, 'format', 'copy')
|
||||
setattr(args_obj, 'tmp_dir', str(download_dir))
|
||||
setattr(args_obj, 'metadata_json', None)
|
||||
setattr(args_obj, 'hydrus_url', get_hydrus_url(config, "home") or "http://localhost:45869")
|
||||
setattr(args_obj, 'access_key', get_hydrus_access_key(config, "home") or "")
|
||||
setattr(args_obj, 'timeout', float(config.get('HydrusNetwork_Request_Timeout') or 60.0))
|
||||
try:
|
||||
file_url = client.file_url(file_hash)
|
||||
except Exception:
|
||||
file_url = None
|
||||
setattr(args_obj, 'file_url', file_url)
|
||||
setattr(args_obj, 'file_hash', file_hash)
|
||||
import io as _io, contextlib as _contextlib
|
||||
_buf = _io.StringIO()
|
||||
status = 1
|
||||
with _contextlib.redirect_stdout(_buf):
|
||||
status = _hydrus_export(args_obj, None)
|
||||
if status != 0:
|
||||
stderr_text = _buf.getvalue().strip()
|
||||
if stderr_text:
|
||||
log(stderr_text)
|
||||
return status
|
||||
json_text = _buf.getvalue().strip().splitlines()[-1] if _buf.getvalue() else ''
|
||||
final_from_json: Optional[Path] = None
|
||||
try:
|
||||
payload = json.loads(json_text) if json_text else None
|
||||
if isinstance(payload, dict):
|
||||
outp = payload.get('output')
|
||||
if isinstance(outp, str) and outp:
|
||||
final_from_json = Path(outp)
|
||||
except Exception:
|
||||
final_from_json = None
|
||||
if final_from_json and final_from_json.exists():
|
||||
source_path = final_from_json
|
||||
else:
|
||||
candidates = [p for p in provisional.parent.glob(provisional_stem + '*') if p.exists() and p.is_file()]
|
||||
non_provisional = [p for p in candidates if p.suffix.lower() not in {'.bin', '.hydrus'}]
|
||||
pick_from = non_provisional if non_provisional else candidates
|
||||
if pick_from:
|
||||
try:
|
||||
source_path = max(pick_from, key=lambda p: p.stat().st_mtime)
|
||||
except Exception:
|
||||
source_path = pick_from[0]
|
||||
else:
|
||||
source_path = provisional
|
||||
candidates = [p for p in provisional.parent.glob(provisional_stem + '*') if p.exists() and p.is_file()]
|
||||
non_provisional = [p for p in candidates if p.suffix.lower() not in {'.bin', '.hydrus'}]
|
||||
pick_from = non_provisional if non_provisional else candidates
|
||||
@@ -1177,16 +1260,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
source_path = pick_from[0]
|
||||
else:
|
||||
source_path = provisional
|
||||
candidates = [p for p in provisional.parent.glob(provisional_stem + '*') if p.exists() and p.is_file()]
|
||||
non_provisional = [p for p in candidates if p.suffix.lower() not in {'.bin', '.hydrus'}]
|
||||
pick_from = non_provisional if non_provisional else candidates
|
||||
if pick_from:
|
||||
try:
|
||||
source_path = max(pick_from, key=lambda p: p.stat().st_mtime)
|
||||
except Exception:
|
||||
source_path = pick_from[0]
|
||||
else:
|
||||
source_path = provisional
|
||||
try:
|
||||
source_size = source_size or (source_path.stat().st_size if source_path.exists() else None)
|
||||
except OSError:
|
||||
@@ -1479,6 +1552,77 @@ def _unique_path(p: Path) -> Path:
|
||||
return p
|
||||
|
||||
|
||||
def _handle_ytdlp_download(url: str, title: str, config: Dict[str, Any], args: Sequence[str]) -> int:
|
||||
"""Handle download/streaming of URL using yt-dlp."""
|
||||
if not url:
|
||||
log("Error: No URL provided", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Check for -storage local
|
||||
args_list = list(map(str, args))
|
||||
storage_mode = None
|
||||
if '-storage' in args_list:
|
||||
try:
|
||||
idx = args_list.index('-storage')
|
||||
if idx + 1 < len(args_list):
|
||||
storage_mode = args_list[idx + 1].lower()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
force_local = (storage_mode == 'local')
|
||||
|
||||
if not force_local:
|
||||
# Default: Stream to MPV
|
||||
if _play_in_mpv(url, title, is_stream=True):
|
||||
from . import pipe
|
||||
pipe._run(None, [], config)
|
||||
return 0
|
||||
else:
|
||||
# Fallback to browser
|
||||
try:
|
||||
import webbrowser
|
||||
webbrowser.open(url)
|
||||
debug(f"[get-file] Opened in browser: {title}", file=sys.stderr)
|
||||
return 0
|
||||
except Exception:
|
||||
pass
|
||||
return 1
|
||||
|
||||
# Download mode
|
||||
try:
|
||||
import yt_dlp
|
||||
except ImportError:
|
||||
log("Error: yt-dlp not installed. Please install it to download.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
log(f"Downloading {title}...", file=sys.stderr)
|
||||
|
||||
# Determine output directory
|
||||
download_dir = resolve_output_dir(config)
|
||||
try:
|
||||
download_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Configure yt-dlp
|
||||
ydl_opts = {
|
||||
'outtmpl': str(download_dir / '%(title)s.%(ext)s'),
|
||||
'quiet': False,
|
||||
'no_warnings': True,
|
||||
# Use best audio/video
|
||||
'format': 'best',
|
||||
}
|
||||
|
||||
try:
|
||||
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
||||
ydl.download([url])
|
||||
log(f"Downloaded to: {download_dir}", file=sys.stderr)
|
||||
return 0
|
||||
except Exception as e:
|
||||
log(f"Error downloading: {e}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="get-file",
|
||||
summary="Export files: from Hydrus database OR from AllDebrid magnets via pipe. Auto-detects source and handles accordingly.",
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import annotations
|
||||
from typing import Any, Dict, Sequence, List, Optional
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from helper.logger import log
|
||||
|
||||
@@ -11,16 +12,19 @@ import models
|
||||
import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash, fmt_bytes
|
||||
from helper.local_library import LocalLibraryDB
|
||||
from config import get_local_storage_path
|
||||
from result_table import ResultTable
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="get-relationship",
|
||||
summary="Print Hydrus relationships for the selected file.",
|
||||
summary="Print relationships for the selected file (Hydrus or Local).",
|
||||
usage="get-relationship [-hash <sha256>]",
|
||||
args=[
|
||||
CmdletArg("-hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
|
||||
],
|
||||
details=[
|
||||
"- Lists relationship data as returned by Hydrus.",
|
||||
"- Lists relationship data as returned by Hydrus or Local DB.",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -50,190 +54,187 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if isinstance(result, list) and len(result) > 0:
|
||||
result = result[0]
|
||||
|
||||
# Initialize results collection
|
||||
found_relationships = [] # List of dicts: {hash, type, title, path, origin}
|
||||
source_title = "Unknown"
|
||||
|
||||
# Check for local file first
|
||||
file_path = None
|
||||
if isinstance(result, dict):
|
||||
file_path = result.get("file_path") or result.get("path")
|
||||
source_title = result.get("title") or result.get("name") or "Unknown"
|
||||
elif hasattr(result, "file_path"):
|
||||
file_path = result.file_path
|
||||
source_title = getattr(result, "title", "Unknown")
|
||||
|
||||
local_db_checked = False
|
||||
|
||||
if file_path and not override_hash:
|
||||
try:
|
||||
path_obj = Path(file_path)
|
||||
if not source_title or source_title == "Unknown":
|
||||
source_title = path_obj.name
|
||||
|
||||
if path_obj.exists():
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
metadata = db.get_metadata(path_obj)
|
||||
if metadata and metadata.get("relationships"):
|
||||
local_db_checked = True
|
||||
rels = metadata["relationships"]
|
||||
if isinstance(rels, dict):
|
||||
for rel_type, hashes in rels.items():
|
||||
if hashes:
|
||||
for h in hashes:
|
||||
# Try to resolve hash to filename if possible
|
||||
resolved_path = db.search_by_hash(h)
|
||||
title = h
|
||||
path = None
|
||||
if resolved_path:
|
||||
path = str(resolved_path)
|
||||
# Try to get title from tags
|
||||
try:
|
||||
tags = db.get_tags(resolved_path)
|
||||
found_title = False
|
||||
for t in tags:
|
||||
if t.lower().startswith('title:'):
|
||||
title = t[6:].strip()
|
||||
found_title = True
|
||||
break
|
||||
if not found_title:
|
||||
title = resolved_path.stem
|
||||
except Exception:
|
||||
title = resolved_path.stem
|
||||
|
||||
found_relationships.append({
|
||||
"hash": h,
|
||||
"type": rel_type,
|
||||
"title": title,
|
||||
"path": path,
|
||||
"origin": "local"
|
||||
})
|
||||
except Exception as e:
|
||||
log(f"Error checking local relationships: {e}", file=sys.stderr)
|
||||
|
||||
# If we found local relationships, we can stop or merge with Hydrus?
|
||||
# For now, if we found local ones, let's show them.
|
||||
# But if the file is also in Hydrus, we might want those too.
|
||||
# Let's try Hydrus if we have a hash.
|
||||
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
|
||||
if not hash_hex:
|
||||
log("Selected result does not include a Hydrus hash", file=sys.stderr)
|
||||
return 1
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus client unavailable: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable", file=sys.stderr)
|
||||
return 1
|
||||
try:
|
||||
rel = client.get_file_relationships(hash_hex)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus relationships fetch failed: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
if not rel:
|
||||
log("No relationships found.")
|
||||
return 0
|
||||
|
||||
# Extract file_relationships from response
|
||||
file_rels = rel.get("file_relationships", {})
|
||||
if not file_rels:
|
||||
log("No relationships found.")
|
||||
return 0
|
||||
|
||||
# Get the relationships dict for this specific hash
|
||||
this_file_rels = file_rels.get(hash_hex)
|
||||
if not this_file_rels:
|
||||
log("No relationships found.")
|
||||
return 0
|
||||
|
||||
# Extract related hashes from all relationship types
|
||||
# Keys "0", "1", "3", "8" are relationship type IDs
|
||||
# Values are lists of hashes
|
||||
related_hashes = []
|
||||
for rel_type_id, hash_list in this_file_rels.items():
|
||||
# Skip non-numeric keys and metadata keys
|
||||
if rel_type_id in {"is_king", "king", "king_is_on_file_domain", "king_is_local"}:
|
||||
continue
|
||||
if isinstance(hash_list, list):
|
||||
for rel_hash in hash_list:
|
||||
if isinstance(rel_hash, str) and rel_hash and rel_hash != hash_hex:
|
||||
related_hashes.append(rel_hash)
|
||||
|
||||
# Remove duplicates while preserving order
|
||||
seen = set()
|
||||
unique_hashes = []
|
||||
for h in related_hashes:
|
||||
if h not in seen:
|
||||
seen.add(h)
|
||||
unique_hashes.append(h)
|
||||
|
||||
if not unique_hashes:
|
||||
log("No related files found.")
|
||||
return 0
|
||||
|
||||
# Fetch metadata for all related files
|
||||
try:
|
||||
metadata_payload = client.fetch_file_metadata(
|
||||
hashes=unique_hashes,
|
||||
include_service_keys_to_tags=True,
|
||||
include_duration=True,
|
||||
include_size=True,
|
||||
include_mime=True,
|
||||
)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus metadata fetch failed: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
metadata_list = metadata_payload.get("metadata") if isinstance(metadata_payload, dict) else None
|
||||
if not isinstance(metadata_list, list):
|
||||
log("Hydrus metadata response was not a list", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Build metadata map by hash
|
||||
meta_by_hash: Dict[str, Dict[str, Any]] = {}
|
||||
for item in metadata_list:
|
||||
if isinstance(item, dict):
|
||||
item_hash = normalize_hash(item.get("hash"))
|
||||
if item_hash:
|
||||
meta_by_hash[item_hash] = item
|
||||
|
||||
# Helper functions for formatting
|
||||
def _format_duration(seconds: Optional[float]) -> str:
|
||||
if seconds is None:
|
||||
return ""
|
||||
# Try to get hash from dict
|
||||
if isinstance(result, dict):
|
||||
hash_hex = normalize_hash(result.get("hash") or result.get("file_hash"))
|
||||
|
||||
if hash_hex and not local_db_checked:
|
||||
try:
|
||||
s = int(seconds)
|
||||
hours = s // 3600
|
||||
minutes = (s % 3600) // 60
|
||||
secs = s % 60
|
||||
if hours > 0:
|
||||
return f"{hours}:{minutes:02d}:{secs:02d}"
|
||||
else:
|
||||
return f"{minutes}:{secs:02d}"
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
def _get_title(meta: Dict[str, Any]) -> str:
|
||||
# Try to extract title from tags
|
||||
tags_payload = meta.get("tags")
|
||||
if isinstance(tags_payload, dict):
|
||||
for service_data in tags_payload.values():
|
||||
if isinstance(service_data, dict):
|
||||
storage_tags = service_data.get("storage_tags")
|
||||
if isinstance(storage_tags, dict):
|
||||
for tag_list in storage_tags.values():
|
||||
if isinstance(tag_list, list):
|
||||
for tag in tag_list:
|
||||
tag_str = str(tag).lower()
|
||||
if tag_str.startswith("title:"):
|
||||
return str(tag)[6:].strip()
|
||||
# Fallback to hash prefix
|
||||
h = meta.get("hash")
|
||||
return str(h)[:12] if h else "unknown"
|
||||
|
||||
def _get_mime_type(meta: Dict[str, Any]) -> str:
|
||||
mime = meta.get("mime", "")
|
||||
if not mime:
|
||||
return ""
|
||||
# Extract type from mime (e.g., "video/mp4" -> "video")
|
||||
parts = str(mime).split("/")
|
||||
return parts[0] if parts else ""
|
||||
|
||||
# Print header and separator
|
||||
log("# | Title | Type | Duration | Size")
|
||||
log("--+---------------------------+-------+----------+--------")
|
||||
|
||||
# Create result objects for each related file
|
||||
results: List[Any] = []
|
||||
|
||||
# Print each related file
|
||||
for idx, rel_hash in enumerate(unique_hashes, start=1):
|
||||
meta = meta_by_hash.get(rel_hash)
|
||||
if not meta:
|
||||
continue
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
if client:
|
||||
rel = client.get_file_relationships(hash_hex)
|
||||
if rel:
|
||||
file_rels = rel.get("file_relationships", {})
|
||||
this_file_rels = file_rels.get(hash_hex)
|
||||
|
||||
if this_file_rels:
|
||||
# Map Hydrus relationship IDs to names
|
||||
# 0: potential duplicates, 1: false positives, 2: false positives (alternates),
|
||||
# 3: duplicates, 4: alternatives, 8: king
|
||||
# This mapping is approximate based on Hydrus API docs/behavior
|
||||
rel_map = {
|
||||
"0": "potential duplicate",
|
||||
"1": "false positive",
|
||||
"2": "false positive",
|
||||
"3": "duplicate",
|
||||
"4": "alternative",
|
||||
"8": "king"
|
||||
}
|
||||
|
||||
for rel_type_id, hash_list in this_file_rels.items():
|
||||
# Skip metadata keys
|
||||
if rel_type_id in {"is_king", "king", "king_is_on_file_domain", "king_is_local"}:
|
||||
continue
|
||||
|
||||
rel_name = rel_map.get(str(rel_type_id), f"type-{rel_type_id}")
|
||||
|
||||
if isinstance(hash_list, list):
|
||||
for rel_hash in hash_list:
|
||||
if isinstance(rel_hash, str) and rel_hash and rel_hash != hash_hex:
|
||||
# Check if we already have this hash from local DB
|
||||
if not any(r['hash'] == rel_hash for r in found_relationships):
|
||||
found_relationships.append({
|
||||
"hash": rel_hash,
|
||||
"type": rel_name,
|
||||
"title": rel_hash, # Can't resolve title easily without another API call
|
||||
"path": None,
|
||||
"origin": "hydrus"
|
||||
})
|
||||
except Exception as exc:
|
||||
# Only log error if we didn't find local relationships either
|
||||
if not found_relationships:
|
||||
log(f"Hydrus relationships fetch failed: {exc}", file=sys.stderr)
|
||||
|
||||
if not found_relationships:
|
||||
log("No relationships found.")
|
||||
return 0
|
||||
|
||||
title = _get_title(meta)
|
||||
mime_type = _get_mime_type(meta)
|
||||
|
||||
# Get duration
|
||||
duration_value = meta.get("duration")
|
||||
if duration_value is None and isinstance(meta.get("metadata"), dict):
|
||||
duration_value = meta["metadata"].get("duration")
|
||||
duration_str = _format_duration(duration_value)
|
||||
|
||||
# Get size
|
||||
size = meta.get("size") or meta.get("file_size")
|
||||
size_str = fmt_bytes(size) if size else ""
|
||||
|
||||
# Format and print row
|
||||
title_display = title[:25].ljust(25)
|
||||
type_display = mime_type[:5].ljust(5)
|
||||
duration_display = duration_str[:8].ljust(8)
|
||||
size_display = size_str[:7].ljust(7)
|
||||
|
||||
log(f"{idx:2d} | {title_display} | {type_display} | {duration_display} | {size_display}")
|
||||
# Display results
|
||||
table = ResultTable(f"Relationships: {source_title}")
|
||||
|
||||
# Sort by type then title
|
||||
# Custom sort order: King first, then Derivative, then others
|
||||
def type_sort_key(item):
|
||||
t = item['type'].lower()
|
||||
if t == 'king':
|
||||
return 0
|
||||
elif t == 'derivative':
|
||||
return 1
|
||||
elif t == 'alternative':
|
||||
return 2
|
||||
elif t == 'duplicate':
|
||||
return 3
|
||||
else:
|
||||
return 4
|
||||
|
||||
found_relationships.sort(key=lambda x: (type_sort_key(x), x['title']))
|
||||
|
||||
pipeline_results = []
|
||||
|
||||
for i, item in enumerate(found_relationships):
|
||||
row = table.add_row()
|
||||
row.add_column("Type", item['type'].title())
|
||||
row.add_column("Title", item['title'])
|
||||
# row.add_column("Hash", item['hash'][:16] + "...") # User requested removal
|
||||
row.add_column("Origin", item['origin'])
|
||||
|
||||
# Create result object for pipeline
|
||||
result_obj = type("RelatedFile", (), {
|
||||
"hash_hex": rel_hash,
|
||||
"title": title,
|
||||
"media_kind": mime_type or "other",
|
||||
"size": size,
|
||||
"duration": duration_value,
|
||||
"known_urls": [],
|
||||
"annotations": [],
|
||||
"columns": [
|
||||
("Title", title),
|
||||
("Type", mime_type),
|
||||
("Duration", duration_str),
|
||||
("Size", size_str),
|
||||
],
|
||||
})()
|
||||
results.append(result_obj)
|
||||
|
||||
# Emit results to pipeline
|
||||
try:
|
||||
ctx._PIPE_EMITS.extend(results)
|
||||
except Exception:
|
||||
pass
|
||||
res_obj = {
|
||||
"title": item['title'],
|
||||
"hash": item['hash'],
|
||||
"file_hash": item['hash'],
|
||||
"relationship_type": item['type'],
|
||||
"origin": item['origin']
|
||||
}
|
||||
if item['path']:
|
||||
res_obj["path"] = item['path']
|
||||
res_obj["file_path"] = item['path']
|
||||
res_obj["target"] = item['path']
|
||||
else:
|
||||
# If Hydrus, target is hash
|
||||
res_obj["target"] = item['hash']
|
||||
|
||||
pipeline_results.append(res_obj)
|
||||
|
||||
# Set selection args
|
||||
# If it has a path, we can use it directly. If hash, maybe get-file -hash?
|
||||
if item['path']:
|
||||
table.set_row_selection_args(i, [item['path']])
|
||||
else:
|
||||
table.set_row_selection_args(i, ["-hash", item['hash']])
|
||||
|
||||
ctx.set_last_result_table(table, pipeline_results)
|
||||
print(table)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
@@ -2,6 +2,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from . import register
|
||||
import models
|
||||
@@ -9,16 +11,18 @@ import pipeline as ctx
|
||||
from helper import hydrus as hydrus_wrapper
|
||||
from ._shared import Cmdlet, CmdletArg, normalize_hash
|
||||
from helper.logger import log
|
||||
from config import get_local_storage_path
|
||||
from helper.local_library import LocalLibraryDB
|
||||
|
||||
CMDLET = Cmdlet(
|
||||
name="get-url",
|
||||
summary="List URLs associated with a Hydrus file.",
|
||||
summary="List URLs associated with a file (Hydrus or Local).",
|
||||
usage="get-url [-hash <sha256>]",
|
||||
args=[
|
||||
CmdletArg("-hash", description="Override the Hydrus file hash (SHA256) to target instead of the selected result."),
|
||||
],
|
||||
details=[
|
||||
"- Prints the known URLs for the selected Hydrus file.",
|
||||
"- Prints the known URLs for the selected file.",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -55,36 +59,81 @@ def get_urls(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
return 0
|
||||
except Exception:
|
||||
pass
|
||||
override_hash, _ = _parse_hash_and_rest(args)
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(result, "hash_hex", None))
|
||||
if not hash_hex:
|
||||
log("Selected result does not include a Hydrus hash")
|
||||
return 1
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus client unavailable: {exc}")
|
||||
return 1
|
||||
|
||||
if client is None:
|
||||
log("Hydrus client unavailable")
|
||||
return 1
|
||||
try:
|
||||
payload = client.fetch_file_metadata(hashes=[hash_hex], include_file_urls=True)
|
||||
except Exception as exc:
|
||||
log(f"Hydrus metadata fetch failed: {exc}")
|
||||
return 1
|
||||
items = payload.get("metadata") if isinstance(payload, dict) else None
|
||||
meta = items[0] if (isinstance(items, list) and items and isinstance(items[0], dict)) else None
|
||||
urls = (meta.get("known_urls") if isinstance(meta, dict) else None) or []
|
||||
if urls:
|
||||
ctx.emit("URLs:")
|
||||
for u in urls:
|
||||
override_hash, _ = _parse_hash_and_rest(args)
|
||||
|
||||
# Handle @N selection which creates a list - extract the first item
|
||||
if isinstance(result, list) and len(result) > 0:
|
||||
result = result[0]
|
||||
|
||||
found_urls = []
|
||||
|
||||
# 1. Try Local Library
|
||||
file_path = get_field(result, "file_path") or get_field(result, "path")
|
||||
if file_path and not override_hash:
|
||||
try:
|
||||
path_obj = Path(file_path)
|
||||
if path_obj.exists():
|
||||
storage_path = get_local_storage_path(config)
|
||||
if storage_path:
|
||||
with LocalLibraryDB(storage_path) as db:
|
||||
metadata = db.get_metadata(path_obj)
|
||||
if metadata and metadata.get("known_urls"):
|
||||
found_urls.extend(metadata["known_urls"])
|
||||
except Exception as e:
|
||||
log(f"Error checking local library: {e}", file=sys.stderr)
|
||||
|
||||
# 2. Try Hydrus
|
||||
hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(get_field(result, "hash_hex", None))
|
||||
|
||||
# If we haven't found URLs yet, or if we want to merge them (maybe?), let's check Hydrus if we have a hash
|
||||
# But usually if it's local, we might not want to check Hydrus unless requested.
|
||||
# However, the user said "they can just work together".
|
||||
|
||||
if hash_hex:
|
||||
try:
|
||||
client = hydrus_wrapper.get_client(config)
|
||||
if client:
|
||||
payload = client.fetch_file_metadata(hashes=[hash_hex], include_file_urls=True)
|
||||
items = payload.get("metadata") if isinstance(payload, dict) else None
|
||||
meta = items[0] if (isinstance(items, list) and items and isinstance(items[0], dict)) else None
|
||||
hydrus_urls = (meta.get("known_urls") if isinstance(meta, dict) else None) or []
|
||||
for u in hydrus_urls:
|
||||
if u not in found_urls:
|
||||
found_urls.append(u)
|
||||
except Exception as exc:
|
||||
# Only log error if we didn't find local URLs either, or if it's a specific error
|
||||
if not found_urls:
|
||||
log(f"Hydrus lookup failed: {exc}", file=sys.stderr)
|
||||
|
||||
if found_urls:
|
||||
for u in found_urls:
|
||||
text = str(u).strip()
|
||||
if text:
|
||||
ctx.emit(f"- {text}")
|
||||
else:
|
||||
ctx.emit("No URLs found.")
|
||||
# Emit a rich object that looks like a string but carries context
|
||||
# We use a dict with 'title' which ResultTable uses for display
|
||||
# and 'url' which is the actual data
|
||||
# We also include the source file info so downstream cmdlets can use it
|
||||
|
||||
# Create a result object that mimics the structure expected by delete-url
|
||||
# delete-url expects a file object usually, but here we are emitting URLs.
|
||||
# If we emit a dict with 'url' and 'source_file', delete-url can use it.
|
||||
|
||||
rich_result = {
|
||||
"title": text, # Display as just the URL
|
||||
"url": text,
|
||||
"source_file": result, # Pass the original file context
|
||||
"file_path": get_field(result, "file_path") or get_field(result, "path"),
|
||||
"hash_hex": hash_hex
|
||||
}
|
||||
ctx.emit(rich_result)
|
||||
return 0
|
||||
|
||||
if not hash_hex and not file_path:
|
||||
log("Selected result does not include a file path or Hydrus hash", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
ctx.emit("No URLs found.")
|
||||
return 0
|
||||
|
||||
|
||||
|
||||
@@ -331,6 +331,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
self.media_kind = media_kind
|
||||
self.tags = tags or []
|
||||
self.known_urls = known_urls or []
|
||||
self.origin = "local" # Ensure origin is set for add-file
|
||||
PipelineItem = SimpleItem
|
||||
|
||||
merged_item = PipelineItem(
|
||||
@@ -340,6 +341,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
tags=merged_tags, # Include merged tags
|
||||
known_urls=source_urls # Include known URLs
|
||||
)
|
||||
# Clear previous results to ensure only the merged file is passed down
|
||||
ctx.clear_last_result()
|
||||
ctx.emit(merged_item)
|
||||
except Exception as e:
|
||||
log(f"Warning: Could not emit pipeline item: {e}", file=sys.stderr)
|
||||
@@ -347,7 +350,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
ctx.emit(f"Merged: {output_path}")
|
||||
|
||||
# Delete source files if requested
|
||||
if delete_after:
|
||||
# Always delete source files if they were downloaded playlist items (temp files)
|
||||
# We can detect this if they are in the temp download directory or if we tracked them
|
||||
if delete_after or True: # Force delete for now as merge consumes them
|
||||
# First delete all .tags files
|
||||
for tags_file in source_tags_files:
|
||||
try:
|
||||
|
||||
@@ -157,7 +157,7 @@ def _ensure_storage_columns(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
try:
|
||||
size_bytes = int(size_val)
|
||||
size_mb = size_bytes / (1024 * 1024)
|
||||
size_str = f"{size_mb:.1f} MB"
|
||||
size_str = f"{int(size_mb)} MB"
|
||||
except (ValueError, TypeError):
|
||||
size_str = str(size_val)
|
||||
|
||||
@@ -250,6 +250,34 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
else:
|
||||
i += 1
|
||||
|
||||
# Handle piped input (e.g. from @N selection) if query is empty
|
||||
if not query and result:
|
||||
# If result is a list, take the first item
|
||||
actual_result = result[0] if isinstance(result, list) and result else result
|
||||
|
||||
# Helper to get field
|
||||
def get_field(obj: Any, field: str) -> Any:
|
||||
return getattr(obj, field, None) or (obj.get(field) if isinstance(obj, dict) else None)
|
||||
|
||||
origin = get_field(actual_result, 'origin')
|
||||
target = get_field(actual_result, 'target')
|
||||
|
||||
# Special handling for Bandcamp artist/album drill-down
|
||||
if origin == 'bandcamp' and target:
|
||||
query = target
|
||||
if not provider_name:
|
||||
provider_name = 'bandcamp'
|
||||
|
||||
# Generic URL handling
|
||||
elif target and str(target).startswith(('http://', 'https://')):
|
||||
query = target
|
||||
# Try to infer provider from URL if not set
|
||||
if not provider_name:
|
||||
if 'bandcamp.com' in target:
|
||||
provider_name = 'bandcamp'
|
||||
elif 'youtube.com' in target or 'youtu.be' in target:
|
||||
provider_name = 'youtube'
|
||||
|
||||
if not query:
|
||||
log("Provide a search query", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
295
cmdlets/trim_file.py
Normal file
295
cmdlets/trim_file.py
Normal file
@@ -0,0 +1,295 @@
|
||||
"""Trim a media file using ffmpeg."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Sequence, List, Optional
|
||||
from pathlib import Path
|
||||
import sys
|
||||
import json
|
||||
import subprocess
|
||||
import shutil
|
||||
import re
|
||||
|
||||
from helper.logger import log, debug
|
||||
from helper.utils import sha256_file
|
||||
from . import register
|
||||
from ._shared import (
|
||||
Cmdlet,
|
||||
CmdletArg,
|
||||
parse_cmdlet_args,
|
||||
normalize_result_input,
|
||||
extract_tags_from_result,
|
||||
extract_title_from_result
|
||||
)
|
||||
import pipeline as ctx
|
||||
|
||||
# Cmdlet descriptor for the trim-file command: declares the name, usage
# string, argument spec, and help details. Consumed by parse_cmdlet_args
# in _run and by the framework's help/registration machinery.
CMDLET = Cmdlet(
    name="trim-file",
    summary="Trim a media file using ffmpeg.",
    usage="trim-file [-path <path>] -range <start-end> [-delete]",
    args=[
        CmdletArg("-path", description="Path to the file (optional if piped)."),
        CmdletArg("-range", required=True, description="Time range to trim (e.g. '3:45-3:55' or '00:03:45-00:03:55')."),
        CmdletArg("-delete", type="flag", description="Delete the original file after trimming."),
    ],
    details=[
        "Creates a new file with 'clip_' prefix in the filename/title.",
        "Inherits tags from the source file.",
        "Adds a relationship to the source file (if hash is available).",
        "Output can be piped to add-file.",
    ]
)
|
||||
|
||||
def _parse_time(time_str: str) -> float:
|
||||
"""Convert time string (HH:MM:SS or MM:SS or SS) to seconds."""
|
||||
parts = time_str.strip().split(':')
|
||||
if len(parts) == 3:
|
||||
return float(parts[0]) * 3600 + float(parts[1]) * 60 + float(parts[2])
|
||||
elif len(parts) == 2:
|
||||
return float(parts[0]) * 60 + float(parts[1])
|
||||
elif len(parts) == 1:
|
||||
return float(parts[0])
|
||||
else:
|
||||
raise ValueError(f"Invalid time format: {time_str}")
|
||||
|
||||
def _trim_media(input_path: Path, output_path: Path, start_time: str, end_time: str) -> bool:
    """Trim *input_path* to the range [start_time, end_time] into *output_path*.

    Uses ffmpeg input seeking (``-ss`` before ``-i``) for speed. Because input
    seeking resets timestamps to 0, the cut length is expressed with ``-t``
    (duration = end - start) rather than ``-to``. Streams are copied
    (``-c copy``) so no re-encoding occurs; note cuts may snap to the nearest
    keyframe.

    Args:
        input_path: Source media file.
        output_path: Destination for the trimmed clip (overwritten via -y).
        start_time: Range start, any format accepted by _parse_time.
        end_time: Range end, any format accepted by _parse_time.

    Returns:
        True on success; False if ffmpeg is missing, the range is invalid,
        or ffmpeg exits non-zero.
    """
    ffmpeg_path = shutil.which('ffmpeg')
    if not ffmpeg_path:
        log("ffmpeg not found in PATH", file=sys.stderr)
        return False

    try:
        # Validate the range before invoking ffmpeg. (The original built a
        # throwaway -to command here that was never executed; removed.)
        s = _parse_time(start_time)
        e = _parse_time(end_time)
        duration = e - s
        if duration <= 0:
            log(f"Invalid range: start {start_time} >= end {end_time}", file=sys.stderr)
            return False

        cmd = [
            ffmpeg_path, '-y',
            '-ss', start_time,          # input seeking: fast, resets timestamps
            '-i', str(input_path),
            '-t', str(duration),        # duration, not absolute end position
            '-c', 'copy',               # stream copy: lossless and fast
            '-map_metadata', '0',       # carry container metadata over
            str(output_path)
        ]

        debug(f"Running ffmpeg: {' '.join(cmd)}")
        result = subprocess.run(cmd, capture_output=True, text=True)

        if result.returncode != 0:
            log(f"ffmpeg error: {result.stderr}", file=sys.stderr)
            return False

        return True
    except Exception as e:
        log(f"Error parsing time or running ffmpeg: {e}", file=sys.stderr)
        return False
|
||||
|
||||
@register(["trim-file"])
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """Trim one or more media files and emit pipeline results for the clips.

    For each input file: creates a 'clip_'-prefixed trimmed copy next to the
    original, inherits/rewrites its tags, records a derivative/king hash
    relationship in the local library DB when possible, and emits a dict
    suitable for piping into add-file.

    Args:
        result: Piped pipeline input (single item or list).
        args: Raw cmdlet arguments; parsed against CMDLET.
        config: Global config dict (used to locate local storage).

    Returns:
        0 if at least one file was trimmed successfully, 1 otherwise.
    """
    # Parse arguments
    parsed = parse_cmdlet_args(args, CMDLET)

    # -range is mandatory and must contain a '-' separator.
    # NOTE(review): splitting on the first '-' means a negative start time
    # cannot be expressed; acceptable for timestamps.
    range_arg = parsed.get("range")
    if not range_arg or '-' not in range_arg:
        log("Error: -range argument required (format: start-end)", file=sys.stderr)
        return 1

    start_str, end_str = range_arg.split('-', 1)

    delete_original = parsed.get("delete", False)
    path_arg = parsed.get("path")

    # Collect inputs from the pipe, normalized to a list.
    inputs = normalize_result_input(result)

    # An explicit -path is treated as an additional input item.
    if path_arg:
        inputs.append({"file_path": path_arg})

    if not inputs:
        log("No input files provided.", file=sys.stderr)
        return 1

    success_count = 0

    for item in inputs:
        # Resolve file path from dict keys, attribute, or bare string.
        file_path = None
        if isinstance(item, dict):
            file_path = item.get("file_path") or item.get("path") or item.get("target")
        elif hasattr(item, "file_path"):
            file_path = item.file_path
        elif isinstance(item, str):
            file_path = item

        if not file_path:
            continue

        path_obj = Path(file_path)
        if not path_obj.exists():
            log(f"File not found: {file_path}", file=sys.stderr)
            continue

        # Determine output path: same directory, 'clip_' prefixed filename.
        new_filename = f"clip_{path_obj.name}"
        output_path = path_obj.parent / new_filename

        # Trim via ffmpeg; on failure fall through to the error log below.
        log(f"Trimming {path_obj.name} ({start_str} to {end_str})...", file=sys.stderr)
        if _trim_media(path_obj, output_path, start_str, end_str):
            log(f"Created clip: {output_path}", file=sys.stderr)
            success_count += 1

            # Prepare result for pipeline

            # 1. Get source hash for relationship; prefer hash carried on the
            # piped item, otherwise hash the file ourselves (best-effort).
            source_hash = None
            if isinstance(item, dict):
                source_hash = item.get("hash") or item.get("file_hash")
            elif hasattr(item, "file_hash"):
                source_hash = item.file_hash

            if not source_hash:
                try:
                    source_hash = sha256_file(path_obj)
                except Exception:
                    # Best-effort: relationship tracking is skipped without it.
                    pass

            # 2. Get tags inherited from the source item.
            tags = extract_tags_from_result(item)

            # 3. Get title and modify it (clip_ prefix mirrors the filename).
            title = extract_title_from_result(item)
            if not title:
                title = path_obj.stem

            new_title = f"clip_{title}"

            # Rewrite any existing title: tag to the clip title, or append one.
            new_tags = []
            has_title_tag = False
            for t in tags:
                if t.lower().startswith("title:"):
                    new_tags.append(f"title:{new_title}")
                    has_title_tag = True
                else:
                    new_tags.append(t)

            if not has_title_tag:
                new_tags.append(f"title:{new_title}")

            # 4. Calculate clip hash and update original file's relationships
            clip_hash = None
            try:
                clip_hash = sha256_file(output_path)
            except Exception:
                pass

            if source_hash and clip_hash:
                # Update original file in local DB if possible. Imports are
                # local so a missing local-library setup degrades gracefully.
                try:
                    from config import get_local_storage_path
                    from helper.local_library import LocalLibraryDB

                    storage_path = get_local_storage_path(config)
                    if storage_path:
                        with LocalLibraryDB(storage_path) as db:
                            # Find the original's metadata: try path first,
                            # then fall back to a hash lookup.
                            orig_meta = db.get_metadata(path_obj)
                            if not orig_meta and source_hash:
                                # Try by hash
                                orig_path_resolved = db.search_by_hash(source_hash)
                                if orig_path_resolved:
                                    orig_meta = db.get_metadata(orig_path_resolved)

                            if orig_meta:
                                # Update relationships, coercing malformed
                                # (non-dict) values to a fresh dict.
                                rels = orig_meta.get("relationships", {})
                                if not isinstance(rels, dict):
                                    rels = {}

                                # Add clip as "derivative" (since original is the source)
                                if "derivative" not in rels:
                                    rels["derivative"] = []

                                if clip_hash not in rels["derivative"]:
                                    rels["derivative"].append(clip_hash)

                                # Save back to DB, preserving other metadata.
                                orig_meta["relationships"] = rels

                                # Ensure hash is set in metadata if we have it
                                if source_hash and not orig_meta.get("hash"):
                                    orig_meta["hash"] = source_hash

                                # Save against the DB-recorded path when
                                # available, else the path we trimmed.
                                save_path = Path(orig_meta.get("file_path") or path_obj)
                                db.save_metadata(save_path, orig_meta)
                                log(f"Updated relationship for original file: {save_path.name}", file=sys.stderr)
                except Exception as e:
                    # Relationship bookkeeping is non-fatal; the clip exists.
                    log(f"Failed to update original file relationships: {e}", file=sys.stderr)

            # 5. Construct result dict for downstream cmdlets (e.g. add-file).
            result_dict = {
                "file_path": str(output_path),
                "path": str(output_path),
                "title": new_title,
                "tags": new_tags,
                "media_kind": "video",  # Assumption, or derive — TODO confirm from source item
                "hash": clip_hash,  # Pass calculated hash
                "relationships": {
                    # The source is the KING of this clip
                    "king": [source_hash] if source_hash else []
                }
            }

            # Emit result
            ctx.emit(result_dict)

            # Delete original if requested via -delete.
            if delete_original:
                try:
                    path_obj.unlink()
                    log(f"Deleted original file: {path_obj}", file=sys.stderr)
                    # Also try to delete sidecars?
                    # Maybe leave that to user or cleanup cmdlet
                except Exception as e:
                    log(f"Failed to delete original: {e}", file=sys.stderr)

        else:
            log(f"Failed to trim {path_obj.name}", file=sys.stderr)

    return 0 if success_count > 0 else 1
|
||||
Reference in New Issue
Block a user