@@ -119,7 +119,9 @@ for filename in os.listdir(cmdlet_dir):
                 for alias in cmdlet_obj.aliases:
                     normalized_alias = alias.replace('_', '-').lower()
                     REGISTRY[normalized_alias] = run_fn
-        except Exception:
+        except Exception as e:
+            import sys
+            print(f"Error importing cmdlet '{mod_name}': {e}", file=sys.stderr)
             continue

 # Import root-level modules that also register cmdlets
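Note: this hunk stops swallowing cmdlet import failures silently. A minimal sketch of the registration loop it hardens, with assumed names (the package path and CMDLET attribute layout are inferred from elsewhere in this diff, not confirmed):

    import importlib
    import sys

    REGISTRY: dict = {}

    def register_cmdlet(mod_name: str) -> None:
        """Import one cmdlet module and register its aliases, reporting failures."""
        try:
            module = importlib.import_module(f"cmdlets.{mod_name}")  # assumed package
            run_fn = module.CMDLET.exec                              # assumed attribute
            for alias in module.CMDLET.aliases:
                REGISTRY[alias.replace('_', '-').lower()] = run_fn
        except Exception as e:
            # The fix above: surface the failure instead of skipping the module.
            print(f"Error importing cmdlet '{mod_name}': {e}", file=sys.stderr)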
@@ -371,7 +371,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
     # Extract tags/known URLs from pipeline objects if available
     pipe_object_tags = extract_tags_from_result(result)
     if pipe_object_tags:
-        log(f"Extracted {len(pipe_object_tags)} tag(s) from pipeline result: {', '.join(pipe_object_tags[:5])}", file=sys.stderr)
+        debug(f"Extracted {len(pipe_object_tags)} tag(s) from pipeline result: {', '.join(pipe_object_tags[:5])}", file=sys.stderr)
     pipe_known_urls = extract_known_urls_from_result(result)

     # Resolve media path: get from piped result
@@ -574,11 +574,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
         try:
             file_provider = get_file_provider(provider_name, config)
             if file_provider is None:
-                log(f"❌ File provider '{provider_name}' not available", file=sys.stderr)
+                log(f"File provider '{provider_name}' not available", file=sys.stderr)
                 return 1

             hoster_url = file_provider.upload(media_path)
-            log(f"✅ File uploaded to {provider_name}: {hoster_url}", file=sys.stderr)
+            log(f"File uploaded to {provider_name}: {hoster_url}", file=sys.stderr)

             # Associate the URL with the file in Hydrus if possible
             current_hash = locals().get('file_hash')
@@ -590,12 +590,12 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
                     client = hydrus_wrapper.get_client(config)
                     if client:
                         client.associate_url(current_hash, hoster_url)
-                        log(f"✅ Associated URL with file hash {current_hash}", file=sys.stderr)
+                        debug(f"Associated URL with file hash {current_hash}", file=sys.stderr)
                 except Exception as exc:
-                    log(f"⚠️ Could not associate URL with Hydrus file: {exc}", file=sys.stderr)
+                    log(f"Could not associate URL with Hydrus file: {exc}", file=sys.stderr)

         except Exception as exc:
-            log(f"❌ {provider_name} upload failed: {exc}", file=sys.stderr)
+            log(f"{provider_name} upload failed: {exc}", file=sys.stderr)
             return 1

         if delete_after_upload:
@@ -632,7 +632,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
             log("❌ No local storage path configured. Set 'storage.local.path' in config.json", file=sys.stderr)
             return 1

-        log(f"Moving into configured local library: {resolved_dir}", file=sys.stderr)
+        debug(f"Moving into configured local library: {resolved_dir}", file=sys.stderr)
         exit_code, dest_path = _handle_local_transfer(media_path, Path(resolved_dir), result, config)

         # After successful local transfer, emit result for pipeline continuation
@@ -713,7 +713,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
     except Exception as exc:
         log(f"❌ Failed to compute file hash: {exc}", file=sys.stderr)
         return 1
-    log(f"File hash: {file_hash}", file=sys.stderr)
+    debug(f"File hash: {file_hash}", file=sys.stderr)

     # Read sidecar tags and known URLs first (for tagging)
@@ -789,9 +789,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
             config=config,
             tags=tags,
         )
-        log(f"✅ File uploaded to Hydrus: {file_hash}", file=sys.stderr)
+        log(f"Hydrus: {file_hash}", file=sys.stderr)
     except Exception as exc:
-        log(f"❌ Hydrus upload failed: {exc}", file=sys.stderr)
+        log(f"Failed: {exc}", file=sys.stderr)
         return 1

     # Associate known URLs in Hydrus metadata
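Note: a pattern recurs in the hunks above and below: progress chatter moves from log() to debug(), and ✅/❌/⚠️ prefixes are dropped from user-facing messages. helper.logger itself is not shown in this diff; a hedged sketch of the split these call sites assume (the gating mechanism is an assumption):

    import os
    import sys

    def log(msg: str, file=sys.stderr) -> None:
        """User-facing output: always printed."""
        print(msg, file=file)

    def debug(msg: str, file=sys.stderr) -> None:
        """Developer chatter: printed only when a debug switch is on."""
        if os.environ.get("DEBUG"):  # hypothetical gate; the real one is not shown
            print(msg, file=file)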
@@ -13,7 +13,7 @@ import pipeline as ctx
 from ._shared import normalize_result_input, filter_results_by_temp
 from helper import hydrus as hydrus_wrapper
 from helper.local_library import read_sidecar, write_sidecar, find_sidecar, has_sidecar, LocalLibraryDB
-from metadata import rename_by_metadata
+from metadata import rename
 from ._shared import Cmdlet, CmdletArg, normalize_hash, parse_tag_arguments, expand_tag_groups, parse_cmdlet_args
 from config import get_local_storage_path
@@ -4,7 +4,7 @@ from typing import Any, Dict, Sequence
 import json
 import sys

-from helper.logger import log
+from helper.logger import debug, log
 import sqlite3
 from pathlib import Path
@@ -84,64 +84,28 @@ def _delete_database_entry(db_path: Path, file_path: str) -> bool:
     return False


-def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
-    # Help
-    try:
-        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
-            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
-            return 0
-    except Exception:
-        pass
-
-    # Handle @N selection which creates a list - extract the first item
-    if isinstance(result, list) and len(result) > 0:
-        result = result[0]
-
-    # Parse overrides and options
-    override_hash: str | None = None
-    conserve: str | None = None
-    lib_root: str | None = None
-    reason_tokens: list[str] = []
-    i = 0
-    while i < len(args):
-        token = args[i]
-        low = str(token).lower()
-        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args):
-            override_hash = str(args[i + 1]).strip()
-            i += 2
-            continue
-        if low in {"-conserve", "--conserve"} and i + 1 < len(args):
-            value = str(args[i + 1]).strip().lower()
-            if value in {"local", "hydrus"}:
-                conserve = value
-            i += 2
-            continue
-        if low in {"-lib-root", "--lib-root", "lib-root"} and i + 1 < len(args):
-            lib_root = str(args[i + 1]).strip()
-            i += 2
-            continue
-        reason_tokens.append(token)
-        i += 1
-
-    # Handle result as either dict or object
-    if isinstance(result, dict):
-        hash_hex_raw = result.get("hash_hex") or result.get("hash")
-        target = result.get("target")
-        origin = result.get("origin")
+def _process_single_item(item: Any, override_hash: str | None, conserve: str | None,
+                         lib_root: str | None, reason: str, config: Dict[str, Any]) -> bool:
+    """Process deletion for a single item."""
+    # Handle item as either dict or object
+    if isinstance(item, dict):
+        hash_hex_raw = item.get("hash_hex") or item.get("hash")
+        target = item.get("target")
+        origin = item.get("origin")
     else:
-        hash_hex_raw = getattr(result, "hash_hex", None) or getattr(result, "hash", None)
-        target = getattr(result, "target", None)
-        origin = getattr(result, "origin", None)
+        hash_hex_raw = getattr(item, "hash_hex", None) or getattr(item, "hash", None)
+        target = getattr(item, "target", None)
+        origin = getattr(item, "origin", None)

     # For Hydrus files, the target IS the hash
     if origin and origin.lower() == "hydrus" and not hash_hex_raw:
         hash_hex_raw = target

     hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_hex_raw)
-    reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()

     local_deleted = False
     local_target = isinstance(target, str) and target.strip() and not str(target).lower().startswith(("http://", "https://"))

     if conserve != "local" and local_target:
         path = Path(str(target))
         file_path_str = str(target)  # Keep the original string for DB matching
@@ -168,8 +132,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             if lib_root:
                 lib_root_path = Path(lib_root)
                 db_path = lib_root_path / ".downlow_library.db"
-                log(f"Attempting DB cleanup: lib_root={lib_root}, db_path={db_path}", file=sys.stderr)
-                log(f"Deleting DB entry for: {file_path_str}", file=sys.stderr)
                 if _delete_database_entry(db_path, file_path_str):
                     if ctx._PIPE_ACTIVE:
                         ctx.emit(f"Removed database entry: {path.name}")
@@ -178,7 +140,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                 else:
                     log(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
             else:
-                log(f"No lib_root provided, skipping database cleanup", file=sys.stderr)
+                debug(f"No lib_root provided, skipping database cleanup", file=sys.stderr)

     hydrus_deleted = False
     if conserve != "hydrus" and hash_hex:
@@ -187,12 +149,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         except Exception as exc:
             if not local_deleted:
                 log(f"Hydrus client unavailable: {exc}", file=sys.stderr)
-                return 1
+                return False
         else:
             if client is None:
                 if not local_deleted:
                     log("Hydrus client unavailable", file=sys.stderr)
-                    return 1
+                    return False
             else:
                 payload: Dict[str, Any] = {"hashes": [hash_hex]}
                 if reason:
@@ -201,11 +163,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                     client._post("/add_files/delete_files", data=payload)  # type: ignore[attr-defined]
                     hydrus_deleted = True
                     preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
-                    log(f"Deleted from Hydrus: {preview}…", file=sys.stderr)
+                    debug(f"Deleted from Hydrus: {preview}…", file=sys.stderr)
                 except Exception as exc:
                     log(f"Hydrus delete failed: {exc}", file=sys.stderr)
                     if not local_deleted:
-                        return 1
+                        return False

     if hydrus_deleted and hash_hex:
         preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
@@ -216,10 +178,64 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         ctx.emit(f"Deleted {preview}.")

     if hydrus_deleted or local_deleted:
-        return 0
+        return True

     log("Selected result has neither Hydrus hash nor local file target")
-    return 1
+    return False
+
+
+def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
+    # Help
+    try:
+        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
+            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
+            return 0
+    except Exception:
+        pass
+
+    override_hash: str | None = None
+    conserve: str | None = None
+    lib_root: str | None = None
+    reason_tokens: list[str] = []
+    i = 0
+    while i < len(args):
+        token = args[i]
+        low = str(token).lower()
+        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args):
+            override_hash = str(args[i + 1]).strip()
+            i += 2
+            continue
+        if low in {"-conserve", "--conserve"} and i + 1 < len(args):
+            value = str(args[i + 1]).strip().lower()
+            if value in {"local", "hydrus"}:
+                conserve = value
+            i += 2
+            continue
+        if low in {"-lib-root", "--lib-root", "lib-root"} and i + 1 < len(args):
+            lib_root = str(args[i + 1]).strip()
+            i += 2
+            continue
+        reason_tokens.append(token)
+        i += 1
+
+    reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()
+
+    items = []
+    if isinstance(result, list):
+        items = result
+    elif result:
+        items = [result]
+
+    if not items:
+        log("No items to delete", file=sys.stderr)
+        return 1
+
+    success_count = 0
+    for item in items:
+        if _process_single_item(item, override_hash, conserve, lib_root, reason, config):
+            success_count += 1
+
+    return 0 if success_count > 0 else 1


 CMDLET = Cmdlet(
     name="delete-file",
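Note: the refactor splits per-item deletion (_process_single_item, returning a bool) from the batch driver (_run, returning an exit code), so a list piped from an @N selection deletes every item instead of only the first. The driver's control flow, condensed from the hunk:

    # Paraphrase, not the full implementation: normalize input to a list,
    # delegate each item, succeed if at least one deletion went through.
    def run_batch(result, override_hash, conserve, lib_root, reason, config) -> int:
        items = result if isinstance(result, list) else ([result] if result else [])
        if not items:
            return 1
        ok = sum(1 for item in items
                 if _process_single_item(item, override_hash, conserve,
                                         lib_root, reason, config))
        return 0 if ok > 0 else 1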
@@ -8,7 +8,7 @@ import models
 import pipeline as ctx
 from helper import hydrus as hydrus_wrapper
 from ._shared import Cmdlet, CmdletArg, normalize_hash, parse_tag_arguments
-from helper.logger import log
+from helper.logger import debug, log


 CMDLET = Cmdlet(
@@ -68,6 +68,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     # @5 or @{2,5,8} to delete tags from ResultTable by index
     tags_from_at_syntax = []
     hash_from_at_syntax = None
+    file_path_from_at_syntax = None

     if rest and str(rest[0]).startswith("@"):
         selector_arg = str(rest[0])
@@ -100,6 +101,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                 # Also get hash from first item for consistency
                 if not hash_from_at_syntax:
                     hash_from_at_syntax = getattr(item, 'hash_hex', None)
+                if not file_path_from_at_syntax:
+                    file_path_from_at_syntax = getattr(item, 'file_path', None)

             if not tags_from_at_syntax:
                 log(f"No tags found at indices: {indices}")
@@ -112,108 +115,165 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             return 1

-    # Handle @N selection which creates a list - extract the first item
-    if isinstance(result, list) and len(result) > 0:
-        # If we have a list of TagItems, we want to process ALL of them if no args provided
-        # This handles: delete-tag @1 (where @1 expands to a list containing one TagItem)
-        if not args and hasattr(result[0], '__class__') and result[0].__class__.__name__ == 'TagItem':
-            # We will extract tags from the list later
-            pass
-        else:
-            result = result[0]
+    # If we have a list of TagItems, we want to process ALL of them if no args provided
+    # This handles: delete-tag @1 (where @1 expands to a list containing one TagItem)
+    # Also handles: delete-tag @1,2 (where we want to delete tags from multiple files)

-    # Determine tags and hash to use
-    tags: list[str] = []
-    hash_hex = None
+    # Normalize result to a list for processing
+    items_to_process = []
+    if isinstance(result, list):
+        items_to_process = result
+    elif result:
+        items_to_process = [result]
+
+    # If we have TagItems and no args, we are deleting the tags themselves
+    # If we have Files (or other objects) and args, we are deleting tags FROM those files
+
+    # Check if we are in "delete selected tags" mode (TagItems)
+    is_tag_item_mode = (items_to_process and hasattr(items_to_process[0], '__class__') and
+                        items_to_process[0].__class__.__name__ == 'TagItem')
+
+    if is_tag_item_mode:
+        # Collect all tags to delete from the TagItems
+        # Group by hash/file_path to batch operations if needed, or just process one by one
+        # For simplicity, we'll process one by one or group by file
+        pass
+    else:
+        # "Delete tags from files" mode
+        # We need args (tags to delete)
+        if not args and not tags_from_at_syntax:
+            log("Requires at least one tag argument when deleting from files")
+            return 1
+
+    # Process each item
+    success_count = 0
+
+    # If we have tags from @ syntax (e.g. delete-tag @{1,2}), we ignore the piped result for tag selection
+    # but we might need the piped result for the file context if @ selection was from a Tag table
+    # Actually, the @ selection logic above already extracted tags.

     if tags_from_at_syntax:
-        # Use tags extracted from @ syntax
+        # Special case: @ selection of tags.
+        # We already extracted tags and hash/path.
+        # Just run the deletion once using the extracted info.
+        # This preserves the existing logic for @ selection.
+
         tags = tags_from_at_syntax
         hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_from_at_syntax)
-        log(f"[delete_tag] Using @ syntax extraction: {len(tags)} tag(s) to delete: {tags}")
-    elif isinstance(result, list) and result and hasattr(result[0], '__class__') and result[0].__class__.__name__ == 'TagItem':
-        # Got a list of TagItems (e.g. from delete-tag @1)
-        tags = [getattr(item, 'tag_name') for item in result if getattr(item, 'tag_name', None)]
-        # Use hash from first item
-        hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result[0], "hash_hex", None))
-    elif result and hasattr(result, '__class__') and result.__class__.__name__ == 'TagItem':
-        # Got a piped TagItem - delete this specific tag
-        tag_name = getattr(result, 'tag_name', None)
-        if tag_name:
-            tags = [tag_name]
-        hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
+        file_path = file_path_from_at_syntax
+
+        if _process_deletion(tags, hash_hex, file_path, config):
+            success_count += 1
+
     else:
-        # Traditional mode - parse tag arguments
-        tags = parse_tag_arguments(rest)
-        hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
+        # Process items from pipe (or single result)
+        # If args are provided, they are the tags to delete from EACH item
+        # If items are TagItems and no args, the tag to delete is the item itself
+
+        tags_arg = parse_tag_arguments(rest)
+
+        for item in items_to_process:
+            tags_to_delete = []
+            item_hash = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(item, "hash_hex", None))
+            item_path = getattr(item, "path", None) or getattr(item, "file_path", None) or getattr(item, "target", None)
+            # If result is a dict (e.g. from search-file), try getting path from keys
+            if not item_path and isinstance(item, dict):
+                item_path = item.get("path") or item.get("file_path") or item.get("target")
+
+            item_source = getattr(item, "source", None)
+
+            if hasattr(item, '__class__') and item.__class__.__name__ == 'TagItem':
+                # It's a TagItem
+                if tags_arg:
+                    # User provided tags to delete FROM this file (ignoring the tag name in the item?)
+                    # Or maybe they want to delete the tag in the item AND the args?
+                    # Usually if piping TagItems, we delete THOSE tags.
+                    # If args are present, maybe we should warn?
+                    # For now, if args are present, assume they override or add to the tag item?
+                    # Let's assume if args are present, we use args. If not, we use the tag name.
+                    tags_to_delete = tags_arg
+                else:
+                    tag_name = getattr(item, 'tag_name', None)
+                    if tag_name:
+                        tags_to_delete = [tag_name]
+            else:
+                # It's a File or other object
+                if tags_arg:
+                    tags_to_delete = tags_arg
+                else:
+                    # No tags provided for a file object - skip or error?
+                    # We already logged an error if no args and not TagItem mode globally,
+                    # but inside the loop we might have mixed items? Unlikely.
+                    continue
+
+            if tags_to_delete and (item_hash or item_path):
+                if _process_deletion(tags_to_delete, item_hash, item_path, config, source=item_source):
+                    success_count += 1
+
+    if success_count > 0:
+        return 0
+    return 1
+
+
+def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | None, config: Dict[str, Any], source: str | None = None) -> bool:
+    """Helper to execute the deletion logic for a single target."""

     if not tags:
         log("No valid tags were provided")
-        return 1
+        return False
+
+    if not hash_hex and not file_path:
+        log("Item does not include a hash or file path")
+        return False
+
+    # Handle local file tag deletion
+    if file_path and (source == "local" or (not hash_hex and source != "hydrus")):
+        try:
+            from helper.local_library import LocalLibraryDB
+            from pathlib import Path
+
+            path_obj = Path(file_path)
+            if not path_obj.exists():
+                log(f"File not found: {file_path}")
+                return False
+
+            # Try to get local storage path from config
+            from config import get_local_storage_path
+            local_root = get_local_storage_path(config)
+
+            if not local_root:
+                # Fallback: assume file is in a library root or use its parent
+                local_root = path_obj.parent
+
+            db = LocalLibraryDB(local_root)
+            db.remove_tags(path_obj, tags)
+            debug(f"Removed {len(tags)} tag(s) from {path_obj.name} (local)")
+            return True
+
+        except Exception as exc:
+            log(f"Failed to remove local tags: {exc}")
+            return False

+    # Hydrus deletion logic
     if not hash_hex:
         log("Selected result does not include a hash")
-        return 1
+        return False

     try:
         service_name = hydrus_wrapper.get_tag_service_name(config)
     except Exception as exc:
         log(f"Failed to resolve tag service: {exc}")
         return 1

     try:
         client = hydrus_wrapper.get_client(config)
     except Exception as exc:
         log(f"Hydrus client unavailable: {exc}")
         return 1

     if client is None:
-        log("Hydrus client unavailable")
-        return 1
-
-    log(f"[delete_tag] Sending deletion request: hash={hash_hex}, tags={tags}, service={service_name}")
-    try:
-        result = client.delete_tags(hash_hex, tags, service_name)
-        log(f"[delete_tag] Hydrus response: {result}")
+        log("Hydrus client unavailable")
+        return False
+
+    try:
+        debug(f"Sending deletion request: hash={hash_hex}, tags={tags}, service={service_name}")
+        client.delete_tags(hash_hex, tags, service_name)
+
+        preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
+        debug(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
+        return True
+
     except Exception as exc:
         log(f"Hydrus del-tag failed: {exc}")
-        return 1
-
-    preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
-    log(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
-
-    # Re-fetch and emit updated tags after deletion
-    try:
-        payload = client.fetch_file_metadata(hashes=[str(hash_hex)], include_service_keys_to_tags=True, include_file_urls=False)
-        items = payload.get("metadata") if isinstance(payload, dict) else None
-        if isinstance(items, list) and items:
-            meta = items[0] if isinstance(items[0], dict) else None
-            if isinstance(meta, dict):
-                # Extract tags from updated metadata
-                from cmdlets.get_tag import _extract_my_tags_from_hydrus_meta, TagItem
-                service_key = hydrus_wrapper.get_tag_service_key(client, service_name)
-                updated_tags = _extract_my_tags_from_hydrus_meta(meta, service_key, service_name)
-
-                # Emit updated tags as TagItem objects
-                from result_table import ResultTable
-                table = ResultTable("Tags", max_columns=2)
-                tag_items = []
-                for idx, tag_name in enumerate(updated_tags, start=1):
-                    tag_item = TagItem(
-                        tag_name=tag_name,
-                        tag_index=idx,
-                        hash_hex=hash_hex,
-                        source="hydrus",
-                        service_name=service_name,
-                    )
-                    tag_items.append(tag_item)
-                    table.add_result(tag_item)
-                    ctx.emit(tag_item)
-
-                # Store items for @ selection in next command (CLI will handle table management)
-                # Don't call set_last_result_table so we don't pollute history or table context
-    except Exception as exc:
-        log(f"Warning: Could not fetch updated tags after deletion: {exc}", file=__import__('sys').stderr)
-
-    return 0
+        return False
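Note: every deletion in the rewritten delete-tag now funnels through _process_deletion(), which routes by where the file lives. Its decision logic, paraphrased from the hunk:

    # Paraphrase of _process_deletion's routing, not the full implementation.
    def route_deletion(tags, hash_hex, file_path, source):
        if not tags or (not hash_hex and not file_path):
            return None  # nothing actionable
        if file_path and (source == "local" or (not hash_hex and source != "hydrus")):
            return "local"   # strip tags via LocalLibraryDB.remove_tags
        if hash_hex:
            return "hydrus"  # strip tags via client.delete_tags
        return None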
@@ -1611,8 +1611,24 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
     # Priority 1: --storage flag
     if storage_location:
         try:
-            final_output_dir = SharedArgs.resolve_storage(storage_location)
-            debug(f"Using storage location: {storage_location} → {final_output_dir}")
+            # For 'local' storage, check config first before using default
+            if storage_location.lower() == 'local':
+                from config import get_local_storage_path
+                try:
+                    configured_path = get_local_storage_path(config)
+                    if configured_path:
+                        final_output_dir = configured_path
+                        debug(f"Using configured local storage path: {final_output_dir}")
+                    else:
+                        final_output_dir = SharedArgs.resolve_storage(storage_location)
+                        debug(f"Using default storage location: {storage_location} → {final_output_dir}")
+                except Exception as exc:
+                    log(f"⚠️ Error reading local storage config: {exc}", file=sys.stderr)
+                    final_output_dir = SharedArgs.resolve_storage(storage_location)
+                    debug(f"Falling back to default storage location: {storage_location} → {final_output_dir}")
+            else:
+                final_output_dir = SharedArgs.resolve_storage(storage_location)
+                debug(f"Using storage location: {storage_location} → {final_output_dir}")
         except ValueError as e:
             log(str(e), file=sys.stderr)
             return 1
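Note: the hunk gives --storage local a config-first precedence: the path configured in config.json wins, and the built-in default is only a fallback. Condensed (names as in the hunk; this is a summary, not the real function):

    def resolve_local_dir(storage_location: str, config: dict):
        if storage_location.lower() == 'local':
            configured = get_local_storage_path(config)  # from config.json
            if configured:
                return configured
        return SharedArgs.resolve_storage(storage_location)  # built-in default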
@@ -2237,6 +2253,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                 if 0 < idx <= len(formats):
                     fmt = formats[idx-1]
                     current_format_selector = fmt.get("format_id")
+
+                    # If video-only format is selected, append +bestaudio to merge with best audio
+                    vcodec = fmt.get("vcodec")
+                    acodec = fmt.get("acodec")
+                    if vcodec and vcodec != "none" and (not acodec or acodec == "none"):
+                        current_format_selector = f"{current_format_selector}+bestaudio"
+                        debug(f"Video-only format selected, appending bestaudio: {current_format_selector}")
+
                     debug(f"Selected format #{idx}: {current_format_selector}")
                     playlist_items = None  # Clear so it doesn't affect download options
                 else:
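Note: the "+bestaudio" suffix uses yt-dlp's format-selection syntax, where "FORMAT_ID+bestaudio" requests a video-only stream merged with the best audio stream. The new check in isolation (fmt is a yt-dlp format dict, which reports the literal string "none" for a missing track):

    def merge_selector(fmt: dict, selector: str) -> str:
        """Append '+bestaudio' when the chosen format has video but no audio."""
        vcodec = fmt.get("vcodec")
        acodec = fmt.get("acodec")
        if vcodec and vcodec != "none" and (not acodec or acodec == "none"):
            return f"{selector}+bestaudio"
        return selector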
@@ -2461,6 +2485,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                 from result_table import ResultTable
                 table = ResultTable("Downloaded Files")
                 for i, file_path in enumerate(downloaded_files):
+                    # Ensure file_path is a Path object
+                    if isinstance(file_path, str):
+                        file_path = Path(file_path)
+
                     row = table.add_row()
                     row.add_column("#", str(i + 1))
                     row.add_column("File", file_path.name)
@@ -6,13 +6,15 @@ import shutil as _shutil
 import subprocess as _subprocess
 import json
 import sys
+import platform

 from helper.logger import log, debug
 import uuid as _uuid
 import time as _time

-from downlow_helpers.progress import print_progress, print_final_progress, format_size
-from downlow_helpers.http_client import HTTPClient
+from helper.progress import print_progress, print_final_progress
+from helper.http_client import HTTPClient
+from helper.mpv_ipc import get_ipc_pipe_path, send_to_mpv
 import fnmatch as _fnmatch

 from . import register
@@ -21,7 +23,7 @@ import pipeline as ctx
 from helper import hydrus as hydrus_wrapper
 from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash, create_pipe_object_result
 from config import resolve_output_dir, get_hydrus_url, get_hydrus_access_key
-from downlow_helpers.alldebrid import AllDebridClient
+from helper.alldebrid import AllDebridClient
@@ -248,158 +250,63 @@ def _is_playable_in_mpv(file_path_or_ext: str, mime_type: Optional[str] = None)
     return False


-def _get_fixed_ipc_pipe() -> str:
-    """Get the fixed IPC pipe name for persistent MPV connection.
-
-    Uses a fixed name 'mpv-medeia-macina' so all playback sessions
-    connect to the same MPV window/process instead of creating new instances.
-    """
-    import platform
-    if platform.system() == 'Windows':
-        return "\\\\.\\pipe\\mpv-medeia-macina"
-    else:
-        return "/tmp/mpv-medeia-macina.sock"
-
-
-def _send_to_mpv_pipe(file_url: str, ipc_pipe: str, title: str, headers: Optional[Dict[str, str]] = None) -> bool:
-    """Send loadfile command to existing MPV via IPC pipe.
-
-    Returns True if successfully sent to existing MPV, False if pipe unavailable.
-    """
-    import json
-    import socket
-    import platform
-
-    try:
-        # Prepare commands
-        # Use set_property for headers as loadfile options can be unreliable via IPC
-        header_str = ""
-        if headers:
-            header_str = ",".join([f"{k}: {v}" for k, v in headers.items()])
-
-        # Command 1: Set headers (or clear them)
-        cmd_headers = {
-            "command": ["set_property", "http-header-fields", header_str],
-            "request_id": 0
-        }
-
-        # Command 2: Load file using memory:// M3U to preserve title
-        # Sanitize title to avoid breaking M3U format
-        safe_title = title.replace("\n", " ").replace("\r", "")
-        m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{file_url}\n"
-
-        cmd_load = {
-            "command": ["loadfile", f"memory://{m3u_content}", "append-play"],
-            "request_id": 1
-        }
-
-        if platform.system() == 'Windows':
-            # Windows named pipes require special handling
-            try:
-                # Open in r+b to read response
-                with open(ipc_pipe, 'r+b', buffering=0) as pipe:
-                    # Send headers
-                    pipe.write((json.dumps(cmd_headers) + "\n").encode('utf-8'))
-                    pipe.flush()
-                    pipe.readline()  # Consume response for headers
-
-                    # Send loadfile
-                    pipe.write((json.dumps(cmd_load) + "\n").encode('utf-8'))
-                    pipe.flush()
-
-                    # Read response
-                    response_line = pipe.readline()
-                    if response_line:
-                        resp = json.loads(response_line.decode('utf-8'))
-                        if resp.get('error') != 'success':
-                            debug(f"[get-file] MPV error: {resp.get('error')}", file=sys.stderr)
-                            return False
-
-                debug(f"[get-file] Sent to existing MPV: {title}", file=sys.stderr)
-                return True
-            except (OSError, IOError):
-                # Pipe not available
-                return False
-        else:
-            # Unix socket for Linux/macOS
-            if not hasattr(socket, 'AF_UNIX'):
-                return False
-
-            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            sock.connect(ipc_pipe)
-
-            # Send headers
-            sock.sendall((json.dumps(cmd_headers) + "\n").encode('utf-8'))
-            sock.recv(4096)  # Consume response
-
-            # Send loadfile
-            sock.sendall((json.dumps(cmd_load) + "\n").encode('utf-8'))
-
-            # Read response
-            try:
-                response_data = sock.recv(4096)
-                if response_data:
-                    resp = json.loads(response_data.decode('utf-8'))
-                    if resp.get('error') != 'success':
-                        debug(f"[get-file] MPV error: {resp.get('error')}", file=sys.stderr)
-                        sock.close()
-                        return False
-            except:
-                pass
-            sock.close()
-
-            debug(f"[get-file] Sent to existing MPV: {title}", file=sys.stderr)
-            return True
-    except (OSError, socket.error, ConnectionRefusedError):
-        # Pipe doesn't exist or MPV not listening - will need to start new instance
-        return False
-    except Exception as e:
-        debug(f"[get-file] IPC error: {e}", file=sys.stderr)
-        return False
-
-
 def _play_in_mpv(file_url: str, file_title: str, is_stream: bool = False, headers: Optional[Dict[str, str]] = None) -> bool:
-    """Play file in MPV using IPC pipe, creating new instance if needed.
+    """Play file in MPV using centralized IPC pipe, creating new instance if needed.

     Returns True on success, False on error.
     """
-    ipc_pipe = _get_fixed_ipc_pipe()
-    import json
-    import socket
-    import platform
-
     try:
         # First try to send to existing MPV instance
-        if _send_to_mpv_pipe(file_url, ipc_pipe, file_title, headers):
+        if send_to_mpv(file_url, file_title, headers):
+            debug(f"Added to MPV: {file_title}")
             return True

         # No existing MPV or pipe unavailable - start new instance
+        ipc_pipe = get_ipc_pipe_path()
         debug(f"[get-file] Starting new MPV instance (pipe: {ipc_pipe})", file=sys.stderr)
-        cmd = ['mpv', file_url, f'--input-ipc-server={ipc_pipe}']
-
-        # Set title for new instance
-        cmd.append(f'--force-media-title={file_title}')
+        # Build command - start MPV without a file initially, just with IPC server
+        cmd = ['mpv', f'--input-ipc-server={ipc_pipe}']

         if headers:
             # Format headers for command line
             # --http-header-fields="Header1: Val1,Header2: Val2"
             header_str = ",".join([f"{k}: {v}" for k, v in headers.items()])
             cmd.append(f'--http-header-fields={header_str}')

+        # Add --idle flag so MPV stays running and waits for playlist commands
+        cmd.append('--idle')
+
         # Detach process to prevent freezing parent CLI
         kwargs = {}
         if platform.system() == 'Windows':
-            # CREATE_NEW_CONSOLE might be better than CREATE_NO_WINDOW if MPV needs a window
-            # But usually MPV creates its own window.
-            # DETACHED_PROCESS (0x00000008) is also an option.
             kwargs['creationflags'] = 0x00000008  # DETACHED_PROCESS

         _subprocess.Popen(cmd, stdin=_subprocess.DEVNULL, stdout=_subprocess.DEVNULL, stderr=_subprocess.DEVNULL, **kwargs)

-        debug(f"{'Streaming' if is_stream else 'Playing'} in MPV: {file_title}")
-        debug(f"[get-file] Started MPV with {file_title} (IPC: {ipc_pipe})", file=sys.stderr)
-        return True
+        debug(f"[get-file] Started MPV instance (IPC: {ipc_pipe})", file=sys.stderr)
+
+        # Give MPV time to start and open IPC pipe
+        # Windows needs more time than Unix
+        wait_time = 1.0 if platform.system() == 'Windows' else 0.5
+        debug(f"[get-file] Waiting {wait_time}s for MPV to initialize IPC...", file=sys.stderr)
+        _time.sleep(wait_time)
+
+        # Try up to 3 times to send the file via IPC
+        for attempt in range(3):
+            debug(f"[get-file] Sending file via IPC (attempt {attempt + 1}/3)", file=sys.stderr)
+            if send_to_mpv(file_url, file_title, headers):
+                debug(f"{'Streaming' if is_stream else 'Playing'} in MPV: {file_title}")
+                debug(f"[get-file] Added to new MPV instance (IPC: {ipc_pipe})", file=sys.stderr)
+                return True
+
+            if attempt < 2:
+                # Wait before retrying
+                _time.sleep(0.3)
+
+        # IPC send failed after all retries
+        log("Error: Could not send file to MPV via IPC after startup", file=sys.stderr)
+        return False

     except FileNotFoundError:
         log("Error: MPV not found. Install mpv to play media files", file=sys.stderr)
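Note: the rewritten _play_in_mpv delegates pipe naming and the loadfile send to helper.mpv_ipc (only get_ipc_pipe_path() and send_to_mpv() are visible in this diff). The start-then-retry shape it implements, reduced to a sketch with hypothetical names:

    import subprocess
    import time

    def play_with_retry(url: str, title: str, send, start_cmd: list) -> bool:
        """Try an existing player first, then start one and retry the IPC send.

        `send` is any callable returning True once the player accepted the
        file; the names here are illustrative, not the module's real API.
        """
        if send(url, title):           # an instance is already listening
            return True
        subprocess.Popen(start_cmd)    # start a fresh idle instance
        time.sleep(1.0)                # give the IPC socket time to appear
        for _ in range(3):             # then retry the send a few times
            if send(url, title):
                return True
            time.sleep(0.3)
        return False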
@@ -516,7 +423,7 @@ def _handle_hydrus_file(file_hash: Optional[str], file_title: str, config: Dict[

     if force_browser:
         # User explicitly wants browser
-        ipc_pipe = _get_fixed_ipc_pipe()
+        ipc_pipe = get_ipc_pipe_path()
         result_dict = create_pipe_object_result(
             source='hydrus',
             identifier=file_hash,
@@ -559,7 +466,7 @@ def _handle_hydrus_file(file_hash: Optional[str], file_title: str, config: Dict[
         return 0
     else:
         # Not media, open in browser
-        ipc_pipe = _get_fixed_ipc_pipe()
+        ipc_pipe = get_ipc_pipe_path()
         result_dict = create_pipe_object_result(
             source='hydrus',
             identifier=file_hash,
@@ -1193,7 +1100,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

     # Normal file export (happens regardless of -metadata flag)
     try:
-        from downlow_helpers.hydrus import hydrus_export as _hydrus_export
+        from helper.hydrus import hydrus_export as _hydrus_export
     except Exception:
         _hydrus_export = None  # type: ignore
     if _hydrus_export is None:
@@ -49,6 +49,7 @@ class TagItem:
     hash_hex: Optional[str] = None
     source: str = "hydrus"
     service_name: Optional[str] = None
+    file_path: Optional[str] = None

     def __post_init__(self):
         # Make ResultTable happy by adding standard fields
@@ -101,7 +102,9 @@ def _emit_tags_as_table(
     hash_hex: Optional[str],
     source: str = "hydrus",
     service_name: Optional[str] = None,
-    config: Dict[str, Any] = None
+    config: Dict[str, Any] = None,
+    item_title: Optional[str] = None,
+    file_path: Optional[str] = None
 ) -> None:
     """Emit tags as TagItem objects and display via ResultTable.
@@ -111,7 +114,13 @@ def _emit_tags_as_table(
     from result_table import ResultTable

     # Create ResultTable with just tag column (no title)
-    table = ResultTable("Tags", max_columns=1)
+    table_title = "Tags"
+    if item_title:
+        table_title = f"Tags: {item_title}"
+    if hash_hex:
+        table_title += f" [{hash_hex[:8]}]"
+
+    table = ResultTable(table_title, max_columns=1)
     table.set_source_command("get-tag", [])

     # Create TagItem for each tag
@@ -123,6 +132,7 @@ def _emit_tags_as_table(
             hash_hex=hash_hex,
             source=source,
             service_name=service_name,
+            file_path=file_path,
         )
         tag_items.append(tag_item)
         table.add_result(tag_item)
@@ -1069,6 +1079,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     # Try Hydrus first (always prioritize if available and has hash)
     use_hydrus = False
     hydrus_meta = None  # Cache the metadata from first fetch
+    client = None
    if hash_hex and hydrus_available:
         try:
             client = hydrus.get_client(config)
@@ -1093,7 +1104,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         try:
             # Use cached metadata from above, don't fetch again
             service_name = hydrus.get_tag_service_name(config)
-            client = hydrus.get_client(config)
+            if client is None:
+                client = hydrus.get_client(config)
             service_key = hydrus.get_tag_service_key(client, service_name)
             current = _extract_my_tags_from_hydrus_meta(hydrus_meta, service_key, service_name)
             source = "hydrus"
@@ -1148,10 +1160,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         return 1

     # Always output to ResultTable (pipeline mode only)
+    # Extract title for table header
+    item_title = get_field(result, "title", None) or get_field(result, "name", None) or get_field(result, "filename", None)
+
     if source == "hydrus":
-        _emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config)
+        _emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config, item_title=item_title)
     else:
-        _emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config)
+        _emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config, item_title=item_title, file_path=str(local_path) if local_path else None)

     # If emit requested or store key provided, emit payload
     if emit_mode:
cmdlets/output_json.py (new file, +14)
@@ -0,0 +1,14 @@
+from typing import Any, Dict, Sequence
+import json
+from ._shared import Cmdlet
+
+def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
+    """Output the current pipeline result as JSON."""
+    print(json.dumps(result, indent=2, default=str))
+    return 0
+
+CMDLET = Cmdlet(
+    name="output-json",
+    summary="Output the current pipeline result as JSON.",
+    usage="... | output-json",
+)
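Note: output-json's default=str is what lets arbitrary pipeline objects pass through; without it, json.dumps raises TypeError on non-JSON types such as Path. A minimal illustration:

    import json
    from pathlib import Path

    print(json.dumps({"target": Path("/tmp/x.mkv")}, indent=2, default=str))
    # -> {"target": "/tmp/x.mkv"} instead of raising TypeError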
cmdlets/pipe.py (445 lines changed)
@@ -8,92 +8,124 @@ import subprocess
 from ._shared import Cmdlet, CmdletArg, parse_cmdlet_args
 from helper.logger import log, debug
 from result_table import ResultTable
-from .get_file import _get_fixed_ipc_pipe
+from helper.mpv_ipc import get_ipc_pipe_path, MPVIPCClient
 import pipeline as ctx
+from helper.local_library import LocalLibrarySearchOptimizer
+from config import get_local_storage_path

-def _send_ipc_command(command: Dict[str, Any]) -> Optional[Any]:
+def _send_ipc_command(command: Dict[str, Any], silent: bool = False) -> Optional[Any]:
     """Send a command to the MPV IPC pipe and return the response."""
-    ipc_pipe = _get_fixed_ipc_pipe()
-    request = json.dumps(command) + "\n"
-
     try:
-        if platform.system() == 'Windows':
-            # Windows named pipe
-            # Opening in r+b mode to read response
-            try:
-                with open(ipc_pipe, 'r+b', buffering=0) as pipe:
-                    pipe.write(request.encode('utf-8'))
-                    pipe.flush()
-
-                    # Read response
-                    # We'll try to read a line. This might block if MPV is unresponsive.
-                    response_line = pipe.readline()
-                    if response_line:
-                        return json.loads(response_line.decode('utf-8'))
-            except FileNotFoundError:
-                return None  # MPV not running
-            except Exception as e:
-                debug(f"Windows IPC Error: {e}", file=sys.stderr)
-                return None
-        else:
-            # Unix socket
-            af_unix = getattr(socket, 'AF_UNIX', None)
-            if af_unix is None:
-                debug("Unix sockets not supported on this platform", file=sys.stderr)
-                return None
-
-            try:
-                sock = socket.socket(af_unix, socket.SOCK_STREAM)
-                sock.settimeout(2.0)
-                sock.connect(ipc_pipe)
-                sock.sendall(request.encode('utf-8'))
-
-                # Read response
-                response_data = b""
-                while True:
-                    try:
-                        chunk = sock.recv(4096)
-                        if not chunk:
-                            break
-                        response_data += chunk
-                        if b"\n" in chunk:
-                            break
-                    except socket.timeout:
-                        break
-
-                sock.close()
-
-                if response_data:
-                    # Parse lines, look for response to our request
-                    lines = response_data.decode('utf-8').strip().split('\n')
-                    for line in lines:
-                        try:
-                            resp = json.loads(line)
-                            # If it has 'error' field, it's a response
-                            if 'error' in resp:
-                                return resp
-                        except:
-                            pass
-            except (FileNotFoundError, ConnectionRefusedError):
-                return None  # MPV not running
-            except Exception as e:
-                debug(f"Unix IPC Error: {e}", file=sys.stderr)
-                return None
-
+        ipc_pipe = get_ipc_pipe_path()
+        client = MPVIPCClient(socket_path=ipc_pipe)
+
+        if not client.connect():
+            return None  # MPV not running
+
+        response = client.send_command(command)
+        client.disconnect()
+        return response
     except Exception as e:
-        debug(f"IPC Error: {e}", file=sys.stderr)
+        if not silent:
+            debug(f"IPC Error: {e}", file=sys.stderr)
         return None

-    return None
-
-def _get_playlist() -> List[Dict[str, Any]]:
-    """Get the current playlist from MPV."""
+def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
+    """Get the current playlist from MPV. Returns None if MPV is not running."""
     cmd = {"command": ["get_property", "playlist"], "request_id": 100}
-    resp = _send_ipc_command(cmd)
-    if resp and resp.get("error") == "success":
+    resp = _send_ipc_command(cmd, silent=silent)
+    if resp is None:
+        return None
+    if resp.get("error") == "success":
         return resp.get("data", [])
     return []

+def _extract_title_from_item(item: Dict[str, Any]) -> str:
+    """Extract a clean title from an MPV playlist item, handling memory:// M3U hacks."""
+    title = item.get("title")
+    filename = item.get("filename") or ""
+
+    # Special handling for memory:// M3U playlists (used to pass titles via IPC)
+    if "memory://" in filename and "#EXTINF:" in filename:
+        try:
+            # Extract title from #EXTINF:-1,Title
+            # Use regex to find title between #EXTINF:-1, and newline
+            match = re.search(r"#EXTINF:-1,(.*?)(?:\n|\r|$)", filename)
+            if match:
+                extracted_title = match.group(1).strip()
+                if not title or title == "memory://":
+                    title = extracted_title
+
+            # If we still don't have a title, try to find the URL in the M3U content
+            if not title:
+                lines = filename.splitlines()
+                for line in lines:
+                    line = line.strip()
+                    if line and not line.startswith('#') and not line.startswith('memory://'):
+                        # Found the URL, use it as title
+                        return line
+        except Exception:
+            pass
+
+    return title or filename or "Unknown"
+
+def _queue_items(items: List[Any], clear_first: bool = False) -> None:
+    """Queue items to MPV, starting it if necessary.
+
+    Args:
+        items: List of items to queue
+        clear_first: If True, the first item will replace the current playlist
+    """
+    for i, item in enumerate(items):
+        # Extract URL/Path
+        target = None
+        title = None
+
+        if isinstance(item, dict):
+            target = item.get("target") or item.get("url") or item.get("path") or item.get("filename")
+            title = item.get("title") or item.get("name")
+        elif hasattr(item, "target"):
+            target = item.target
+            title = getattr(item, "title", None)
+        elif isinstance(item, str):
+            target = item
+
+        if target:
+            # Add to MPV playlist
+            # We use loadfile with append flag (or replace if clear_first is set)
+
+            # Use memory:// M3U hack to pass title to MPV
+            if title:
+                # Sanitize title for M3U (remove newlines)
+                safe_title = title.replace('\n', ' ').replace('\r', '')
+                m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"
+                target_to_send = f"memory://{m3u_content}"
+            else:
+                target_to_send = target
+
+            mode = "append"
+            if clear_first and i == 0:
+                mode = "replace"
+
+            cmd = {"command": ["loadfile", target_to_send, mode], "request_id": 200}
+            resp = _send_ipc_command(cmd)
+
+            if resp is None:
+                # MPV not running (or died)
+                # Start MPV with remaining items
+                _start_mpv(items[i:])
+                return
+            elif resp.get("error") == "success":
+                # Also set property for good measure
+                if title:
+                    title_cmd = {"command": ["set_property", "force-media-title", title], "request_id": 201}
+                    _send_ipc_command(title_cmd)
+                debug(f"Queued: {title or target}")
+            else:
+                error_msg = str(resp.get('error'))
+                debug(f"Failed to queue item: {error_msg}", file=sys.stderr)

 def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     """Manage and play items in the MPV playlist via IPC."""
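Note: the memory:// M3U trick that recurs throughout this file exists because passing a title via loadfile options proved unreliable over IPC, so the title is smuggled inside an in-memory playlist that MPV parses. Isolated:

    def wrap_with_title(target: str, title) -> str:
        """Wrap a URL/path in a memory:// M3U so MPV displays a clean title."""
        if not title:
            return target
        safe_title = title.replace('\n', ' ').replace('\r', '')  # keep M3U valid
        return f"memory://#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"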
@@ -106,7 +138,115 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     list_mode = parsed.get("list")
     play_mode = parsed.get("play")
     pause_mode = parsed.get("pause")
+    save_mode = parsed.get("save")
+    load_mode = parsed.get("load")
+
+    # Handle Save Playlist
+    if save_mode:
+        playlist_name = index_arg or f"Playlist {subprocess.check_output(['date', '/t'], shell=True).decode().strip()}"
+        # If index_arg was used for name, clear it so it doesn't trigger index logic
+        if index_arg:
+            index_arg = None
+
+        items = _get_playlist()
+        if not items:
+            debug("Cannot save: MPV playlist is empty or MPV is not running.")
+            return 1
+
+        # Clean up items for saving (remove current flag, etc)
+        clean_items = []
+        for item in items:
+            # If title was extracted from memory://, we should probably save the original filename
+            # if it's a URL, or reconstruct a clean object.
+            # Actually, _extract_title_from_item handles the display title.
+            # But for playback, we need the 'filename' (which might be memory://...)
+            # If we save 'memory://...', it will work when loaded back.
+            clean_items.append(item)
+
+        # Use config from context or load it
+        config_data = config if config else {}
+
+        storage_path = get_local_storage_path(config_data)
+        if not storage_path:
+            debug("Local storage path not configured.")
+            return 1
+
+        with LocalLibrarySearchOptimizer(storage_path) as db:
+            if db.save_playlist(playlist_name, clean_items):
+                debug(f"Playlist saved as '{playlist_name}'")
+                return 0
+            else:
+                debug(f"Failed to save playlist '{playlist_name}'")
+                return 1
+
+    # Handle Load Playlist
+    current_playlist_name = None
+    if load_mode:
+        # Use config from context or load it
+        config_data = config if config else {}
+
+        storage_path = get_local_storage_path(config_data)
+        if not storage_path:
+            debug("Local storage path not configured.")
+            return 1
+
+        with LocalLibrarySearchOptimizer(storage_path) as db:
+            if index_arg:
+                try:
+                    pl_id = int(index_arg)
+                    result = db.get_playlist_by_id(pl_id)
+                    if result is None:
+                        debug(f"Playlist ID {pl_id} not found.")
+                        return 1
+
+                    name, items = result
+                    current_playlist_name = name
+
+                    # Queue items (replacing current playlist)
+                    if items:
+                        _queue_items(items, clear_first=True)
+                    else:
+                        # Empty playlist, just clear
+                        _send_ipc_command({"command": ["playlist-clear"]}, silent=True)
+
+                    # Switch to list mode to show the result
+                    list_mode = True
+                    index_arg = None
+                    # Fall through to list logic
+
+                except ValueError:
+                    debug(f"Invalid playlist ID: {index_arg}")
+                    return 1
+            else:
+                playlists = db.get_playlists()
+
+                if not playlists:
+                    debug("No saved playlists found.")
+                    return 0
+
+                table = ResultTable("Saved Playlists")
+                for i, pl in enumerate(playlists):
+                    item_count = len(pl.get('items', []))
+                    row = table.add_row()
+                    # row.add_column("ID", str(pl['id']))  # Hidden as per user request
+                    row.add_column("Name", pl['name'])
+                    row.add_column("Items", str(item_count))
+                    row.add_column("Updated", pl['updated_at'])
+
+                    # Set the playlist items as the result object for this row
+                    # When user selects @N, they get the list of items
+                    # We also set the source command to .pipe -load <ID> so it loads it
+                    table.set_row_selection_args(i, ["-load", str(pl['id'])])
+
+                table.set_source_command(".pipe")
+
+                # Register results
+                ctx.set_last_result_table_overlay(table, [p['items'] for p in playlists])
+                ctx.set_current_stage_table(table)
+
+                print(table)
+                return 0

     # Handle Play/Pause commands
     if play_mode:
         cmd = {"command": ["set_property", "pause", False], "request_id": 103}
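Note: one caveat in the save branch above: the default playlist name shells out to `date /t`, which is a Windows-only cmd.exe builtin. A portable equivalent, should that need fixing:

    import datetime

    # Avoids shelling out to the Windows-only `date /t`.
    playlist_name = index_arg or f"Playlist {datetime.date.today().isoformat()}"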
@@ -148,64 +288,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     elif isinstance(result, dict):
         items_to_add = [result]

-    added_count = 0
-    for i, item in enumerate(items_to_add):
-        # Extract URL/Path
-        target = None
-        title = None
-
-        if isinstance(item, dict):
-            target = item.get("target") or item.get("url") or item.get("path")
-            title = item.get("title") or item.get("name")
-        elif hasattr(item, "target"):
-            target = item.target
-            title = getattr(item, "title", None)
-        elif isinstance(item, str):
-            target = item
-
-        if target:
-            # Add to MPV playlist
-            # We use loadfile with append flag
-
-            # Use memory:// M3U hack to pass title to MPV
-            # This avoids "invalid parameter" errors with loadfile options
-            # and ensures the title is displayed in the playlist/window
-            if title:
-                # Sanitize title for M3U (remove newlines)
-                safe_title = title.replace('\n', ' ').replace('\r', '')
-                m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"
-                target_to_send = f"memory://{m3u_content}"
-            else:
-                target_to_send = target
-
-            cmd = {"command": ["loadfile", target_to_send, "append"], "request_id": 200}
-            resp = _send_ipc_command(cmd)
-
-            if resp is None:
-                # MPV not running (or died)
-                # Start MPV with remaining items
-                _start_mpv(items_to_add[i:])
-                return 0
-            elif resp.get("error") == "success":
-                added_count += 1
-                if title:
-                    debug(f"Queued: {title}")
-                else:
-                    debug(f"Queued: {target}")
-            else:
-                error_msg = str(resp.get('error'))
-                debug(f"Failed to queue item: {error_msg}", file=sys.stderr)
-
-                # If error indicates parameter issues, try without options
-                # (Though memory:// should avoid this, we keep fallback just in case)
-                if "option" in error_msg or "parameter" in error_msg:
-                    cmd = {"command": ["loadfile", target, "append"], "request_id": 201}
-                    resp = _send_ipc_command(cmd)
-                    if resp and resp.get("error") == "success":
-                        added_count += 1
-                        debug(f"Queued (fallback): {title or target}")
+    _queue_items(items_to_add)

-    if added_count > 0:
+    if items_to_add:
         # If we added items, we might want to play the first one if nothing is playing?
         # For now, just list the playlist
         pass
@@ -213,8 +298,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     # Get playlist from MPV
     items = _get_playlist()

+    if items is None:
+        debug("MPV is not running. Starting new instance...")
+        _start_mpv([])
+        return 0
+
     if not items:
-        debug("MPV playlist is empty or MPV is not running.")
+        debug("MPV playlist is empty.")
         return 0

     # If index is provided, perform action (Play or Clear)
@@ -228,7 +318,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             return 1

         item = items[idx]
-        title = item.get("title") or item.get("filename") or "Unknown"
+        title = _extract_title_from_item(item)

         if clear_mode:
             # Remove item
@@ -237,7 +327,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             if resp and resp.get("error") == "success":
                 debug(f"Removed: {title}")
                 # Refresh items for listing
-                items = _get_playlist()
+                items = _get_playlist() or []
                 list_mode = True
                 index_arg = None
             else:
@@ -268,46 +358,26 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         debug("MPV playlist is empty.")
         return 0

-    table = ResultTable("MPV Playlist")
+    # Use the loaded playlist name if available, otherwise default
+    # Note: current_playlist_name is defined in the load_mode block if a playlist was loaded
+    try:
+        table_title = current_playlist_name or "MPV Playlist"
+    except NameError:
+        table_title = "MPV Playlist"
+
+    table = ResultTable(table_title)

     for i, item in enumerate(items):
         is_current = item.get("current", False)
-        title = item.get("title") or ""
-        filename = item.get("filename") or ""
-
-        # Special handling for memory:// M3U playlists (used to pass titles via IPC)
-        if "memory://" in filename and "#EXTINF:" in filename:
-            try:
-                # Extract title from #EXTINF:-1,Title
-                # Use regex to find title between #EXTINF:-1, and newline
-                match = re.search(r"#EXTINF:-1,(.*?)(?:\n|\r|$)", filename)
-                if match:
-                    extracted_title = match.group(1).strip()
-                    if not title or title == "memory://":
-                        title = extracted_title
-
-                # Extract actual URL
-                # Find the first line that looks like a URL and not a directive
-                lines = filename.splitlines()
-                for line in lines:
-                    line = line.strip()
-                    if line and not line.startswith('#') and not line.startswith('memory://'):
-                        filename = line
-                        break
-            except Exception:
-                pass
+        title = _extract_title_from_item(item)

         # Truncate if too long
-        if len(title) > 57:
-            title = title[:57] + "..."
-        if len(filename) > 27:
-            filename = filename[:27] + "..."
+        if len(title) > 80:
+            title = title[:77] + "..."

         row = table.add_row()
         row.add_column("#", str(i + 1))
         row.add_column("Current", "*" if is_current else "")
         row.add_column("Title", title)
-        row.add_column("Filename", filename)

         table.set_row_selection_args(i, [str(i + 1)])
@@ -323,9 +393,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

def _start_mpv(items: List[Any]) -> None:
    """Start MPV with a list of items."""
    ipc_pipe = _get_fixed_ipc_pipe()
    ipc_pipe = get_ipc_pipe_path()

    cmd = ['mpv', f'--input-ipc-server={ipc_pipe}']
    cmd = ['mpv', f'--input-ipc-server={ipc_pipe}', '--idle', '--force-window']
    cmd.append('--ytdl-format=bestvideo[height<=?1080]+bestaudio/best[height<=?1080]')

    # Add items
@@ -334,7 +404,7 @@ def _start_mpv(items: List[Any]) -> None:
        title = None

        if isinstance(item, dict):
            target = item.get("target") or item.get("url") or item.get("path")
            target = item.get("target") or item.get("url") or item.get("path") or item.get("filename")
            title = item.get("title") or item.get("name")
        elif hasattr(item, "target"):
            target = item.target
@@ -351,16 +421,15 @@ def _start_mpv(items: List[Any]) -> None:
        else:
            cmd.append(target)

    if len(cmd) > 3:  # mpv + ipc + format + at least one file
        try:
            kwargs = {}
            if platform.system() == 'Windows':
                kwargs['creationflags'] = 0x00000008  # DETACHED_PROCESS

            subprocess.Popen(cmd, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs)
            debug(f"Started MPV with {len(cmd)-3} items")
        except Exception as e:
            debug(f"Error starting MPV: {e}", file=sys.stderr)
    try:
        kwargs = {}
        if platform.system() == 'Windows':
            kwargs['creationflags'] = 0x00000008  # DETACHED_PROCESS

        subprocess.Popen(cmd, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs)
        debug(f"Started MPV with {len(items)} items")
    except Exception as e:
        debug(f"Error starting MPV: {e}", file=sys.stderr)
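
For reference, the detached launch can be expressed portably with only the standard library; a sketch (not the commit's code) where subprocess.DETACHED_PROCESS stands in for the raw 0x00000008 and start_new_session covers POSIX:

    import platform
    import subprocess

    def spawn_detached(cmd: list) -> subprocess.Popen:
        """Launch cmd with no console/terminal tie to the parent process."""
        kwargs = {}
        if platform.system() == "Windows":
            kwargs["creationflags"] = subprocess.DETACHED_PROCESS  # == 0x00000008
        else:
            kwargs["start_new_session"] = True  # detach from the controlling terminal
        return subprocess.Popen(
            cmd,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            **kwargs,
        )

    spawn_detached(["mpv", "--input-ipc-server=/tmp/mpv.sock", "--idle", "--force-window"])
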
CMDLET = Cmdlet(
    name=".pipe",
@@ -394,6 +463,16 @@ CMDLET = Cmdlet(
            type="flag",
            description="Pause playback"
        ),
        CmdletArg(
            name="save",
            type="flag",
            description="Save current playlist to database"
        ),
        CmdletArg(
            name="load",
            type="flag",
            description="List saved playlists"
        ),
    ],
    exec=_run
)

@@ -9,6 +9,7 @@ from __future__ import annotations
import contextlib
import hashlib
import importlib
import json
import sys
import time
import httpx
@@ -17,8 +18,9 @@ from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple
from urllib.parse import urlsplit, quote, urljoin

from helper.logger import log
from helper.logger import log, debug
from helper.http_client import HTTPClient
from helper.utils import ensure_directory, unique_path, unique_preserve_order

from . import register
from ._shared import Cmdlet, CmdletArg, SharedArgs, create_pipe_object_result, normalize_result_input
@@ -70,6 +72,38 @@ USER_AGENT = (
DEFAULT_VIEWPORT: ViewportSize = {"width": 1280, "height": 1200}
ARCHIVE_TIMEOUT = 30.0

# Configurable selectors for specific websites
SITE_SELECTORS: Dict[str, List[str]] = {
    "twitter.com": [
        "article[role='article']",
        "div[data-testid='tweet']",
        "div[data-testid='cellInnerDiv'] article",
    ],
    "x.com": [
        "article[role='article']",
        "div[data-testid='tweet']",
        "div[data-testid='cellInnerDiv'] article",
    ],
    "instagram.com": [
        "article[role='presentation']",
        "article[role='article']",
        "div[role='dialog'] article",
        "section main article",
    ],
    "reddit.com": [
        "shreddit-post",
        "div[data-testid='post-container']",
        "div[data-click-id='background']",
        "article",
    ],
    "rumble.com": [
        "rumble-player, iframe.rumble",
        "div.video-item--main",
        "main article",
    ],
}
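
With the table in place, supporting another site becomes a data edit rather than a code edit; a hypothetical entry ("bsky.app" and its selectors are illustrative, not part of the commit):

    SITE_SELECTORS["bsky.app"] = [
        "div[data-testid='postThreadItem']",  # illustrative selector only
        "article",
    ]
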
class ScreenshotError(RuntimeError):
    """Raised when screenshot capture or upload fails."""
@@ -113,39 +147,6 @@ class ScreenshotResult:
# Helper Functions
# ============================================================================

def _ensure_directory(path: Path) -> None:
    """Ensure directory exists."""
    if not isinstance(path, Path):
        path = Path(path)
    path.mkdir(parents=True, exist_ok=True)


def _unique_path(path: Path) -> Path:
    """Get unique path by appending numbers if file exists."""
    if not path.exists():
        return path
    stem = path.stem
    suffix = path.suffix
    parent = path.parent
    counter = 1
    while True:
        new_path = parent / f"{stem}_{counter}{suffix}"
        if not new_path.exists():
            return new_path
        counter += 1


def _unique_preserve_order(items: Sequence[str]) -> List[str]:
    """Remove duplicates while preserving order."""
    seen = set()
    result = []
    for item in items:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result


def _slugify_url(url: str) -> str:
    """Convert URL to filesystem-safe slug."""
    parsed = urlsplit(url)
@@ -180,36 +181,11 @@ def _selectors_for_url(url: str) -> List[str]:
    """Return a list of likely content selectors for known platforms."""
    u = url.lower()
    sels: List[str] = []
    # Twitter/X
    if "twitter.com" in u or "x.com" in u:
        sels.extend([
            "article[role='article']",
            "div[data-testid='tweet']",
            "div[data-testid='cellInnerDiv'] article",
        ])
    # Instagram
    if "instagram.com" in u:
        sels.extend([
            "article[role='presentation']",
            "article[role='article']",
            "div[role='dialog'] article",
            "section main article",
        ])
    # Reddit
    if "reddit.com" in u:
        sels.extend([
            "shreddit-post",
            "div[data-testid='post-container']",
            "div[data-click-id='background']",
            "article",
        ])
    # Rumble (video post)
    if "rumble.com" in u:
        sels.extend([
            "rumble-player, iframe.rumble",
            "div.video-item--main",
            "main article",
        ])

    for domain, selectors in SITE_SELECTORS.items():
        if domain in u:
            sels.extend(selectors)

    return sels or ["article"]

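
The refactor keeps the function's contract; a quick usage sketch of the data-driven lookup (URLs are illustrative):

    _selectors_for_url("https://x.com/someone/status/1")
    # -> ["article[role='article']", "div[data-testid='tweet']",
    #     "div[data-testid='cellInnerDiv'] article"]

    _selectors_for_url("https://example.org/post")
    # -> ["article"]   # fallback when no configured domain matches
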
@@ -321,7 +297,7 @@ def _archive_url(url: str, timeout: float) -> Tuple[List[str], List[str]]:

def _prepare_output_path(options: ScreenshotOptions) -> Path:
    """Prepare and validate output path for screenshot."""
    _ensure_directory(options.output_dir)
    ensure_directory(options.output_dir)
    explicit_format = _normalise_format(options.output_format) if options.output_format else None
    inferred_format: Optional[str] = None
    if options.output_path is not None:
@@ -344,20 +320,23 @@ def _prepare_output_path(options: ScreenshotOptions) -> Path:
        if current_suffix != expected:
            path = path.with_suffix(expected)
    options.output_format = final_format
    return _unique_path(path)
    return unique_path(path)


def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warnings: List[str]) -> None:
def _capture(options: ScreenshotOptions, destination: Path, warnings: List[str]) -> None:
    """Capture screenshot using Playwright."""
    debug(f"[_capture] Starting capture for {options.url} -> {destination}")
    playwright = None
    browser = None
    context = None
    try:
        log("Starting Playwright...", flush=True)
        debug("Starting Playwright...", flush=True)
        playwright = sync_playwright().start()
        log("Launching Chromium browser...", flush=True)
        format_name = _normalise_format(options.output_format)
        headless = options.headless or format_name == "pdf"
        debug(f"[_capture] Format: {format_name}, Headless: {headless}")

        if format_name == "pdf" and not options.headless:
            warnings.append("pdf output requires headless Chromium; overriding headless mode")
        browser = playwright.chromium.launch(
@@ -413,11 +392,14 @@ def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warn
        log("Attempting platform-specific content capture...", flush=True)
        try:
            _platform_preprocess(options.url, page, warnings)
        except Exception:
        except Exception as e:
            debug(f"[_capture] Platform preprocess failed: {e}")
            pass
        selectors = list(options.target_selectors or [])
        if not selectors:
            selectors = _selectors_for_url(options.url)

        debug(f"[_capture] Trying selectors: {selectors}")
        for sel in selectors:
            try:
                log(f"Trying selector: {sel}", flush=True)
@@ -466,6 +448,7 @@ def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warn
            page.screenshot(**screenshot_kwargs)
        log(f"Screenshot saved to {destination}", flush=True)
    except Exception as exc:
        debug(f"[_capture] Exception: {exc}")
        raise ScreenshotError(f"Failed to capture screenshot: {exc}") from exc
    finally:
        log("Cleaning up browser resources...", flush=True)
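
The selector loop above narrows the shot to the first matching element before falling back to a full-page capture; a condensed sketch of that pattern against the Playwright sync API (the helper name and 5s timeout are assumptions, not the commit's code):

    from playwright.sync_api import Page

    def try_element_screenshot(page: Page, selectors: list, destination: str) -> bool:
        """Capture the first visible matching element; report success."""
        for sel in selectors:
            try:
                element = page.locator(sel).first
                element.wait_for(state="visible", timeout=5000)  # assumed budget
                element.screenshot(path=destination)
                return True
            except Exception:
                continue  # try the next selector, then fall back to full page
        return False
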
@@ -483,20 +466,22 @@ def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warn

def _capture_screenshot(options: ScreenshotOptions) -> ScreenshotResult:
    """Capture a screenshot for the given options."""
    debug(f"[_capture_screenshot] Preparing capture for {options.url}")
    destination = _prepare_output_path(options)
    warnings: List[str] = []
    _capture_with_playwright(options, destination, warnings)
    _capture(options, destination, warnings)

    known_urls = _unique_preserve_order([options.url, *options.known_urls])
    known_urls = unique_preserve_order([options.url, *options.known_urls])
    archive_urls: List[str] = []
    if options.archive:
        debug(f"[_capture_screenshot] Archiving enabled for {options.url}")
        archives, archive_warnings = _archive_url(options.url, options.archive_timeout)
        archive_urls.extend(archives)
        warnings.extend(archive_warnings)
        if archives:
            known_urls = _unique_preserve_order([*known_urls, *archives])
            known_urls = unique_preserve_order([*known_urls, *archives])

    applied_tags = _unique_preserve_order(list(tag for tag in options.tags if tag.strip()))
    applied_tags = unique_preserve_order(list(tag for tag in options.tags if tag.strip()))

    return ScreenshotResult(
        path=destination,
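
Pulling the pieces together, a hedged usage sketch of this entry point; the option fields shown are only those the diff actually reads (url, output_dir, output_format, archive, tags), and the real constructor may take more:

    from pathlib import Path

    options = ScreenshotOptions(
        url="https://x.com/someone/status/1",   # illustrative URL
        output_dir=Path.home() / "Videos",
        output_format="png",
        archive=True,
        tags=["screenshot"],
    )
    result = _capture_screenshot(options)
    print(result.path)  # destination chosen by _prepare_output_path()
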
@@ -530,6 +515,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    """
    from ._shared import parse_cmdlet_args

    debug(f"[_run] screen-shot invoked with args: {args}")

    # Help check
    try:
        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
@@ -581,6 +568,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        log(f"No URLs to process for screen-shot cmdlet", file=sys.stderr)
        return 1

    debug(f"[_run] URLs to process: {urls_to_process}")

    # ========================================================================
    # OUTPUT DIRECTORY RESOLUTION - Priority chain
    # ========================================================================
@@ -617,7 +606,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        screenshot_dir = Path.home() / "Videos"
        log(f"[screen_shot] Using default directory: {screenshot_dir}", flush=True)

    _ensure_directory(screenshot_dir)
    ensure_directory(screenshot_dir)

    # ========================================================================
    # PREPARE SCREENSHOT OPTIONS

@@ -249,6 +249,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

    try:
        results_list = []
        import result_table
        import importlib
        importlib.reload(result_table)
        from result_table import ResultTable

        # Create ResultTable for display
        table_title = f"Search: {query}"
        if provider_name:
            table_title += f" [{provider_name}]"
        elif storage_backend:
            table_title += f" [{storage_backend}]"

        table = ResultTable(table_title)
        table.set_source_command("search-file", args_list)

        # Try to search using provider (libgen, soulseek, debrid, openlibrary)
        if provider_name:
@@ -264,10 +278,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
            debug(f"[search_file] Provider search returned {len(search_result)} results")

            for item in search_result:
                # Add to table
                table.add_result(item)

                # Emit to pipeline
                item_dict = item.to_dict()
                results_list.append(item_dict)
                ctx.emit(item_dict)

            # Set the result table in context for TUI/CLI display
            ctx.set_last_result_table(table, results_list)

            debug(f"[search_file] Emitted {len(results_list)} results")

            # Write results to worker stdout
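
Both this provider branch and the storage branch below follow the same emit-and-collect shape; condensed, with table, ctx, db, worker_id, and search_result supplied by the surrounding cmdlet:

    import json

    results_list = []
    for item in search_result:
        table.add_result(item)           # row for the ResultTable display
        item_dict = item.to_dict()
        results_list.append(item_dict)   # collected for the worker log
        ctx.emit(item_dict)              # forwarded down the pipeline

    ctx.set_last_result_table(table, results_list)   # TUI/CLI display
    db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
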
@@ -316,6 +337,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
        # Emit results and collect for workers table
        if results:
            for item in results:
                # Add to table
                table.add_result(item)

                if isinstance(item, dict):
                    normalized = _ensure_storage_columns(item)
                    results_list.append(normalized)
@@ -329,6 +353,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                    results_list.append(item_dict)
                    ctx.emit(item_dict)

            # Set the result table in context for TUI/CLI display
            ctx.set_last_result_table(table, results_list)

            # Write results to worker stdout
            db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
        else: