Compare commits

10 commits · 47b8e5f80c...main
| SHA1 |
|---|
| 9eff65d1af |
| e9b505e609 |
| 935ce303d0 |
| d1f08216a2 |
| f6ce155985 |
| 4187b778d2 |
| caaefb5910 |
| f5d7e7dac5 |
| 9b3d4c280b |
| 8d36b87d6e |
.gitignore vendored (2 changes)
@@ -6,7 +6,6 @@ __pycache__/
-config.json
 # C extensions
 *.so
 
 # Distribution / packaging
 .Python
 build/
@@ -217,3 +216,4 @@ luac.out
 *.hex
 
 
+config.json
CLI.py (183 changes)
@@ -30,12 +30,16 @@ try:
     from prompt_toolkit import PromptSession
     from prompt_toolkit.completion import Completer, Completion
     from prompt_toolkit.document import Document
+    from prompt_toolkit.lexers import Lexer
+    from prompt_toolkit.styles import Style
     PROMPT_TOOLKIT_AVAILABLE = True
 except ImportError:  # pragma: no cover - optional dependency
     PromptSession = None  # type: ignore
     Completer = None  # type: ignore
     Completion = None  # type: ignore
     Document = None  # type: ignore
+    Lexer = None  # type: ignore
+    Style = None  # type: ignore
     PROMPT_TOOLKIT_AVAILABLE = False
 
 
@@ -236,6 +240,7 @@ def _close_cli_worker_manager() -> None:
     global _CLI_WORKER_MANAGER
     if _CLI_WORKER_MANAGER:
         try:
+            # print("[CLI] Closing worker manager...", file=sys.stderr)
            _CLI_WORKER_MANAGER.close()
         except Exception:
             pass
@@ -269,7 +274,7 @@ def _ensure_worker_manager(config: Dict[str, Any]) -> Optional[WorkerManagerType]:
             _CLI_WORKER_MANAGER.close()
         except Exception:
             pass
-    _CLI_WORKER_MANAGER = WorkerManager(resolved_root, auto_refresh_interval=0)
+    _CLI_WORKER_MANAGER = WorkerManager(resolved_root, auto_refresh_interval=0.5)
     manager = _CLI_WORKER_MANAGER
     config['_worker_manager'] = manager
     if manager and not _CLI_ORPHAN_CLEANUP_DONE:
@@ -531,6 +536,46 @@ if (
         async def get_completions_async(self, document: Document, complete_event):  # type: ignore[override]
             for completion in self.get_completions(document, complete_event):
                 yield completion
 
+    class MedeiaLexer(Lexer):
+        def lex_document(self, document):
+            def get_line(lineno):
+                line = document.lines[lineno]
+                tokens = []
+
+                import re
+                # Match: Whitespace, Pipe, Quoted string, or Word
+                pattern = re.compile(r'''
+                    (\s+) |                                   # 1. Whitespace
+                    (\|) |                                    # 2. Pipe
+                    ("(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*') |   # 3. Quoted string
+                    ([^\s\|]+)                                # 4. Word
+                ''', re.VERBOSE)
+
+                is_cmdlet = True
+
+                for match in pattern.finditer(line):
+                    ws, pipe, quote, word = match.groups()
+
+                    if ws:
+                        tokens.append(('', ws))
+                    elif pipe:
+                        tokens.append(('class:pipe', pipe))
+                        is_cmdlet = True
+                    elif quote:
+                        tokens.append(('class:string', quote))
+                        is_cmdlet = False
+                    elif word:
+                        if is_cmdlet:
+                            tokens.append(('class:cmdlet', word))
+                            is_cmdlet = False
+                        elif word.startswith('-'):
+                            tokens.append(('class:argument', word))
+                        else:
+                            tokens.append(('class:value', word))
+
+                return tokens
+            return get_line
 else:  # pragma: no cover - prompt toolkit unavailable
     CmdletCompleter = None  # type: ignore[assignment]
 
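The tokenizer above is pure regex work, so it can be sanity-checked outside prompt_toolkit. A minimal sketch reusing the same pattern and classification rules (the sample command line is made up):

```python
import re

# Same pattern as MedeiaLexer: whitespace, pipe, quoted string, or word.
PATTERN = re.compile(r'''
    (\s+) |                                   # 1. Whitespace
    (\|) |                                    # 2. Pipe
    ("(?:[^"\\]|\\.)*"|'(?:[^'\\]|\\.)*') |   # 3. Quoted string
    ([^\s\|]+)                                # 4. Word
''', re.VERBOSE)

def classify(line: str):
    """Token classes as MedeiaLexer assigns them, minus the 'class:' prefix."""
    tokens = []
    is_cmdlet = True  # First word of the line / of each pipe segment is the cmdlet
    for match in PATTERN.finditer(line):
        ws, pipe, quote, word = match.groups()
        if ws:
            tokens.append(('', ws))
        elif pipe:
            tokens.append(('pipe', pipe))
            is_cmdlet = True
        elif quote:
            tokens.append(('string', quote))
            is_cmdlet = False
        elif word:
            if is_cmdlet:
                tokens.append(('cmdlet', word))
                is_cmdlet = False
            elif word.startswith('-'):
                tokens.append(('argument', word))
            else:
                tokens.append(('value', word))
    return tokens

print([t for t in classify('search-file -provider youtube "daughter" | @1 | download-data') if t[0]])
# [('cmdlet', 'search-file'), ('argument', '-provider'), ('value', 'youtube'),
#  ('string', '"daughter"'), ('pipe', '|'), ('cmdlet', '@1'), ('pipe', '|'), ('cmdlet', 'download-data')]
```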
@@ -542,17 +587,72 @@ def _create_cmdlet_cli():
 
     app = typer.Typer(help="Medeia-Macina CLI")
 
+    @app.command("pipeline")
+    def pipeline(
+        command: str = typer.Option(..., "--pipeline", "-p", help="Pipeline command string to execute"),
+        seeds_json: Optional[str] = typer.Option(None, "--seeds-json", "-s", help="JSON string of seed items")
+    ):
+        """Execute a pipeline command non-interactively."""
+        import shlex
+        import json
+        import pipeline as ctx
+
+        # Load config
+        config = _load_cli_config()
+
+        # Initialize debug logging if enabled
+        if config:
+            from helper.logger import set_debug
+            set_debug(config.get("debug", False))
+
+        # Handle seeds if provided
+        if seeds_json:
+            try:
+                seeds = json.loads(seeds_json)
+                # If seeds is a list, use it directly. If single item, wrap in list.
+                if not isinstance(seeds, list):
+                    seeds = [seeds]
+
+                # Set seeds as the result of a "virtual" previous stage
+                # This allows the first command in the pipeline to receive them as input
+                ctx.set_last_result_items_only(seeds)
+            except Exception as e:
+                print(f"Error parsing seeds JSON: {e}")
+                return
+
+        try:
+            tokens = shlex.split(command)
+        except ValueError:
+            tokens = command.split()
+
+        if not tokens:
+            return
+
+        # Execute
+        _execute_pipeline(tokens)
+
     @app.command("repl")
     def repl():
         """Start interactive REPL for cmdlets with autocomplete."""
         banner = """
 Medeia-Macina
-=======================================
-Commands: help | exit | <cmdlet> --help
-Example: search-file --help
+=====================
+|123456789|ABCDEFGHI|
+|246813579|JKLMNOPQR|
+|369369369|STUVWXYZ0|
+|483726159|ABCDEFGHI|
+|516273849|JKLMNOPQR|
+|639639639|STUVWXYZ0|
+|753186429|ABCDEFGHI|
+|876543219|JKLMNOPQR|
+|999999999|STUVWXYZ0|
+=====================
 """
         print(banner)
 
+        # Configurable prompt
+        prompt_text = ">>>|"
+
         # Pre-acquire Hydrus session key at startup (like hub-ui does)
         try:
             config = _load_cli_config()
@@ -577,27 +677,44 @@ Example: search-file --help
 
         # Check MPV availability at startup
         try:
-            from hydrus_health_check import check_mpv_availability
+            from hydrus_health_check import check_mpv_availability, initialize_matrix_health_check, initialize_hydrus_health_check
             check_mpv_availability()
+            initialize_hydrus_health_check(config)
+            initialize_matrix_health_check(config)
         except Exception as e:
-            debug(f"⚠ Could not check MPV availability: {e}")
+            debug(f"⚠ Could not check service availability: {e}")
     except Exception:
         pass  # Silently ignore if config loading fails
 
     if PROMPT_TOOLKIT_AVAILABLE and PromptSession is not None and CmdletCompleter is not None:
         completer = CmdletCompleter()
-        session = PromptSession(completer=cast(Any, completer))
 
-        def get_input(prompt: str = ">>>|") -> str:
+        # Define style for syntax highlighting
+        style = Style.from_dict({
+            'cmdlet': '#ffffff',    # white
+            'argument': '#3b8eea',  # blue-ish
+            'value': "#9a3209",     # red-ish
+            'string': "#6d0d93",    # purple
+            'pipe': '#4caf50',      # green
+        })
+
+        session = PromptSession(
+            completer=cast(Any, completer),
+            lexer=MedeiaLexer(),
+            style=style
+        )
+
+        def get_input(prompt: str = prompt_text) -> str:
             return session.prompt(prompt)
     else:
-        def get_input(prompt: str = ">>>|") -> str:
+        def get_input(prompt: str = prompt_text) -> str:
             return input(prompt)
 
     while True:
+        print("#-------------------------------------------------------------------------#")
         try:
-            user_input = get_input(">>>|").strip()
+            user_input = get_input(prompt_text).strip()
         except (EOFError, KeyboardInterrupt):
             print("\nGoodbye!")
             break
@@ -645,6 +762,7 @@ Example: search-file --help
         if last_table is None:
             last_table = ctx.get_last_result_table()
 
+
         if last_table:
             print()
             # Also update current stage table so @N expansion works correctly
@@ -683,6 +801,17 @@ Example: search-file --help
             if pipeline_ctx_ref:
                 pipeline_ctx_ref.clear_current_command_text()
 
+    @app.callback(invoke_without_command=True)
+    def main_callback(ctx: typer.Context):
+        """
+        Medeia-Macina CLI entry point.
+        If no command is provided, starts the interactive REPL.
+        """
+        # Check if a subcommand is invoked
+        # Note: ctx.invoked_subcommand is None if no command was passed
+        if ctx.invoked_subcommand is None:
+            repl()
+
     return app
 
 
@@ -779,10 +908,10 @@ def _execute_pipeline(tokens: list):
         else:
             # Try command-based expansion first if we have source command info
             command_expanded = False
+            selected_row_args = []
 
             if source_cmd:
                 # Try to find row args for the selected indices
-                selected_row_args = []
                 for idx in first_stage_selection_indices:
                     row_args = ctx.get_current_stage_table_row_selection_args(idx)
                     if row_args:
@@ -827,32 +956,9 @@ def _execute_pipeline(tokens: list):
                 source_args = ctx.get_last_result_table_source_args()
 
                 if source_cmd == 'search-file' and source_args and 'youtube' in source_args:
-                    print(f"Auto-piping YouTube selection to .pipe")
-                    # We can't modify stages here easily as we are outside the loop or before it?
-                    # Actually, this block runs BEFORE the loop if stages[0] is a selection.
-                    # But wait, the loop iterates over stages.
-                    # If we are here, it means we handled the selection by filtering `piped_result`.
-                    # The loop will then execute stages starting from 0?
-                    # No, `_execute_pipeline` logic is complex.
-
-                    # Let's look at where this block is.
-                    # It is inside `_execute_pipeline`.
-                    # It runs if `first_stage_selection_indices` is set (meaning stages[0] was a selection).
-                    # And `command_expanded` is False (meaning we didn't replace stages[0] with a command).
-
-                    # If we are here, `piped_result` holds the selected item(s).
-                    # The loop below iterates `for stage_index, stage_tokens in enumerate(stages):`
-                    # But we removed the first stage from `stages`? No.
-
-                    # Wait, let's check how `first_stage_selection_indices` is used.
-                    # It seems `stages` is modified earlier?
-                    # "if stages and stages[0] and stages[0][0].startswith('@'): ... stages.pop(0)"
-
-                    # Yes, lines 750-760 (approx) pop the first stage if it is a selection.
-                    # So `stages` now contains the REST of the pipeline.
-                    # If user typed just `@1`, `stages` is now empty `[]`.
-
-                    # So if we want to pipe to `.pipe`, we should append `.pipe` to `stages`.
-                    stages.append(['.pipe'])
+                    # Only auto-pipe if no other stages follow (stages is empty because we popped the selection)
+                    if not stages:
+                        print(f"Auto-piping YouTube selection to .pipe")
+                        stages.append(['.pipe'])
                 else:
@@ -1313,7 +1419,8 @@ def _execute_cmdlet(cmd_name: str, args: list):
 
         # Special case: if this was a youtube search, print a hint about auto-piping
         if cmd_name == 'search-file' and filtered_args and 'youtube' in filtered_args:
-            print("\n[Hint] Type @N to play a video in MPV (e.g. @1)")
+            # print("\n[Hint] Type @N to play a video in MPV (e.g. @1)")
+            pass
     else:
         # Fallback to raw output if ResultTable not available
         for emitted in pipeline_ctx.emits:
LUA/main.lua (new file, 121 lines)

@@ -0,0 +1,121 @@
+local mp = require 'mp'
+local utils = require 'mp.utils'
+local msg = require 'mp.msg'
+
+local M = {}
+
+-- Configuration
+local opts = {
+    python_path = "python",
+    cli_path = nil  -- Will be auto-detected if nil
+}
+
+-- Detect CLI path
+local script_dir = mp.get_script_directory()
+if not opts.cli_path then
+    -- Assuming the structure is repo/LUA/script.lua and repo/CLI.py
+    -- We need to go up one level
+    local parent_dir = script_dir:match("(.*)[/\\]")
+    if parent_dir then
+        opts.cli_path = parent_dir .. "/CLI.py"
+    else
+        opts.cli_path = "CLI.py"  -- Fallback
+    end
+end
+
+-- Helper to run pipeline
+function M.run_pipeline(pipeline_cmd, seeds)
+    local args = {opts.python_path, opts.cli_path, "pipeline", pipeline_cmd}
+
+    if seeds then
+        local seeds_json = utils.format_json(seeds)
+        table.insert(args, "--seeds")
+        table.insert(args, seeds_json)
+    end
+
+    msg.info("Running pipeline: " .. pipeline_cmd)
+    local res = utils.subprocess({
+        args = args,
+        cancellable = false,
+    })
+
+    if res.status ~= 0 then
+        msg.error("Pipeline error: " .. (res.stderr or "unknown"))
+        mp.osd_message("Error: " .. (res.stderr or "unknown"), 5)
+        return nil
+    end
+
+    return res.stdout
+end
+
+-- Helper to run pipeline and parse JSON output
+function M.run_pipeline_json(pipeline_cmd, seeds)
+    -- Append | output-json if not present
+    if not pipeline_cmd:match("output%-json$") then
+        pipeline_cmd = pipeline_cmd .. " | output-json"
+    end
+
+    local output = M.run_pipeline(pipeline_cmd, seeds)
+    if output then
+        local ok, data = pcall(utils.parse_json, output)
+        if ok then
+            return data
+        else
+            msg.error("Failed to parse JSON: " .. output)
+            return nil
+        end
+    end
+    return nil
+end
+
+-- Command: Get info for current file
+function M.get_file_info()
+    local path = mp.get_property("path")
+    if not path then return end
+
+    -- We can pass the path as a seed item
+    local seed = {{path = path}}
+
+    -- Run pipeline: get-metadata
+    local data = M.run_pipeline_json("get-metadata", seed)
+
+    if data then
+        -- Display metadata
+        msg.info("Metadata: " .. utils.format_json(data))
+        mp.osd_message("Metadata loaded (check console)", 3)
+    end
+end
+
+-- Command: Delete current file
+function M.delete_current_file()
+    local path = mp.get_property("path")
+    if not path then return end
+
+    local seed = {{path = path}}
+
+    M.run_pipeline("delete-file", seed)
+    mp.osd_message("File deleted", 3)
+    mp.command("playlist-next")
+end
+
+-- Menu integration with UOSC
+function M.show_menu()
+    local menu_data = {
+        title = "Medios Macina",
+        items = {
+            { title = "Get Metadata", value = "script-binding medios-info", hint = "Ctrl+i" },
+            { title = "Delete File", value = "script-binding medios-delete", hint = "Ctrl+Del" },
+        }
+    }
+
+    local json = utils.format_json(menu_data)
+    mp.commandv('script-message-to', 'uosc', 'open-menu', json)
+end
+
+-- Keybindings
+mp.add_key_binding("m", "medios-menu", M.show_menu)
+mp.add_key_binding("mbtn_right", "medios-menu-right-click", M.show_menu)
+mp.add_key_binding("ctrl+i", "medios-info", M.get_file_info)
+mp.add_key_binding("ctrl+del", "medios-delete", M.delete_current_file)
+
+return M
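The Lua module above works entirely by shelling out to the new `pipeline` subcommand. The same protocol is usable from any language; here is a hedged Python sketch mirroring `M.run_pipeline_json` (the `-p`/`-s` flags match the typer options declared in CLI.py above; that `output-json` emits a single JSON document on stdout is an assumption):

```python
import json
import subprocess

def run_pipeline_json(pipeline_cmd: str, seeds=None):
    """Mirror of M.run_pipeline_json from LUA/main.lua: shell out to CLI.py."""
    if not pipeline_cmd.rstrip().endswith("output-json"):
        pipeline_cmd += " | output-json"
    args = ["python", "CLI.py", "pipeline", "-p", pipeline_cmd]
    if seeds is not None:
        args += ["-s", json.dumps(seeds)]  # seed items for the first stage
    res = subprocess.run(args, capture_output=True, text=True)
    if res.returncode != 0:
        raise RuntimeError(res.stderr or "unknown pipeline error")
    return json.loads(res.stdout)

# Usage, mirroring M.get_file_info: pass the current file as a seed item.
# data = run_pipeline_json("get-metadata", [{"path": "/tmp/movie.mp4"}])
```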
@@ -1,64 +0,0 @@
-# Medeia-Macina
-
-A powerful CLI media management and search platform integrating local files, Hydrus, torrents, books, and P2P networks.
-
-## Key Features
-*   **Unified Search**: Search across Local, Hydrus, LibGen, Soulseek, and Debrid.
-*   **Pipeline Architecture**: Chain commands like PowerShell (e.g., `search | filter | download`).
-*   **Smart Selection**: Use `@N` syntax to interact with results.
-*   **Metadata Management**: Tagging, notes, and relationships.
-
-## Installation
-1.  Install Python 3.9+ and [Deno](https://deno.com/) (for YouTube support).
-2.  Install dependencies: `pip install -r requirements.txt`
-3.  Run the CLI: `python CLI.py`
-
-## Command Examples
-
-### Search & Download
-```powershell
-# Search and download the first result
-search-file "daughter" | @1 | download-data
-
-# Search specific provider and download
-search-file -provider libgen "dune" | @1 | download-data
-
-# Download YouTube video (auto-probes formats)
-download-data "https://youtube.com/watch?v=..."
-# Select format #2 from the list
-@2 | download-data
-```
-
-### File Management
-```powershell
-# Add file to Hydrus
-add-file -path "C:\Videos\movie.mp4" -storage hydrus
-
-# Upload to 0x0.st and associate URL with Hydrus file
-search-file "my_video" | @1 | add-file -provider 0x0
-
-# Add tags to a file
-search-file "video" | @1 | add-tag "creator:someone, character:hero"
-
-# Use tag lists (from helper/adjective.json)
-@1 | add-tag "{gnostic}"
-```
-
-### Metadata & Notes
-```powershell
-# Add a note
-search-file "doc" | @1 | add-note "comment" "This is important"
-
-# Get tags
-search-file "image" | @1 | get-tag
-```
-
-### Pipeline Syntax
-*   `|` : Pipe results from one command to another.
-*   `@N` : Select the Nth item from the previous result (e.g., `@1`).
-*   `@N-M` : Select a range (e.g., `@1-5`).
-*   `@{1,3,5}` : Select specific items.
-*   `@*` : Select all items.
-
-## Configuration
-Edit `config.json` to set API keys (AllDebrid, OpenAI), storage paths, and Hydrus credentials.
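The `@N` selection grammar listed in the README above (`@N`, `@N-M`, `@{1,3,5}`, `@*`) is compact enough to pin down in a few lines. This is an illustrative parser for the grammar as documented, not the CLI's actual implementation:

```python
def parse_selection(token: str, total: int) -> list[int]:
    """Resolve an @-selection token to 0-based row indices (illustrative only)."""
    body = token.lstrip('@')
    if body == '*':                                          # @*  : all items
        return list(range(total))
    if body.startswith('{') and body.endswith('}'):          # @{1,3,5}
        return [int(n) - 1 for n in body[1:-1].split(',')]
    if '-' in body:                                          # @1-5 : inclusive range
        start, end = body.split('-', 1)
        return list(range(int(start) - 1, int(end)))
    return [int(body) - 1]                                   # @N  : single item

assert parse_selection('@1', 10) == [0]
assert parse_selection('@1-5', 10) == [0, 1, 2, 3, 4]
assert parse_selection('@{1,3,5}', 10) == [0, 2, 4]
assert parse_selection('@*', 3) == [0, 1, 2]
```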
@@ -2,6 +2,8 @@
 
 media management
 
+python cli.py
+
 1. search-file -provider youtube "something in the way"
 
 2. @1
@@ -68,37 +68,6 @@ def group_tags_by_namespace(tags: Sequence[str]) -> Dict[str, List[str]]:
     return grouped
 
 
-def build_metadata_snapshot(file_path: Path) -> Dict[str, Any]:
-    """Load any available sidecar metadata for the selected file."""
-
-    snapshot: Dict[str, Any] = {
-        "file": str(file_path),
-        "tags": group_tags_by_namespace(load_tags(file_path)),
-    }
-
-    try:
-        sidecar = metadata._derive_sidecar_path(file_path)
-        if sidecar.is_file():
-            title, tags, notes = metadata._read_sidecar_metadata(sidecar)
-            snapshot["sidecar"] = {
-                "title": title,
-                "tags": group_tags_by_namespace(tags),
-                "notes": notes,
-            }
-    except Exception:
-        snapshot["sidecar"] = None
-
-    return snapshot
-
-
-def summarize_result(result: Dict[str, Any]) -> str:
-    """Build a one-line summary for a pipeline result row."""
-
-    title = result.get("title") or result.get("identifier") or result.get("file_path")
-    source = result.get("source") or result.get("cmdlet") or "result"
-    return f"{source}: {title}" if title else source
-
-
 def normalize_tags(tags: Iterable[str]) -> List[str]:
     """Expose metadata.normalize_tags for callers that imported the old helper."""
 
@@ -69,33 +69,34 @@ class ExportModal(ModalScreen):
         """
         ext_lower = ext.lower() if ext else ''
 
-        # Audio formats
-        audio_exts = {'.mp3', '.flac', '.wav', '.aac', '.ogg', '.m4a', '.wma', '.opus', '.mka'}
-        audio_formats = [("MKA", "mka"), ("MP3", "mp3"), ("M4A", "m4a"), ("FLAC", "flac"), ("WAV", "wav"), ("AAC", "aac"), ("OGG", "ogg"), ("Opus", "opus")]
-
-        # Video formats (can have audio too)
-        video_exts = {'.mp4', '.mkv', '.webm', '.avi', '.mov', '.flv', '.wmv', '.m4v', '.ts', '.mpg', '.mpeg'}
-        video_formats = [("MP4", "mp4"), ("MKV", "mkv"), ("WebM", "webm"), ("AVI", "avi"), ("MOV", "mov")]
-
-        # Image formats
-        image_exts = {'.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff', '.ico'}
-        image_formats = [("JPG", "jpg"), ("PNG", "png"), ("WebP", "webp"), ("GIF", "gif"), ("BMP", "bmp")]
-
-        # Document formats - no conversion for now
-        document_exts = {'.pdf', '.epub', '.txt', '.docx', '.doc', '.rtf', '.md', '.html', '.mobi', '.cbz', '.cbr'}
-        document_formats = []
-
-        if ext_lower in audio_exts:
-            return ('audio', audio_formats)
-        elif ext_lower in video_exts:
-            return ('video', video_formats)
-        elif ext_lower in image_exts:
-            return ('image', image_formats)
-        elif ext_lower in document_exts:
-            return ('document', document_formats)
-        else:
-            # Default to audio if unknown
-            return ('unknown', audio_formats)
+        from helper.utils_constant import mime_maps
+
+        found_type = "unknown"
+
+        # Find type based on extension
+        for category, formats in mime_maps.items():
+            for fmt_key, fmt_info in formats.items():
+                if fmt_info.get("ext") == ext_lower:
+                    found_type = category
+                    break
+            if found_type != "unknown":
+                break
+
+        # Build format options for the found type
+        format_options = []
+
+        # If unknown, fallback to audio (matching legacy behavior)
+        target_type = found_type if found_type in mime_maps else "audio"
+
+        if target_type in mime_maps:
+            # Sort formats alphabetically
+            sorted_formats = sorted(mime_maps[target_type].items())
+            for fmt_key, fmt_info in sorted_formats:
+                label = fmt_key.upper()
+                value = fmt_key
+                format_options.append((label, value))
+
+        return (target_type, format_options)
 
     def _get_library_options(self) -> list:
         """Get available library options from config.json."""
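The rewritten lookup assumes `helper.utils_constant.mime_maps` maps category → format key → info dict with an `ext` field. A self-contained sketch of the same reverse lookup, with two invented sample entries standing in for the real table:

```python
# Invented sample of the mime_maps shape the new code relies on:
# category -> format key -> {"ext": ...}
mime_maps = {
    "audio": {"mp3": {"ext": ".mp3"}, "flac": {"ext": ".flac"}},
    "video": {"mp4": {"ext": ".mp4"}, "mkv": {"ext": ".mkv"}},
}

def formats_for_extension(ext: str):
    ext_lower = ext.lower() if ext else ''
    found_type = "unknown"
    # Reverse lookup: which category owns this extension?
    for category, formats in mime_maps.items():
        if any(info.get("ext") == ext_lower for info in formats.values()):
            found_type = category
            break
    # Unknown extensions fall back to audio, matching the legacy behavior
    target_type = found_type if found_type in mime_maps else "audio"
    options = [(key.upper(), key) for key in sorted(mime_maps[target_type])]
    return target_type, options

print(formats_for_extension(".mkv"))  # ('video', [('MKV', 'mkv'), ('MP4', 'mp4')])
```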
@@ -15,6 +15,8 @@ import asyncio
 # Add parent directory to path for imports
 sys.path.insert(0, str(Path(__file__).parent.parent))
 from config import load_config
+from result_table import ResultTable
+from helper.search_provider import get_provider
 
 logger = logging.getLogger(__name__)
 
@@ -49,7 +51,8 @@ class SearchModal(ModalScreen):
         self.results_table: Optional[DataTable] = None
         self.tags_textarea: Optional[TextArea] = None
         self.library_source_select: Optional[Select] = None
-        self.current_results: List[dict] = []
+        self.current_results: List[Any] = []  # List of SearchResult objects
+        self.current_result_table: Optional[ResultTable] = None
         self.is_searching = False
         self.current_worker = None  # Track worker for search operations
 
@@ -125,124 +128,6 @@ class SearchModal(ModalScreen):
         # Focus on search input
         self.search_input.focus()
 
-    async def _search_openlibrary(self, query: str) -> List[dict]:
-        """Search OpenLibrary for books."""
-        try:
-            from helper.search_provider import get_provider
-
-            logger.info(f"[search-modal] Searching OpenLibrary for: {query}")
-
-            # Get the OpenLibrary provider (now has smart search built-in)
-            provider = get_provider("openlibrary")
-            if not provider:
-                logger.error("[search-modal] OpenLibrary provider not available")
-                return []
-
-            # Search using the provider (smart search is now default)
-            search_results = provider.search(query, limit=20)
-
-            formatted_results = []
-            for result in search_results:
-                # Extract metadata from SearchResult.full_metadata
-                metadata = result.full_metadata or {}
-
-                formatted_results.append({
-                    "title": result.title,
-                    "author": ", ".join(metadata.get("authors", [])) if metadata.get("authors") else "Unknown",
-                    "year": metadata.get("year", ""),
-                    "publisher": metadata.get("publisher", ""),
-                    "isbn": metadata.get("isbn", ""),
-                    "oclc": metadata.get("oclc", ""),
-                    "lccn": metadata.get("lccn", ""),
-                    "openlibrary_id": metadata.get("olid", ""),
-                    "pages": metadata.get("pages", ""),
-                    "language": metadata.get("language", ""),
-                    "source": "openlibrary",
-                    "columns": result.columns,
-                    "raw_data": metadata
-                })
-
-            logger.info(f"[search-modal] Found {len(formatted_results)} OpenLibrary results")
-            return formatted_results
-
-        except Exception as e:
-            logger.error(f"[search-modal] OpenLibrary search error: {e}", exc_info=True)
-            import traceback
-            traceback.print_exc()
-            return []
-
-    async def _search_soulseek(self, query: str) -> List[dict]:
-        """Search Soulseek for music with automatic worker tracking."""
-        try:
-            from helper.search_provider import get_provider
-
-            # Create worker for tracking
-            worker = None
-            if self.app_instance and hasattr(self.app_instance, 'create_worker'):
-                worker = self.app_instance.create_worker(
-                    'soulseek',
-                    title=f"Soulseek Search: {query[:40]}",
-                    description=f"Searching P2P network for music"
-                )
-                self.current_worker = worker
-
-            if worker:
-                worker.log_step("Connecting to Soulseek peer network...")
-
-            logger.info(f"[search-modal] Searching Soulseek for: {query}")
-            provider = get_provider("soulseek")
-            search_results = provider.search(query, limit=20)
-
-            if worker:
-                worker.log_step(f"Search returned {len(search_results)} results")
-
-            logger.info(f"[search-modal] Found {len(search_results)} Soulseek results")
-
-            # Format results for display
-            formatted_results = []
-            for idx, result in enumerate(search_results):
-                metadata = result.full_metadata or {}
-                artist = metadata.get('artist', '')
-                album = metadata.get('album', '')
-                title = result.title
-                track_num = metadata.get('track_num', '')
-                size_bytes = result.size_bytes or 0
-
-                # Format size as human-readable
-                if size_bytes > 1024 * 1024:
-                    size_str = f"{size_bytes / (1024 * 1024):.1f} MB"
-                elif size_bytes > 1024:
-                    size_str = f"{size_bytes / 1024:.1f} KB"
-                else:
-                    size_str = f"{size_bytes} B"
-
-                # Build columns for display
-                columns = [
-                    ("#", str(idx + 1)),
-                    ("Title", title[:50] if title else "Unknown"),
-                    ("Artist", artist[:30] if artist else "(no artist)"),
-                    ("Album", album[:30] if album else ""),
-                ]
-
-                formatted_results.append({
-                    "title": title if title else "Unknown",
-                    "artist": artist if artist else "(no artist)",
-                    "album": album,
-                    "track": track_num,
-                    "filesize": size_str,
-                    "bitrate": "",  # Not available in Soulseek results
-                    "source": "soulseek",
-                    "columns": columns,
-                    "raw_data": result.to_dict()
-                })
-
-            return formatted_results
-        except Exception as e:
-            logger.error(f"[search-modal] Soulseek search error: {e}")
-            import traceback
-            traceback.print_exc()
-            return []
-
     async def _perform_search(self) -> None:
         """Perform the actual search based on selected source."""
         if not self.search_input or not self.source_select or not self.results_table:
@@ -257,87 +142,69 @@ class SearchModal(ModalScreen):
         source = self.source_select.value
 
         # Clear existing results
-        self.results_table.clear()
+        self.results_table.clear(columns=True)
         self.current_results = []
+        self.current_result_table = None
 
         self.is_searching = True
 
+        # Create worker for tracking
+        if self.app_instance and hasattr(self.app_instance, 'create_worker'):
+            self.current_worker = self.app_instance.create_worker(
+                source,
+                title=f"{source.capitalize()} Search: {query[:40]}",
+                description=f"Searching {source} for: {query}"
+            )
+            self.current_worker.log_step(f"Connecting to {source}...")
+
         try:
-            if source == "openlibrary":
-                results = await self._search_openlibrary(query)
-            elif source == "soulseek":
-                results = await self._search_soulseek(query)
-            else:
-                logger.warning(f"[search-modal] Unknown source: {source}")
+            provider = get_provider(source)
+            if not provider:
+                logger.error(f"[search-modal] Provider not available: {source}")
                 if self.current_worker:
-                    self.current_worker.finish("error", "Unknown search source")
+                    self.current_worker.finish("error", f"Provider not available: {source}")
                 return
 
+            logger.info(f"[search-modal] Searching {source} for: {query}")
+            results = provider.search(query, limit=20)
             self.current_results = results
 
-            # Populate table with results
-            if results:
-                # Check if first result has columns field
-                first_result = results[0]
-                if "columns" in first_result and first_result["columns"]:
-                    # Use dynamic columns from result
-                    # Clear existing columns and rebuild based on result columns
-                    self.results_table.clear()
-
-                    # Extract column headers from first result's columns field
-                    column_headers = [col[0] for col in first_result["columns"]]
-
-                    # Remove existing columns (we'll readd them with the right headers)
-                    # Note: This is a workaround since Textual's DataTable doesn't support dynamic column management well
-                    # For now, we just use the dynamic column headers from the result
-                    logger.info(f"[search-modal] Using dynamic columns: {column_headers}")
-
-                    # Populate rows using the column order from results
-                    for result in results:
-                        if "columns" in result and result["columns"]:
-                            # Extract values in column order
-                            row_data = [col[1] for col in result["columns"]]
-                            self.results_table.add_row(*row_data)
-                        else:
-                            # Fallback for results without columns
-                            logger.warning(f"[search-modal] Result missing columns field: {result.get('title', 'Unknown')}")
-                else:
-                    # Fallback to original hardcoded behavior if columns not available
-                    logger.info("[search-modal] No dynamic columns found, using default formatting")
-
-                    for result in results:
-                        if source == "openlibrary":
-                            # Format OpenLibrary results (original hardcoded)
-                            year = str(result.get("year", ""))[:4] if result.get("year") else ""
-                            details = f"ISBN: {result.get('isbn', '')}" if result.get('isbn') else ""
-                            if result.get('openlibrary_id'):
-                                details += f" | OL: {result.get('openlibrary_id')}"
-
-                            row_data = [
-                                result["title"][:60],
-                                result["author"][:35],
-                                year,
-                                details[:40]
-                            ]
-                        else:  # soulseek
-                            row_data = [
-                                result["title"][:50],
-                                result["artist"][:30],
-                                result["album"][:30],
-                                result['filesize']
-                            ]
-
-                        self.results_table.add_row(*row_data)
-            else:
-                # Add a "no results" message
-                self.results_table.add_row("No results found", "", "", "")
-
-            # Finish worker if tracking
+            if self.current_worker:
+                self.current_worker.log_step(f"Found {len(results)} results")
+
+            # Create ResultTable
+            table = ResultTable(f"Search Results: {query}")
+            for res in results:
+                row = table.add_row()
+                # Add columns from result.columns
+                if res.columns:
+                    for name, value in res.columns:
+                        row.add_column(name, value)
+                else:
+                    # Fallback if no columns defined
+                    row.add_column("Title", res.title)
+                    row.add_column("Target", res.target)
+
+            self.current_result_table = table
+
+            # Populate UI
+            if table.rows:
+                # Add headers
+                headers = [col.name for col in table.rows[0].columns]
+                self.results_table.add_columns(*headers)
+                # Add rows
+                for row_vals in table.to_datatable_rows():
+                    self.results_table.add_row(*row_vals)
+            else:
+                self.results_table.add_columns("Message")
+                self.results_table.add_row("No results found")
+
+            # Finish worker
             if self.current_worker:
                 self.current_worker.finish("completed", f"Found {len(results)} results")
 
         except Exception as e:
-            logger.error(f"[search-modal] Search error: {e}")
+            logger.error(f"[search-modal] Search error: {e}", exc_info=True)
             if self.current_worker:
                 self.current_worker.finish("error", f"Search failed: {str(e)}")
@@ -382,35 +249,58 @@ class SearchModal(ModalScreen):
             selected_row = self.results_table.cursor_row
             if 0 <= selected_row < len(self.current_results):
                 result = self.current_results[selected_row]
 
+                # Convert to dict if needed for submission
+                if hasattr(result, 'to_dict'):
+                    result_dict = result.to_dict()
+                else:
+                    result_dict = result
+
                 # Get tags from textarea
                 tags_text = self.tags_textarea.text if self.tags_textarea else ""
                 # Get library source (if OpenLibrary)
                 library_source = self.library_source_select.value if self.library_source_select else "local"
 
                 # Add tags and source to result
-                result["tags_text"] = tags_text
-                result["library_source"] = library_source
+                result_dict["tags_text"] = tags_text
+                result_dict["library_source"] = library_source
 
                 # Post message and dismiss
-                self.post_message(self.SearchSelected(result))
-                self.dismiss(result)
+                self.post_message(self.SearchSelected(result_dict))
+                self.dismiss(result_dict)
             else:
                 logger.warning("[search-modal] No result selected for submission")
 
         elif button_id == "cancel-button":
             self.dismiss(None)
 
-    def _populate_tags_from_result(self, result: dict) -> None:
+    def _populate_tags_from_result(self, result: Any) -> None:
         """Populate the tags textarea from a selected result."""
         if not self.tags_textarea:
             return
 
+        # Handle both SearchResult objects and dicts
+        if hasattr(result, 'full_metadata'):
+            metadata = result.full_metadata or {}
+            source = result.origin
+            title = result.title
+        else:
+            # Handle dict (legacy or from to_dict)
+            if 'full_metadata' in result:
+                metadata = result['full_metadata'] or {}
+            elif 'raw_data' in result:
+                metadata = result['raw_data'] or {}
+            else:
+                metadata = result
+
+            source = result.get('origin', result.get('source', ''))
+            title = result.get('title', '')
+
         # Format tags based on result source
-        if result.get("source") == "openlibrary":
+        if source == "openlibrary":
             # For OpenLibrary: title, author, year
-            title = result.get("title", "")
-            author = result.get("author", "")
-            year = result.get("year", "")
+            author = ", ".join(metadata.get("authors", [])) if isinstance(metadata.get("authors"), list) else metadata.get("authors", "")
+            year = str(metadata.get("year", ""))
             tags = []
             if title:
                 tags.append(title)
@@ -419,38 +309,51 @@ class SearchModal(ModalScreen):
             if year:
                 tags.append(year)
             tags_text = "\n".join(tags)
-        else:  # soulseek
+        elif source == "soulseek":
             # For Soulseek: artist, album, title, track
             tags = []
-            if result.get("artist"):
-                tags.append(result["artist"])
-            if result.get("album"):
-                tags.append(result["album"])
-            if result.get("track"):
-                tags.append(f"Track {result['track']}")
-            if result.get("title"):
-                tags.append(result["title"])
+            if metadata.get("artist"):
+                tags.append(metadata["artist"])
+            if metadata.get("album"):
+                tags.append(metadata["album"])
+            if metadata.get("track_num"):
+                tags.append(f"Track {metadata['track_num']}")
+            if title:
+                tags.append(title)
+            tags_text = "\n".join(tags)
+        else:
+            # Generic fallback
+            tags = [title]
             tags_text = "\n".join(tags)
 
         self.tags_textarea.text = tags_text
         logger.info(f"[search-modal] Populated tags textarea from result")
 
-    async def _download_book(self, result: dict) -> None:
+    async def _download_book(self, result: Any) -> None:
         """Download a book from OpenLibrary using unified downloader."""
         try:
             from helper.unified_book_downloader import UnifiedBookDownloader
             from config import load_config
 
-            logger.info(f"[search-modal] Starting download for: {result.get('title')}")
+            # Convert SearchResult to dict if needed
+            if hasattr(result, 'to_dict'):
+                result_dict = result.to_dict()
+                # Ensure raw_data is populated for downloader
+                if 'raw_data' not in result_dict and result.full_metadata:
+                    result_dict['raw_data'] = result.full_metadata
+            else:
+                result_dict = result
+
+            logger.info(f"[search-modal] Starting download for: {result_dict.get('title')}")
 
             config = load_config()
             downloader = UnifiedBookDownloader(config=config)
 
             # Get download options for this book
-            options = downloader.get_download_options(result)
+            options = downloader.get_download_options(result_dict)
 
             if not options['methods']:
-                logger.warning(f"[search-modal] No download methods available for: {result.get('title')}")
+                logger.warning(f"[search-modal] No download methods available for: {result_dict.get('title')}")
                 # Could show a modal dialog here
                 return
 
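The modal now passes `SearchResult` objects around and falls back to dicts. The attributes it touches (`title`, `origin`, `target`, `size_bytes`, `columns`, `full_metadata`, `to_dict()`) imply roughly the following shape — a sketch of the assumed interface, not the real `helper.search_provider` class:

```python
from dataclasses import dataclass, field, asdict
from typing import Any, Dict, List, Optional, Tuple

@dataclass
class SearchResult:
    """Assumed shape, inferred from the attributes the modal uses above."""
    title: str
    origin: str                 # provider name, e.g. "openlibrary" or "soulseek"
    target: str = ""            # download target / identifier
    size_bytes: int = 0
    columns: List[Tuple[str, str]] = field(default_factory=list)  # (header, value)
    full_metadata: Optional[Dict[str, Any]] = None

    def to_dict(self) -> Dict[str, Any]:
        return asdict(self)

def as_dict(result: Any) -> Dict[str, Any]:
    """The duck-typing used by _populate_tags_from_result and _download_book."""
    return result.to_dict() if hasattr(result, 'to_dict') else result
```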
@@ -40,6 +40,7 @@ class PipelineStageResult:
     name: str
     args: Sequence[str]
     emitted: List[Any] = field(default_factory=list)
+    result_table: Optional[Any] = None  # ResultTable object if available
     status: str = "pending"
     error: Optional[str] = None
 
@@ -52,6 +53,7 @@ class PipelineRunResult:
     success: bool
     stages: List[PipelineStageResult] = field(default_factory=list)
     emitted: List[Any] = field(default_factory=list)
+    result_table: Optional[Any] = None  # Final ResultTable object if available
     stdout: str = ""
     stderr: str = ""
     error: Optional[str] = None
@@ -146,6 +148,7 @@ class PipelineExecutor:
 
             if index == len(stages) - 1:
                 result.emitted = stage.emitted
+                result.result_table = stage.result_table
             else:
                 piped_result = stage.emitted
 
@@ -212,6 +215,10 @@ class PipelineExecutor:
         emitted = list(getattr(pipeline_ctx, "emits", []) or [])
         stage.emitted = emitted
 
+        # Capture the ResultTable if the cmdlet set one
+        # Check display table first (overlay), then last result table
+        stage.result_table = ctx.get_display_table() or ctx.get_last_result_table()
+
         if return_code != 0:
             stage.status = "failed"
             stage.error = f"Exit code {return_code}"
@@ -224,7 +231,12 @@ class PipelineExecutor:
             label = f"[Stage {index + 1}/{total}] {cmd_name} {stage.status}"
             self._worker_manager.log_step(worker_id, label)
 
-        ctx.set_last_result_table(None, emitted)
+        # Don't clear the table if we just captured it, but ensure items are set for next stage
+        # If we have a table, we should probably keep it in ctx for history if needed
+        # But for pipeline execution, we mainly care about passing items to next stage
+        # ctx.set_last_result_table(None, emitted) <-- This was clearing it
+
+        # Ensure items are available for next stage
         ctx.set_last_items(emitted)
         return stage
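With `result_table` now threaded through both dataclasses, a caller can render the final table directly instead of re-deriving columns from emitted items. A sketch of consuming a run result the way the TUI does below, using only fields and methods visible in this diff:

```python
def render_run(run_result) -> None:
    """Print a PipelineRunResult: table first, emitted items as fallback."""
    if not run_result.success:
        print(f"pipeline failed: {run_result.error}")
        for stage in run_result.stages:
            print(f"  [{stage.name}] {stage.status}: {stage.error or ''}")
        return
    table = run_result.result_table
    if table and table.rows:
        headers = [col.name for col in table.rows[0].columns]
        print(" | ".join(headers))
        for row_vals in table.to_datatable_rows():
            print(" | ".join(str(v) for v in row_vals))
    else:
        for item in run_result.emitted:
            print(item)
```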
TUI/tui.py (64 changes)
@@ -3,7 +3,7 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Dict, List, Optional, Sequence
|
from typing import Any, List, Optional, Sequence
|
||||||
|
|
||||||
from textual import work
|
from textual import work
|
||||||
from textual.app import App, ComposeResult
|
from textual.app import App, ComposeResult
|
||||||
@@ -32,10 +32,9 @@ for path in (BASE_DIR, ROOT_DIR):
|
|||||||
from menu_actions import ( # type: ignore # noqa: E402
|
from menu_actions import ( # type: ignore # noqa: E402
|
||||||
PIPELINE_PRESETS,
|
PIPELINE_PRESETS,
|
||||||
PipelinePreset,
|
PipelinePreset,
|
||||||
build_metadata_snapshot,
|
|
||||||
summarize_result,
|
|
||||||
)
|
)
|
||||||
from pipeline_runner import PipelineExecutor, PipelineRunResult # type: ignore # noqa: E402
|
from pipeline_runner import PipelineExecutor, PipelineRunResult # type: ignore # noqa: E402
|
||||||
|
from result_table import ResultTable # type: ignore # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
class PresetListItem(ListItem):
|
class PresetListItem(ListItem):
|
||||||
@@ -73,6 +72,7 @@ class PipelineHubApp(App):
|
|||||||
self.worker_table: Optional[DataTable] = None
|
self.worker_table: Optional[DataTable] = None
|
||||||
self.preset_list: Optional[ListView] = None
|
self.preset_list: Optional[ListView] = None
|
||||||
self.status_panel: Optional[Static] = None
|
self.status_panel: Optional[Static] = None
|
||||||
|
self.current_result_table: Optional[ResultTable] = None
|
||||||
self._pipeline_running = False
|
self._pipeline_running = False
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
@@ -81,7 +81,7 @@ class PipelineHubApp(App):
|
|||||||
def compose(self) -> ComposeResult: # noqa: D401 - Textual compose hook
|
def compose(self) -> ComposeResult: # noqa: D401 - Textual compose hook
|
||||||
yield Header(show_clock=True)
|
yield Header(show_clock=True)
|
||||||
with Container(id="app-shell"):
|
with Container(id="app-shell"):
|
||||||
with Horizontal(id="command-row"):
|
with Horizontal(id="command-pane"):
|
||||||
self.command_input = Input(
|
self.command_input = Input(
|
||||||
placeholder='download-data "<url>" | merge-file | add-tag | add-file -storage local',
|
placeholder='download-data "<url>" | merge-file | add-tag | add-file -storage local',
|
||||||
id="pipeline-input",
|
id="pipeline-input",
|
||||||
@@ -174,7 +174,7 @@ class PipelineHubApp(App):
|
|||||||
return
|
return
|
||||||
index = event.cursor_row
|
index = event.cursor_row
|
||||||
if 0 <= index < len(self.result_items):
|
if 0 <= index < len(self.result_items):
|
||||||
self._display_metadata(self.result_items[index])
|
self._display_metadata(index)
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# Pipeline execution helpers
|
# Pipeline execution helpers
|
||||||
@@ -216,6 +216,7 @@ class PipelineHubApp(App):
|
|||||||
else:
|
else:
|
||||||
self.result_items = []
|
self.result_items = []
|
||||||
|
|
||||||
|
self.current_result_table = run_result.result_table
|
||||||
self._populate_results_table()
|
self._populate_results_table()
|
||||||
self.refresh_workers()
|
self.refresh_workers()
|
||||||
|
|
||||||
@@ -228,40 +229,45 @@ class PipelineHubApp(App):
|
|||||||
def _populate_results_table(self) -> None:
|
def _populate_results_table(self) -> None:
|
||||||
if not self.results_table:
|
if not self.results_table:
|
||||||
return
|
return
|
||||||
self.results_table.clear()
|
self.results_table.clear(columns=True)
|
||||||
|
|
||||||
|
if self.current_result_table and self.current_result_table.rows:
|
||||||
|
# Use ResultTable headers from the first row
|
||||||
|
first_row = self.current_result_table.rows[0]
|
||||||
|
headers = ["#"] + [col.name for col in first_row.columns]
|
||||||
|
self.results_table.add_columns(*headers)
|
||||||
|
|
||||||
|
rows = self.current_result_table.to_datatable_rows()
|
||||||
|
for idx, row_values in enumerate(rows, 1):
|
||||||
|
self.results_table.add_row(str(idx), *row_values, key=str(idx - 1))
|
||||||
|
else:
|
||||||
|
# Fallback or empty state
|
||||||
|
self.results_table.add_columns("Row", "Title", "Source", "File")
|
||||||
if not self.result_items:
|
if not self.result_items:
|
||||||
self.results_table.add_row("—", "No results", "", "")
|
self.results_table.add_row("—", "No results", "", "")
|
||||||
return
|
return
|
||||||
for idx, item in enumerate(self.result_items, start=1):
|
|
||||||
if isinstance(item, dict):
|
|
||||||
title = summarize_result(item)
|
|
||||||
source = item.get("source") or item.get("cmdlet_name") or item.get("cmdlet") or "—"
|
|
||||||
file_path = item.get("file_path") or item.get("path") or "—"
|
|
||||||
else:
|
|
||||||
title = str(item)
|
|
||||||
source = "—"
|
|
||||||
file_path = "—"
|
|
||||||
self.results_table.add_row(str(idx), title, source, file_path, key=str(idx - 1))
|
|
||||||
|
|
||||||
def _display_metadata(self, item: Any) -> None:
|
# Fallback for items without a table
|
||||||
|
for idx, item in enumerate(self.result_items, start=1):
|
||||||
|
self.results_table.add_row(str(idx), str(item), "—", "—", key=str(idx - 1))
|
||||||
|
|
||||||
|
def _display_metadata(self, index: int) -> None:
|
||||||
if not self.metadata_tree:
|
if not self.metadata_tree:
|
||||||
return
|
return
|
||||||
root = self.metadata_tree.root
|
root = self.metadata_tree.root
|
||||||
root.label = "Metadata"
|
root.label = "Metadata"
|
||||||
root.remove_children()
|
root.remove_children()
|
||||||
|
|
||||||
payload: Dict[str, Any]
|
if self.current_result_table and 0 <= index < len(self.current_result_table.rows):
|
||||||
|
row = self.current_result_table.rows[index]
|
||||||
|
for col in row.columns:
|
||||||
|
root.add(f"[b]{col.name}[/b]: {col.value}")
|
||||||
|
elif 0 <= index < len(self.result_items):
|
||||||
|
item = self.result_items[index]
|
||||||
if isinstance(item, dict):
|
if isinstance(item, dict):
|
||||||
file_path = item.get("file_path") or item.get("path")
|
self._populate_tree_node(root, item)
|
||||||
if file_path:
|
|
||||||
payload = build_metadata_snapshot(Path(file_path))
|
|
||||||
else:
|
else:
|
||||||
payload = item
|
root.add(str(item))
|
||||||
else:
|
|
||||||
payload = {"value": str(item)}
|
|
||||||
|
|
||||||
self._populate_tree_node(root, payload)
|
|
||||||
root.expand_all()
|
|
||||||
|
|
||||||
def _populate_tree_node(self, node, data: Any) -> None:
|
def _populate_tree_node(self, node, data: Any) -> None:
|
||||||
if isinstance(data, dict):
|
if isinstance(data, dict):
|
||||||
@@ -278,14 +284,14 @@ class PipelineHubApp(App):
     def _clear_log(self) -> None:
         self.log_lines = []
         if self.log_output:
-            self.log_output.value = ""
+            self.log_output.text = ""

     def _append_log_line(self, line: str) -> None:
         self.log_lines.append(line)
         if len(self.log_lines) > 500:
             self.log_lines = self.log_lines[-500:]
         if self.log_output:
-            self.log_output.value = "\n".join(self.log_lines)
+            self.log_output.text = "\n".join(self.log_lines)

     def _append_block(self, text: str) -> None:
         for line in text.strip().splitlines():
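Both edits swap the widget's .value attribute for .text, which matches a Textual TextArea-style API. A minimal sketch of the bounded buffer these methods maintain, with the 500-line cap factored out (the deque and class name are illustrative, not from the source):

    # Sketch only: bounded log buffer behind _append_log_line.
    from collections import deque

    class LogBuffer:
        def __init__(self, max_lines: int = 500) -> None:
            self.lines: deque[str] = deque(maxlen=max_lines)  # oldest lines drop automatically

        def append(self, line: str) -> str:
            self.lines.append(line)
            return "\n".join(self.lines)  # ready to assign to log_output.text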
20
TUI/tui.tcss
@@ -6,7 +6,7 @@
     layout: vertical;
 }

-#command-row {
+#command-pane {
     width: 100%;
     height: auto;
     background: $boost;
@@ -18,7 +18,6 @@
     width: 1fr;
     min-height: 3;
     padding: 0 1;
-    margin-right: 1;
     background: $surface;
     color: $text;
     border: round $primary;
@@ -30,7 +29,9 @@
 }

 #status-panel {
-    min-width: 20;
+    width: auto;
+    max-width: 25;
+    height: 3;
     text-style: bold;
     content-align: center middle;
     padding: 0 1;
@@ -52,7 +53,7 @@
 }

 #left-pane {
-    max-width: 48;
+    max-width: 60;
 }

 .section-title {
@@ -67,6 +68,11 @@
     margin-bottom: 1;
 }

+#preset-list {
+    height: 25;
+    border: solid $secondary;
+}
+
 #log-output {
     height: 16;
 }
@@ -98,3 +104,9 @@
     background: $error 20%;
     color: $error;
 }
+
+#run-button {
+    width: auto;
+    min-width: 10;
+    margin: 0 1;
+}
@@ -119,7 +119,9 @@ for filename in os.listdir(cmdlet_dir):
             for alias in cmdlet_obj.aliases:
                 normalized_alias = alias.replace('_', '-').lower()
                 REGISTRY[normalized_alias] = run_fn
-    except Exception:
+    except Exception as e:
+        import sys
+        print(f"Error importing cmdlet '{mod_name}': {e}", file=sys.stderr)
         continue

 # Import root-level modules that also register cmdlets
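This change stops import failures from being silently swallowed. A minimal sketch of the discovery loop it sits in, assuming the surrounding details (cmdlet_dir scanning, CMDLET.aliases, run_fn lookup) that the hunk context only hints at:

    # Sketch only: cmdlet discovery with visible failures.
    import importlib
    import os
    import sys

    REGISTRY: dict[str, object] = {}

    for filename in os.listdir("cmdlets"):
        if not filename.endswith(".py") or filename.startswith("_"):
            continue
        mod_name = filename[:-3]
        try:
            mod = importlib.import_module(f"cmdlets.{mod_name}")
            run_fn = getattr(mod, "_run", None)
            cmdlet_obj = getattr(mod, "CMDLET", None)
            for alias in getattr(cmdlet_obj, "aliases", []) or []:
                REGISTRY[alias.replace("_", "-").lower()] = run_fn
        except Exception as e:
            # Reporting the failure keeps a broken cmdlet from vanishing silently.
            print(f"Error importing cmdlet '{mod_name}': {e}", file=sys.stderr)
            continue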
@@ -371,7 +371,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
     # Extract tags/known URLs from pipeline objects if available
     pipe_object_tags = extract_tags_from_result(result)
     if pipe_object_tags:
-        log(f"Extracted {len(pipe_object_tags)} tag(s) from pipeline result: {', '.join(pipe_object_tags[:5])}", file=sys.stderr)
+        debug(f"Extracted {len(pipe_object_tags)} tag(s) from pipeline result: {', '.join(pipe_object_tags[:5])}", file=sys.stderr)
     pipe_known_urls = extract_known_urls_from_result(result)

     # Resolve media path: get from piped result
@@ -574,11 +574,11 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
     try:
         file_provider = get_file_provider(provider_name, config)
         if file_provider is None:
-            log(f"❌ File provider '{provider_name}' not available", file=sys.stderr)
+            log(f"File provider '{provider_name}' not available", file=sys.stderr)
             return 1

         hoster_url = file_provider.upload(media_path)
-        log(f"✅ File uploaded to {provider_name}: {hoster_url}", file=sys.stderr)
+        log(f"File uploaded to {provider_name}: {hoster_url}", file=sys.stderr)

         # Associate the URL with the file in Hydrus if possible
         current_hash = locals().get('file_hash')
@@ -590,12 +590,12 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
                 client = hydrus_wrapper.get_client(config)
                 if client:
                     client.associate_url(current_hash, hoster_url)
-                    log(f"✅ Associated URL with file hash {current_hash}", file=sys.stderr)
+                    debug(f"Associated URL with file hash {current_hash}", file=sys.stderr)
             except Exception as exc:
-                log(f"⚠️ Could not associate URL with Hydrus file: {exc}", file=sys.stderr)
+                log(f"Could not associate URL with Hydrus file: {exc}", file=sys.stderr)

     except Exception as exc:
-        log(f"❌ {provider_name} upload failed: {exc}", file=sys.stderr)
+        log(f"{provider_name} upload failed: {exc}", file=sys.stderr)
         return 1

     if delete_after_upload:
@@ -632,7 +632,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
         log("❌ No local storage path configured. Set 'storage.local.path' in config.json", file=sys.stderr)
         return 1

-    log(f"Moving into configured local library: {resolved_dir}", file=sys.stderr)
+    debug(f"Moving into configured local library: {resolved_dir}", file=sys.stderr)
     exit_code, dest_path = _handle_local_transfer(media_path, Path(resolved_dir), result, config)

     # After successful local transfer, emit result for pipeline continuation
@@ -713,7 +713,7 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
     except Exception as exc:
         log(f"❌ Failed to compute file hash: {exc}", file=sys.stderr)
         return 1
-    log(f"File hash: {file_hash}", file=sys.stderr)
+    debug(f"File hash: {file_hash}", file=sys.stderr)

     # Read sidecar tags and known URLs first (for tagging)

@@ -789,9 +789,9 @@ def _run(result: Any, _args: Sequence[str], config: Dict[str, Any]) -> int:
             config=config,
             tags=tags,
         )
-        log(f"✅ File uploaded to Hydrus: {file_hash}", file=sys.stderr)
+        log(f"Hydrus: {file_hash}", file=sys.stderr)
     except Exception as exc:
-        log(f"❌ Hydrus upload failed: {exc}", file=sys.stderr)
+        log(f"Failed: {exc}", file=sys.stderr)
         return 1

     # Associate known URLs in Hydrus metadata
@@ -13,7 +13,7 @@ import pipeline as ctx
 from ._shared import normalize_result_input, filter_results_by_temp
 from helper import hydrus as hydrus_wrapper
 from helper.local_library import read_sidecar, write_sidecar, find_sidecar, has_sidecar, LocalLibraryDB
-from metadata import rename_by_metadata
+from metadata import rename
 from ._shared import Cmdlet, CmdletArg, normalize_hash, parse_tag_arguments, expand_tag_groups, parse_cmdlet_args
 from config import get_local_storage_path
@@ -4,7 +4,7 @@ from typing import Any, Dict, Sequence
 import json
 import sys

-from helper.logger import log
+from helper.logger import debug, log
 import sqlite3
 from pathlib import Path

@@ -84,64 +84,28 @@ def _delete_database_entry(db_path: Path, file_path: str) -> bool:
     return False


-def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
-    # Help
-    try:
-        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
-            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
-            return 0
-    except Exception:
-        pass
-
-    # Handle @N selection which creates a list - extract the first item
-    if isinstance(result, list) and len(result) > 0:
-        result = result[0]
-
-    # Parse overrides and options
-    override_hash: str | None = None
-    conserve: str | None = None
-    lib_root: str | None = None
-    reason_tokens: list[str] = []
-    i = 0
-    while i < len(args):
-        token = args[i]
-        low = str(token).lower()
-        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args):
-            override_hash = str(args[i + 1]).strip()
-            i += 2
-            continue
-        if low in {"-conserve", "--conserve"} and i + 1 < len(args):
-            value = str(args[i + 1]).strip().lower()
-            if value in {"local", "hydrus"}:
-                conserve = value
-                i += 2
-                continue
-        if low in {"-lib-root", "--lib-root", "lib-root"} and i + 1 < len(args):
-            lib_root = str(args[i + 1]).strip()
-            i += 2
-            continue
-        reason_tokens.append(token)
-        i += 1
-
-    # Handle result as either dict or object
-    if isinstance(result, dict):
-        hash_hex_raw = result.get("hash_hex") or result.get("hash")
-        target = result.get("target")
-        origin = result.get("origin")
+def _process_single_item(item: Any, override_hash: str | None, conserve: str | None,
+                         lib_root: str | None, reason: str, config: Dict[str, Any]) -> bool:
+    """Process deletion for a single item."""
+    # Handle item as either dict or object
+    if isinstance(item, dict):
+        hash_hex_raw = item.get("hash_hex") or item.get("hash")
+        target = item.get("target")
+        origin = item.get("origin")
     else:
-        hash_hex_raw = getattr(result, "hash_hex", None) or getattr(result, "hash", None)
-        target = getattr(result, "target", None)
-        origin = getattr(result, "origin", None)
+        hash_hex_raw = getattr(item, "hash_hex", None) or getattr(item, "hash", None)
+        target = getattr(item, "target", None)
+        origin = getattr(item, "origin", None)

     # For Hydrus files, the target IS the hash
     if origin and origin.lower() == "hydrus" and not hash_hex_raw:
         hash_hex_raw = target

     hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_hex_raw)
-    reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()

     local_deleted = False
     local_target = isinstance(target, str) and target.strip() and not str(target).lower().startswith(("http://", "https://"))

     if conserve != "local" and local_target:
         path = Path(str(target))
         file_path_str = str(target)  # Keep the original string for DB matching
@@ -168,8 +132,6 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         if lib_root:
             lib_root_path = Path(lib_root)
             db_path = lib_root_path / ".downlow_library.db"
-            log(f"Attempting DB cleanup: lib_root={lib_root}, db_path={db_path}", file=sys.stderr)
-            log(f"Deleting DB entry for: {file_path_str}", file=sys.stderr)
             if _delete_database_entry(db_path, file_path_str):
                 if ctx._PIPE_ACTIVE:
                     ctx.emit(f"Removed database entry: {path.name}")
@@ -178,7 +140,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             else:
                 log(f"Database entry not found or cleanup failed for {file_path_str}", file=sys.stderr)
         else:
-            log(f"No lib_root provided, skipping database cleanup", file=sys.stderr)
+            debug(f"No lib_root provided, skipping database cleanup", file=sys.stderr)

     hydrus_deleted = False
     if conserve != "hydrus" and hash_hex:
@@ -187,12 +149,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         except Exception as exc:
             if not local_deleted:
                 log(f"Hydrus client unavailable: {exc}", file=sys.stderr)
-                return 1
+                return False
         else:
             if client is None:
                 if not local_deleted:
                     log("Hydrus client unavailable", file=sys.stderr)
-                    return 1
+                    return False
             else:
                 payload: Dict[str, Any] = {"hashes": [hash_hex]}
                 if reason:
@@ -201,11 +163,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                 client._post("/add_files/delete_files", data=payload)  # type: ignore[attr-defined]
                 hydrus_deleted = True
                 preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
-                log(f"Deleted from Hydrus: {preview}…", file=sys.stderr)
+                debug(f"Deleted from Hydrus: {preview}…", file=sys.stderr)
         except Exception as exc:
             log(f"Hydrus delete failed: {exc}", file=sys.stderr)
             if not local_deleted:
-                return 1
+                return False

     if hydrus_deleted and hash_hex:
         preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
@@ -216,11 +178,65 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         ctx.emit(f"Deleted {preview}.")

     if hydrus_deleted or local_deleted:
-        return 0
+        return True

     log("Selected result has neither Hydrus hash nor local file target")
+    return False
+
+
+def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
+    # Help
+    try:
+        if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
+            log(json.dumps(CMDLET, ensure_ascii=False, indent=2))
+            return 0
+    except Exception:
+        pass
+
+    override_hash: str | None = None
+    conserve: str | None = None
+    lib_root: str | None = None
+    reason_tokens: list[str] = []
+    i = 0
+    while i < len(args):
+        token = args[i]
+        low = str(token).lower()
+        if low in {"-hash", "--hash", "hash"} and i + 1 < len(args):
+            override_hash = str(args[i + 1]).strip()
+            i += 2
+            continue
+        if low in {"-conserve", "--conserve"} and i + 1 < len(args):
+            value = str(args[i + 1]).strip().lower()
+            if value in {"local", "hydrus"}:
+                conserve = value
+                i += 2
+                continue
+        if low in {"-lib-root", "--lib-root", "lib-root"} and i + 1 < len(args):
+            lib_root = str(args[i + 1]).strip()
+            i += 2
+            continue
+        reason_tokens.append(token)
+        i += 1
+
+    reason = " ".join(token for token in reason_tokens if str(token).strip()).strip()
+
+    items = []
+    if isinstance(result, list):
+        items = result
+    elif result:
+        items = [result]
+
+    if not items:
+        log("No items to delete", file=sys.stderr)
         return 1
+
+    success_count = 0
+    for item in items:
+        if _process_single_item(item, override_hash, conserve, lib_root, reason, config):
+            success_count += 1
+
+    return 0 if success_count > 0 else 1

 CMDLET = Cmdlet(
     name="delete-file",
     summary="Delete a file locally and/or from Hydrus, including database entries.",
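The rewrite above is the core of this commit: _run no longer unwraps an @N selection to its first item, it fans the whole list out to _process_single_item and aggregates a single exit code. The pattern in isolation (process_one stands in for _process_single_item; names are illustrative):

    # Sketch only: the normalize-then-iterate batch pattern adopted by _run.
    from typing import Any, Callable

    def run_batch(result: Any, process_one: Callable[[Any], bool]) -> int:
        items = result if isinstance(result, list) else ([result] if result else [])
        if not items:
            return 1  # nothing to delete
        successes = sum(1 for item in items if process_one(item))
        return 0 if successes > 0 else 1  # succeed if at least one item succeeded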
@@ -8,7 +8,7 @@ import models
 import pipeline as ctx
 from helper import hydrus as hydrus_wrapper
 from ._shared import Cmdlet, CmdletArg, normalize_hash, parse_tag_arguments
-from helper.logger import log
+from helper.logger import debug, log


 CMDLET = Cmdlet(
@@ -68,6 +68,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     # @5 or @{2,5,8} to delete tags from ResultTable by index
     tags_from_at_syntax = []
     hash_from_at_syntax = None
+    file_path_from_at_syntax = None

     if rest and str(rest[0]).startswith("@"):
         selector_arg = str(rest[0])
@@ -100,6 +101,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                 # Also get hash from first item for consistency
                 if not hash_from_at_syntax:
                     hash_from_at_syntax = getattr(item, 'hash_hex', None)
+                if not file_path_from_at_syntax:
+                    file_path_from_at_syntax = getattr(item, 'file_path', None)

             if not tags_from_at_syntax:
                 log(f"No tags found at indices: {indices}")
@@ -112,108 +115,165 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             return 1

     # Handle @N selection which creates a list - extract the first item
-    if isinstance(result, list) and len(result) > 0:
     # If we have a list of TagItems, we want to process ALL of them if no args provided
     # This handles: delete-tag @1 (where @1 expands to a list containing one TagItem)
-        if not args and hasattr(result[0], '__class__') and result[0].__class__.__name__ == 'TagItem':
-            # We will extract tags from the list later
+    # Also handles: delete-tag @1,2 (where we want to delete tags from multiple files)
+
+    # Normalize result to a list for processing
+    items_to_process = []
+    if isinstance(result, list):
+        items_to_process = result
+    elif result:
+        items_to_process = [result]
+
+    # If we have TagItems and no args, we are deleting the tags themselves
+    # If we have Files (or other objects) and args, we are deleting tags FROM those files
+
+    # Check if we are in "delete selected tags" mode (TagItems)
+    is_tag_item_mode = (items_to_process and hasattr(items_to_process[0], '__class__') and
+                        items_to_process[0].__class__.__name__ == 'TagItem')
+
+    if is_tag_item_mode:
+        # Collect all tags to delete from the TagItems
+        # Group by hash/file_path to batch operations if needed, or just process one by one
+        # For simplicity, we'll process one by one or group by file
         pass
     else:
-        result = result[0]
+        # "Delete tags from files" mode
+        # We need args (tags to delete)
+        if not args and not tags_from_at_syntax:
+            log("Requires at least one tag argument when deleting from files")
+            return 1

-    # Determine tags and hash to use
-    tags: list[str] = []
-    hash_hex = None
+    # Process each item
+    success_count = 0
+
+    # If we have tags from @ syntax (e.g. delete-tag @{1,2}), we ignore the piped result for tag selection
+    # but we might need the piped result for the file context if @ selection was from a Tag table
+    # Actually, the @ selection logic above already extracted tags.

     if tags_from_at_syntax:
-        # Use tags extracted from @ syntax
+        # Special case: @ selection of tags.
+        # We already extracted tags and hash/path.
+        # Just run the deletion once using the extracted info.
+        # This preserves the existing logic for @ selection.

         tags = tags_from_at_syntax
         hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(hash_from_at_syntax)
-        log(f"[delete_tag] Using @ syntax extraction: {len(tags)} tag(s) to delete: {tags}")
-    elif isinstance(result, list) and result and hasattr(result[0], '__class__') and result[0].__class__.__name__ == 'TagItem':
-        # Got a list of TagItems (e.g. from delete-tag @1)
-        tags = [getattr(item, 'tag_name') for item in result if getattr(item, 'tag_name', None)]
-        # Use hash from first item
-        hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result[0], "hash_hex", None))
-    elif result and hasattr(result, '__class__') and result.__class__.__name__ == 'TagItem':
-        # Got a piped TagItem - delete this specific tag
-        tag_name = getattr(result, 'tag_name', None)
-        if tag_name:
-            tags = [tag_name]
-            hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
+        file_path = file_path_from_at_syntax
+
+        if _process_deletion(tags, hash_hex, file_path, config):
+            success_count += 1
     else:
-        # Traditional mode - parse tag arguments
-        tags = parse_tag_arguments(rest)
-        hash_hex = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(result, "hash_hex", None))
+        # Process items from pipe (or single result)
+        # If args are provided, they are the tags to delete from EACH item
+        # If items are TagItems and no args, the tag to delete is the item itself

+        tags_arg = parse_tag_arguments(rest)
+
+        for item in items_to_process:
+            tags_to_delete = []
+            item_hash = normalize_hash(override_hash) if override_hash else normalize_hash(getattr(item, "hash_hex", None))
+            item_path = getattr(item, "path", None) or getattr(item, "file_path", None) or getattr(item, "target", None)
+            # If result is a dict (e.g. from search-file), try getting path from keys
+            if not item_path and isinstance(item, dict):
+                item_path = item.get("path") or item.get("file_path") or item.get("target")
+
+            item_source = getattr(item, "source", None)
+
+            if hasattr(item, '__class__') and item.__class__.__name__ == 'TagItem':
+                # It's a TagItem
+                if tags_arg:
+                    # User provided tags to delete FROM this file (ignoring the tag name in the item?)
+                    # Or maybe they want to delete the tag in the item AND the args?
+                    # Usually if piping TagItems, we delete THOSE tags.
+                    # If args are present, maybe we should warn?
+                    # For now, if args are present, assume they override or add to the tag item?
+                    # Let's assume if args are present, we use args. If not, we use the tag name.
+                    tags_to_delete = tags_arg
+                else:
+                    tag_name = getattr(item, 'tag_name', None)
+                    if tag_name:
+                        tags_to_delete = [tag_name]
+            else:
+                # It's a File or other object
+                if tags_arg:
+                    tags_to_delete = tags_arg
+                else:
+                    # No tags provided for a file object - skip or error?
+                    # We already logged an error if no args and not TagItem mode globally,
+                    # but inside the loop we might have mixed items? Unlikely.
+                    continue
+
+            if tags_to_delete and (item_hash or item_path):
+                if _process_deletion(tags_to_delete, item_hash, item_path, config, source=item_source):
+                    success_count += 1
+
+    if success_count > 0:
+        return 0
+    return 1
+
+
+def _process_deletion(tags: list[str], hash_hex: str | None, file_path: str | None, config: Dict[str, Any], source: str | None = None) -> bool:
+    """Helper to execute the deletion logic for a single target."""
+
     if not tags:
-        log("No valid tags were provided")
-        return 1
+        return False
+
+    if not hash_hex and not file_path:
+        log("Item does not include a hash or file path")
+        return False
+
+    # Handle local file tag deletion
+    if file_path and (source == "local" or (not hash_hex and source != "hydrus")):
+        try:
+            from helper.local_library import LocalLibraryDB
+            from pathlib import Path
+
+            path_obj = Path(file_path)
+            if not path_obj.exists():
+                log(f"File not found: {file_path}")
+                return False
+
+            # Try to get local storage path from config
+            from config import get_local_storage_path
+            local_root = get_local_storage_path(config)
+
+            if not local_root:
+                # Fallback: assume file is in a library root or use its parent
+                local_root = path_obj.parent
+
+            db = LocalLibraryDB(local_root)
+            db.remove_tags(path_obj, tags)
+            debug(f"Removed {len(tags)} tag(s) from {path_obj.name} (local)")
+            return True
+
+        except Exception as exc:
+            log(f"Failed to remove local tags: {exc}")
+            return False
+
+    # Hydrus deletion logic
     if not hash_hex:
-        log("Selected result does not include a hash")
-        return 1
+        return False

     try:
         service_name = hydrus_wrapper.get_tag_service_name(config)
-    except Exception as exc:
-        log(f"Failed to resolve tag service: {exc}")
-        return 1
-
-    try:
         client = hydrus_wrapper.get_client(config)
-    except Exception as exc:
-        log(f"Hydrus client unavailable: {exc}")
-        return 1
-
         if client is None:
             log("Hydrus client unavailable")
-            return 1
+            return False

-    log(f"[delete_tag] Sending deletion request: hash={hash_hex}, tags={tags}, service={service_name}")
-    try:
-        result = client.delete_tags(hash_hex, tags, service_name)
-        log(f"[delete_tag] Hydrus response: {result}")
-    except Exception as exc:
-        log(f"Hydrus del-tag failed: {exc}")
-        return 1
-
+        debug(f"Sending deletion request: hash={hash_hex}, tags={tags}, service={service_name}")
+        client.delete_tags(hash_hex, tags, service_name)
         preview = hash_hex[:12] + ('…' if len(hash_hex) > 12 else '')
-    log(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
-
-    # Re-fetch and emit updated tags after deletion
-    try:
-        payload = client.fetch_file_metadata(hashes=[str(hash_hex)], include_service_keys_to_tags=True, include_file_urls=False)
-        items = payload.get("metadata") if isinstance(payload, dict) else None
-        if isinstance(items, list) and items:
-            meta = items[0] if isinstance(items[0], dict) else None
-            if isinstance(meta, dict):
-                # Extract tags from updated metadata
-                from cmdlets.get_tag import _extract_my_tags_from_hydrus_meta, TagItem
-                service_key = hydrus_wrapper.get_tag_service_key(client, service_name)
-                updated_tags = _extract_my_tags_from_hydrus_meta(meta, service_key, service_name)
-
-                # Emit updated tags as TagItem objects
-                from result_table import ResultTable
-                table = ResultTable("Tags", max_columns=2)
-                tag_items = []
-                for idx, tag_name in enumerate(updated_tags, start=1):
-                    tag_item = TagItem(
-                        tag_name=tag_name,
-                        tag_index=idx,
-                        hash_hex=hash_hex,
-                        source="hydrus",
-                        service_name=service_name,
-                    )
-                    tag_items.append(tag_item)
-                    table.add_result(tag_item)
-                    ctx.emit(tag_item)
-
-                # Store items for @ selection in next command (CLI will handle table management)
-                # Don't call set_last_result_table so we don't pollute history or table context
+        debug(f"Removed {len(tags)} tag(s) from {preview} via '{service_name}'.")
+        return True
     except Exception as exc:
-        log(f"Warning: Could not fetch updated tags after deletion: {exc}", file=__import__('sys').stderr)
-
-    return 0
+        log(f"Hydrus del-tag failed: {exc}")
+        return False
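_process_deletion now routes each target to one of two backends before touching Hydrus. The decision rule, isolated into a minimal sketch (names illustrative, not from the source):

    # Sketch only: backend routing applied by _process_deletion.
    def choose_backend(hash_hex, file_path, source):
        if file_path and (source == "local" or (not hash_hex and source != "hydrus")):
            return "local"   # sidecar/LocalLibraryDB path
        if hash_hex:
            return "hydrus"  # tag-service deletion via the client API
        return None          # nothing actionable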
@@ -1058,11 +1058,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
             debug(f"Torrent/magnet added: {arg[:50]}...")
         elif _is_torrent_file_or_url(arg):
             # Handle .torrent files and URLs
-            log(f"Processing torrent file/URL: {arg}", flush=True)
+            debug(f"Processing torrent file/URL: {arg}")
             magnet = _process_torrent_input(arg)
             if magnet and magnet.lower().startswith('magnet:'):
                 urls_to_download.append(magnet)
-                log(f"✓ Converted to magnet: {magnet[:70]}...", flush=True)
+                debug(f"✓ Converted to magnet: {magnet[:70]}...")
             elif magnet:
                 urls_to_download.append(magnet)
             else:
@@ -1081,17 +1081,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                     line = line.strip()
                     if line and line.lower().startswith(('http://', 'https://')):
                         urls_to_download.append(line)
-                log(f"Loaded URLs from file: {arg}", flush=True)
+                debug(f"Loaded URLs from file: {arg}")
             except Exception as e:
                 log(f"Error reading file {arg}: {e}", file=sys.stderr)
         else:
-            log(f"Ignored argument: {arg}", file=sys.stderr)
+            debug(f"Ignored argument: {arg}")

     # Item selection (for playlists/formats)
     # Note: -item flag is deprecated in favor of @N pipeline selection, but kept for compatibility
     playlist_items = parsed.get("item")
     if playlist_items:
-        log(f"Item selection: {playlist_items}", flush=True)
+        debug(f"Item selection: {playlist_items}")
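From here on the changeset systematically downgrades progress chatter from log() to debug(). The helper.logger API itself is not shown in this diff; a plausible shape, flagged as an assumption (including the env-var switch):

    # Sketch only: assumed split in helper.logger between user-facing log()
    # and opt-in debug(). The VERBOSE switch is hypothetical.
    import os
    import sys

    VERBOSE = os.environ.get("DOWNLOW_DEBUG") == "1"

    def log(msg: str, file=sys.stderr, **kwargs) -> None:
        print(msg, file=file, **kwargs)       # always visible

    def debug(msg: str, file=sys.stderr, **kwargs) -> None:
        if VERBOSE:
            print(msg, file=file, **kwargs)   # hidden unless debugging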
@@ -1149,7 +1149,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
         if isinstance(item, dict) and item.get('__playlist_url'):
             playlist_url = item.get('__playlist_url')
             item_num = item.get('__playlist_item', 1)
-            log(f"📍 Playlist item from add-file: #{item_num}", flush=True)
+            debug(f"📍 Playlist item from add-file: #{item_num}")
             # Add to download list with marker
             urls_to_download.append({
                 '__playlist_url': playlist_url,
@@ -1166,7 +1166,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:

             if playlist_url:
                 # Playlist item selected - need to download this specific track
-                log(f"📍 Playlist item selected: #{item_num} - {item.get('title', 'Unknown')}", flush=True)
+                debug(f"📍 Playlist item selected: #{item_num} - {item.get('title', 'Unknown')}")
                 # Add to download list - the playlist will be probed and item extracted
                 # Store with special marker so we know which item to select
                 urls_to_download.append({
@@ -1177,14 +1177,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:

             # ====== CHECK FOR FORMAT SELECTION RESULT ======
             if isinstance(item, dict) and item.get('format_id') is not None and item.get('source_url'):
-                log(f"🎬 Format selected from pipe: {item.get('format_id')}", flush=True)
-                log(f"   Source URL: {item.get('source_url')}", flush=True)
+                debug(f"🎬 Format selected from pipe: {item.get('format_id')}")
+                debug(f"   Source URL: {item.get('source_url')}")
                 # Store as dict so we can extract format_id + source_url during download
                 urls_to_download.append(item)
                 continue
             elif hasattr(item, 'format_id') and hasattr(item, 'source_url') and item.format_id is not None:
-                log(f"🎬 Format selected from pipe: {item.format_id}", flush=True)
-                log(f"   Source URL: {item.source_url}", flush=True)
+                debug(f"🎬 Format selected from pipe: {item.format_id}")
+                debug(f"   Source URL: {item.source_url}")
                 urls_to_download.append({
                     'format_id': item.format_id,
                     'source_url': item.source_url,
@@ -1204,9 +1204,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                 isbn = metadata.get('isbn') or item.get('isbn')
                 olid = metadata.get('olid') or item.get('olid')

-                log(f"[search-result] OpenLibrary: '{title}'", flush=True)
+                debug(f"[search-result] OpenLibrary: '{title}'")
                 if isbn:
-                    log(f"   ISBN: {isbn}", flush=True)
+                    debug(f"   ISBN: {isbn}")

                 # Check if book is borrowable from ebook_access field or status
                 ebook_access = metadata.get('ebook_access') or item.get('ebook_access', '')
@@ -1217,8 +1217,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                 is_borrowable = _is_openlibrary_downloadable(ebook_access, status_text)

                 if is_borrowable:
-                    log(f"   ✓ Available for borrowing on Archive.org", flush=True)
-                    log(f"   → Queued for auto-borrowing...", flush=True)
+                    debug(f"   ✓ Available for borrowing on Archive.org")
+                    debug(f"   → Queued for auto-borrowing...")
                     # Queue borrow request as special dict object
                     # We need OCAID (Archive.org ID), not just numeric OLID
                     ocaid = archive_id
@@ -1233,7 +1233,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                             ol_data = r.json()
                             ocaid = ol_data.get('ocaid')
                         except Exception as e:
-                            log(f"   ⚠ Could not fetch OCAID from OpenLibrary: {e}", file=sys.stderr)
+                            debug(f"   ⚠ Could not fetch OCAID from OpenLibrary: {e}")

                     if ocaid:
                         urls_to_download.append({
@@ -1246,7 +1246,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                     else:
                         # OCAID not found - book claims borrowable but not on Archive.org
                         # Fall back to LibGen search instead
-                        log(f"   ⚠ Book marked borrowable but not found on Archive.org", file=sys.stderr)
+                        debug(f"   ⚠ Book marked borrowable but not found on Archive.org")
                         if isbn:
                             try:
                                 from helper.search_provider import get_provider
@@ -1258,19 +1258,19 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                                     url = libgen_result.get('target') if isinstance(libgen_result, dict) else getattr(libgen_result, 'target', None)
                                     if url:
                                         urls_to_download.append(url)
-                                        log(f"   ✓ Found on LibGen instead", flush=True)
+                                        debug(f"   ✓ Found on LibGen instead")
                                     else:
-                                        log(f"   ⚠ Not found on LibGen", file=sys.stderr)
+                                        debug(f"   ⚠ Not found on LibGen")
                                 else:
-                                    log(f"   ⚠ Not found on LibGen", file=sys.stderr)
+                                    debug(f"   ⚠ Not found on LibGen")
                             else:
-                                log(f"   ⚠ LibGen provider not available", file=sys.stderr)
+                                debug(f"   ⚠ LibGen provider not available")
                             except Exception as e:
-                                log(f"   ✗ Error searching LibGen: {e}", file=sys.stderr)
+                                debug(f"   ✗ Error searching LibGen: {e}")
                         else:
                             # Book is NOT borrowable - route to LibGen
                             if isbn:
-                                log(f"   ⚠ Not available on Archive.org - attempting LibGen...", flush=True)
+                                debug(f"   ⚠ Not available on Archive.org - attempting LibGen...")
                                 try:
                                     from helper.search_provider import get_provider
                                     libgen_provider = get_provider("libgen", config)
@@ -1281,21 +1281,21 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                                     url = libgen_result.get('target') if isinstance(libgen_result, dict) else getattr(libgen_result, 'target', None)
                                     if url:
                                         urls_to_download.append(url)
-                                        log(f"   ✓ Found on LibGen", flush=True)
+                                        debug(f"   ✓ Found on LibGen")
                                     else:
-                                        log(f"   ⚠ Not found on LibGen", file=sys.stderr)
+                                        debug(f"   ⚠ Not found on LibGen")
                                 else:
-                                    log(f"   ⚠ Not found on LibGen", flush=True)
-                                    log(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data", flush=True)
+                                    debug(f"   ⚠ Not found on LibGen")
+                                    debug(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data")
                                 else:
-                                    log(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data", flush=True)
+                                    debug(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data")
                                 except Exception as e:
-                                    log(f"   ⚠ Could not search LibGen: {e}", file=sys.stderr)
-                                    log(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data", flush=True)
+                                    debug(f"   ⚠ Could not search LibGen: {e}")
+                                    debug(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data")
                             else:
-                                log(f"   ⚠ ISBN not available", flush=True)
-                                log(f"   ▶ Visit: {item.get('target', 'https://openlibrary.org')}", flush=True)
-                                log(f"   ▶ Or find ISBN and use: search-file -provider libgen 'isbn:\"<ISBN>\"'", flush=True)
+                                debug(f"   ⚠ ISBN not available")
+                                debug(f"   ▶ Visit: {item.get('target', 'https://openlibrary.org')}")
+                                debug(f"   ▶ Or find ISBN and use: search-file -provider libgen 'isbn:\"<ISBN>\"'")
             elif origin == 'soulseek':
                 # Handle Soulseek downloads using the provider
                 metadata = item.get('full_metadata', {}) if isinstance(item.get('full_metadata'), dict) else {}
@@ -1350,18 +1350,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                         )
                         pipeline_context.emit(result_dict)
                     else:
-                        log(f"   ✗ Download failed (peer may be offline)", file=sys.stderr)
+                        debug(f"   ✗ Download failed (peer may be offline)")
                         if db:
                             db.append_worker_stdout(worker_id, f"✗ Download failed for {title}")
-                        log(f"   ▶ Try another result: search-file -provider soulseek \"...\" | @2 | download-data", flush=True)
+                        debug(f"   ▶ Try another result: search-file -provider soulseek \"...\" | @2 | download-data")
                 except Exception as e:
-                    log(f"   ✗ Download error: {e}", file=sys.stderr)
+                    debug(f"   ✗ Download error: {e}")
                     if db:
                         db.append_worker_stdout(worker_id, f"✗ Error: {e}")
-                    log(f"   ▶ Alternative: search-soulseek -download \"{title}\" -storage <location>", flush=True)
+                    debug(f"   ▶ Alternative: search-soulseek -download \"{title}\" -storage <location>")
             else:
-                log(f"[search-result] Soulseek: '{title}'", flush=True)
-                log(f"   ⚠ Missing download info (username/filename)", flush=True)
+                debug(f"[search-result] Soulseek: '{title}'")
+                debug(f"   ⚠ Missing download info (username/filename)")
                 if db:
                     db.append_worker_stdout(worker_id, f"⚠ Missing download info for {title}")
         elif origin == 'libgen':
@@ -1380,17 +1380,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                     'book_id': book_id,
                 }
                 urls_to_download.append(url_entry)
-                log(f"[search-result] LibGen: '{title}'", flush=True)
-                log(f"   ✓ Queued for download", flush=True)
+                debug(f"[search-result] LibGen: '{title}'")
+                debug(f"   ✓ Queued for download")
                 if mirrors:
-                    log(f"   Mirrors available: {len(mirrors)}", flush=True)
+                    debug(f"   Mirrors available: {len(mirrors)}")
         elif origin == 'debrid':
             # Debrid results can use download-data
             url = item.get('target')
             if url:
                 urls_to_download.append(str(url))
-                log(f"[search-result] Debrid: '{title}'", flush=True)
-                log(f"   ✓ Queued for download", flush=True)
+                debug(f"[search-result] Debrid: '{title}'")
+                debug(f"   ✓ Queued for download")
         else:
             # Regular fields for non-search results
             url = item.get('url') or item.get('link') or item.get('href') or item.get('target')
@@ -1407,9 +1407,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                 isbn = metadata.get('isbn') or getattr(item, 'isbn', None)
                 olid = metadata.get('olid') or getattr(item, 'olid', None)

-                log(f"[search-result] OpenLibrary: '{title}'", flush=True)
+                debug(f"[search-result] OpenLibrary: '{title}'")
                 if isbn:
-                    log(f"   ISBN: {isbn}", flush=True)
+                    debug(f"   ISBN: {isbn}")

                 # Check if book is borrowable from ebook_access field or status
                 ebook_access = metadata.get('ebook_access') or getattr(item, 'ebook_access', '')
@@ -1421,8 +1421,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:

                 if is_borrowable:
                     # Book IS borrowable on Archive.org
-                    log(f"   ✓ Available for borrowing on Archive.org", flush=True)
-                    log(f"   → Queued for auto-borrowing...", flush=True)
+                    debug(f"   ✓ Available for borrowing on Archive.org")
+                    debug(f"   → Queued for auto-borrowing...")
                     # Queue borrow request as special dict object
                     ocaid = archive_id
                     if not ocaid and isbn:
@@ -1434,7 +1434,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                             ol_data = r.json()
                             ocaid = ol_data.get('ocaid')
                         except Exception as e:
-                            log(f"   ⚠ Could not fetch OCAID from OpenLibrary: {e}", file=sys.stderr)
+                            debug(f"   ⚠ Could not fetch OCAID from OpenLibrary: {e}")
                         if ocaid:
                             urls_to_download.append({
                                 '__borrow_request__': True,
@@ -1446,7 +1446,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                     else:
                         # OCAID not found - book claims borrowable but not on Archive.org
                         # Fall back to LibGen search instead
-                        log(f"   ⚠ No Archive.org ID found - attempting LibGen instead...", file=sys.stderr)
+                        debug(f"   ⚠ No Archive.org ID found - attempting LibGen instead...")
                         if isbn:
                             try:
                                 from helper.search_provider import get_provider
@@ -1458,21 +1458,21 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                                     url = libgen_result.get('target') if isinstance(libgen_result, dict) else getattr(libgen_result, 'target', None)
                                     if url:
                                         urls_to_download.append(url)
-                                        log(f"   ✓ Found on LibGen instead", flush=True)
+                                        debug(f"   ✓ Found on LibGen instead")
                                     else:
-                                        log(f"   ⚠ Not found on LibGen", file=sys.stderr)
+                                        debug(f"   ⚠ Not found on LibGen")
                                 else:
-                                    log(f"   ⚠ Not found on LibGen", file=sys.stderr)
+                                    debug(f"   ⚠ Not found on LibGen")
                                 else:
-                                    log(f"   ⚠ LibGen provider not available", file=sys.stderr)
+                                    debug(f"   ⚠ LibGen provider not available")
                             except Exception as e:
-                                log(f"   ✗ Error searching LibGen: {e}", file=sys.stderr)
+                                debug(f"   ✗ Error searching LibGen: {e}")
                         else:
-                            log(f"   ⚠ ISBN not available for LibGen fallback", file=sys.stderr)
+                            debug(f"   ⚠ ISBN not available for LibGen fallback")
                 else:
                     # Book is NOT borrowable - route to LibGen
                     if isbn:
-                        log(f"   ⚠ Not available on Archive.org - attempting LibGen...", flush=True)
+                        debug(f"   ⚠ Not available on Archive.org - attempting LibGen...")
                         try:
                             from helper.search_provider import get_provider
                             libgen_provider = get_provider("libgen", config)
@@ -1483,21 +1483,21 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                                 url = libgen_result.get('target') if isinstance(libgen_result, dict) else getattr(libgen_result, 'target', None)
                                 if url:
                                     urls_to_download.append(url)
-                                    log(f"   ✓ Found on LibGen", flush=True)
+                                    debug(f"   ✓ Found on LibGen")
                                 else:
-                                    log(f"   ⚠ Not found on LibGen", file=sys.stderr)
+                                    debug(f"   ⚠ Not found on LibGen")
                             else:
-                                log(f"   ⚠ Not found on LibGen", flush=True)
-                                log(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data", flush=True)
+                                debug(f"   ⚠ Not found on LibGen")
+                                debug(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data")
                             else:
-                                log(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data", flush=True)
+                                debug(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data")
                         except Exception as e:
-                            log(f"   ⚠ Could not search LibGen: {e}", file=sys.stderr)
-                            log(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data", flush=True)
+                            debug(f"   ⚠ Could not search LibGen: {e}")
+                            debug(f"   ▶ To search LibGen: search-file -provider libgen 'isbn:{isbn}' | @1 | download-data")
                     else:
-                        log(f"   ⚠ ISBN not available", flush=True)
-                        log(f"   ▶ Visit: {getattr(item, 'target', 'https://openlibrary.org')}", flush=True)
-                        log(f"   ▶ Or find ISBN and use: search-file -provider libgen 'isbn:\"<ISBN>\"'", flush=True)
+                        debug(f"   ⚠ ISBN not available")
+                        debug(f"   ▶ Visit: {getattr(item, 'target', 'https://openlibrary.org')}")
+                        debug(f"   ▶ Or find ISBN and use: search-file -provider libgen 'isbn:\"<ISBN>\"'")
             elif origin == 'soulseek':
                 # Handle Soulseek downloads using the provider
                 metadata = getattr(item, 'full_metadata', {}) if isinstance(getattr(item, 'full_metadata', None), dict) else {}
@@ -1510,8 +1510,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                     import asyncio
                     from helper.search_provider import SoulSeekProvider
                     provider = SoulSeekProvider(config)
-                    log(f"[search-result] Soulseek: '{title}'", flush=True)
-                    log(f"   ▶ Downloading from {username}...", flush=True)
+                    debug(f"[search-result] Soulseek: '{title}'")
+                    debug(f"   ▶ Downloading from {username}...")

                    if db:
                         db.append_worker_stdout(worker_id, f"Downloading from Soulseek: {title} (from {username})")
@@ -1532,7 +1532,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                     if success:
                         downloaded_file = Path(provider.DOWNLOAD_DIR) / Path(filename).name
                         if downloaded_file.exists():
-                            log(f"   ✓ Downloaded: {downloaded_file.name}", flush=True)
+                            debug(f"   ✓ Downloaded: {downloaded_file.name}")
                             files_downloaded_directly += 1
                             if db:
                                 db.append_worker_stdout(worker_id, f"✓ Downloaded: {downloaded_file.name}")
@@ -1552,18 +1552,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
                             )
                             pipeline_context.emit(result_dict)
                     else:
-                        log(f"   ✗ Download failed (peer may be offline)", file=sys.stderr)
+                        debug(f"   ✗ Download failed (peer may be offline)")
                         if db:
                             db.append_worker_stdout(worker_id, f"✗ Download failed for {title}")
-                        log(f"   ▶ Try another result: search-file -provider soulseek \"...\" | @2 | download-data", flush=True)
+                        debug(f"   ▶ Try another result: search-file -provider soulseek \"...\" | @2 | download-data")
                 except Exception as e:
-                    log(f"   ✗ Download error: {e}", file=sys.stderr)
+                    debug(f"   ✗ Download error: {e}")
                     if db:
                         db.append_worker_stdout(worker_id, f"✗ Error: {e}")
-                    log(f"   ▶ Alternative: search-soulseek -download \"{title}\" -storage <location>", flush=True)
+                    debug(f"   ▶ Alternative: search-soulseek -download \"{title}\" -storage <location>")
             else:
-                log(f"[search-result] Soulseek: '{title}'", flush=True)
-                log(f"   ⚠ Missing download info (username/filename)", flush=True)
+                debug(f"[search-result] Soulseek: '{title}'")
+                debug(f"   ⚠ Missing download info (username/filename)")
                 if db:
                     db.append_worker_stdout(worker_id, f"⚠ Missing download info for {title}")
         elif origin == 'libgen':
@@ -1592,15 +1592,15 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
             urls_to_download.append(str(url))

     if not urls_to_download and files_downloaded_directly == 0:
-        log(f"No downloadable URLs found", file=sys.stderr)
+        debug(f"No downloadable URLs found")
         return 1

-    log(f"Processing {len(urls_to_download)} URL(s)", flush=True)
+    debug(f"Processing {len(urls_to_download)} URL(s)")
     for i, u in enumerate(urls_to_download, 1):
         if isinstance(u, dict):
-            log(f"   [{i}] Format: {u.get('format_id', '?')} from {u.get('source_url', '?')[:60]}...", flush=True)
+            debug(f"   [{i}] Format: {u.get('format_id', '?')} from {u.get('source_url', '?')[:60]}...")
         else:
-            log(f"   [{i}] URL: {str(u)[:60]}...", flush=True)
+            debug(f"   [{i}] URL: {str(u)[:60]}...")

     # ========================================================================
     # RESOLVE OUTPUT DIRECTORY
@@ -1611,8 +1611,24 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
     # Priority 1: --storage flag
     if storage_location:
         try:
+            # For 'local' storage, check config first before using default
+            if storage_location.lower() == 'local':
+                from config import get_local_storage_path
+                try:
+                    configured_path = get_local_storage_path(config)
+                    if configured_path:
+                        final_output_dir = configured_path
+                        debug(f"Using configured local storage path: {final_output_dir}")
+                    else:
+                        final_output_dir = SharedArgs.resolve_storage(storage_location)
|
final_output_dir = SharedArgs.resolve_storage(storage_location)
|
||||||
log(f"Using storage location: {storage_location} → {final_output_dir}", flush=True)
|
debug(f"Using default storage location: {storage_location} → {final_output_dir}")
|
||||||
|
except Exception as exc:
|
||||||
|
log(f"⚠️ Error reading local storage config: {exc}", file=sys.stderr)
|
||||||
|
final_output_dir = SharedArgs.resolve_storage(storage_location)
|
||||||
|
debug(f"Falling back to default storage location: {storage_location} → {final_output_dir}")
|
||||||
|
else:
|
||||||
|
final_output_dir = SharedArgs.resolve_storage(storage_location)
|
||||||
|
debug(f"Using storage location: {storage_location} → {final_output_dir}")
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
log(str(e), file=sys.stderr)
|
log(str(e), file=sys.stderr)
|
||||||
return 1
|
return 1
|
||||||
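The hunk above gives configured local storage priority over the default resolver. A minimal standalone sketch of that priority order follows; the two resolver callables are injected stand-ins for this repo's get_local_storage_path and SharedArgs.resolve_storage, whose exact signatures are assumed:

    from pathlib import Path
    from typing import Callable, Optional

    def resolve_storage_dir(
        storage_location: str,
        get_configured_local: Callable[[], Optional[Path]],
        resolve_default: Callable[[str], Path],
    ) -> Path:
        # Config-first resolution for 'local'; default resolver otherwise.
        if storage_location.lower() == 'local':
            try:
                configured = get_configured_local()
                if configured:
                    return configured
            except Exception:
                pass  # a broken config falls through to the default resolver
        return resolve_default(storage_location)

    # Usage with stand-in resolvers:
    print(resolve_storage_dir('local', lambda: Path.home() / 'Files',
                              lambda s: Path.home() / 'Videos'))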
@@ -1621,7 +1637,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 if final_output_dir is None and resolve_output_dir is not None:
 try:
 final_output_dir = resolve_output_dir(config)
-log(f"Using config resolver: {final_output_dir}", flush=True)
+debug(f"Using config resolver: {final_output_dir}")
 except Exception:
 pass

@@ -1629,14 +1645,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 if final_output_dir is None and config and config.get("outfile"):
 try:
 final_output_dir = Path(config["outfile"]).expanduser()
-log(f"Using config outfile: {final_output_dir}", flush=True)
+debug(f"Using config outfile: {final_output_dir}")
 except Exception:
 pass

 # Priority 5: Default (home/Videos)
 if final_output_dir is None:
 final_output_dir = Path.home() / "Videos"
-log(f"Using default directory: {final_output_dir}", flush=True)
+debug(f"Using default directory: {final_output_dir}")

 # Ensure directory exists
 try:
@@ -1664,7 +1680,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 current_format_selector = format_selector
 actual_url = url
 if isinstance(url, dict) and url.get('format_id') and url.get('source_url'):
-log(f"🎬 Format selected: {url.get('format_id')}", flush=True)
+debug(f"🎬 Format selected: {url.get('format_id')}")
 format_id = url.get('format_id')
 current_format_selector = format_id

@@ -1674,7 +1690,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 if vcodec and vcodec != "none" and (not acodec or acodec == "none"):
 # Video-only format, add bestaudio automatically
 current_format_selector = f"{format_id}+bestaudio"
-log(f"  ℹ️ Video-only format detected, automatically adding bestaudio", flush=True)
+debug(f"  ℹ️ Video-only format detected, automatically adding bestaudio")

 actual_url = url.get('source_url')
 url = actual_url  # Use the actual URL for further processing
@@ -1688,15 +1704,15 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:

 book_id = url.get('book_id')
 if not book_id:
-log(f"  ✗ Missing book ID for borrowing", file=sys.stderr)
+debug(f"  ✗ Missing book ID for borrowing")
 exit_code = 1
 continue

 title_val = url.get('title', 'Unknown Book')
 book_id_str = str(book_id)

-log(f"[auto-borrow] Starting borrow for: {title_val}", flush=True)
+debug(f"[auto-borrow] Starting borrow for: {title_val}")
-log(f"  Book ID: {book_id_str}", flush=True)
+debug(f"  Book ID: {book_id_str}")

 # Get Archive.org credentials
 email, password = credential_openlibrary(config)
@@ -1708,33 +1724,33 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:

 # Attempt to borrow and download
 try:
-log(f"  → Logging into Archive.org...", flush=True)
+debug(f"  → Logging into Archive.org...")
 from helper.archive_client import login
 import requests
 try:
 session = login(email, password)
 except requests.exceptions.Timeout:
-log(f"  ✗ Timeout logging into Archive.org (server not responding)", file=sys.stderr)
+debug(f"  ✗ Timeout logging into Archive.org (server not responding)")
 exit_code = 1
 continue
 except requests.exceptions.RequestException as e:
-log(f"  ✗ Error connecting to Archive.org: {e}", file=sys.stderr)
+debug(f"  ✗ Error connecting to Archive.org: {e}")
 exit_code = 1
 continue

-log(f"  → Borrowing book...", flush=True)
+debug(f"  → Borrowing book...")
 try:
 session = loan(session, book_id_str, verbose=True)
 except requests.exceptions.Timeout:
-log(f"  ✗ Timeout while borrowing (server not responding)", file=sys.stderr)
+debug(f"  ✗ Timeout while borrowing (server not responding)")
 exit_code = 1
 continue
 except requests.exceptions.RequestException as e:
-log(f"  ✗ Error while borrowing: {e}", file=sys.stderr)
+debug(f"  ✗ Error while borrowing: {e}")
 exit_code = 1
 continue

-log(f"  → Extracting page information...", flush=True)
+debug(f"  → Extracting page information...")
 # Try both URL formats
 book_urls = [
 f"https://archive.org/borrow/{book_id_str}",
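The auto-borrow path above walks login → loan → page extraction, trying more than one Archive.org URL format and recording the last error. A minimal sketch of that retry shape; the extract callable stands in for this repo's get_book_infos, and the candidate URL list is whatever the caller assembled (only the borrow/ form is shown in the diff):

    import requests
    from typing import Callable, Iterable, Optional, Tuple

    def first_successful_extract(
        book_urls: Iterable[str],
        extract: Callable[[str], Tuple[Optional[str], list]],
    ) -> Optional[Tuple[str, list]]:
        """Try each candidate URL format until page links come back."""
        last_error = None
        for url in book_urls:
            try:
                title, links = extract(url)
                if title and links:
                    return title, links
            except requests.exceptions.Timeout:
                last_error = "Timeout while extracting pages"
                continue
            except Exception as e:
                last_error = str(e)
                continue
        return None  # caller reports last_error and skips this book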
@@ -1749,24 +1765,24 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 try:
 title, links, metadata = get_book_infos(session, book_url)
 if title and links:
-log(f"  → Found {len(links)} pages", flush=True)
+debug(f"  → Found {len(links)} pages")
 break
 except requests.exceptions.Timeout:
 last_error = "Timeout while extracting pages"
-log(f"  ⚠ Timeout while extracting from {book_url}", flush=True)
+debug(f"  ⚠ Timeout while extracting from {book_url}")
 continue
 except Exception as e:
 last_error = str(e)
-log(f"  ⚠ Failed to extract from {book_url}: {e}", flush=True)
+debug(f"  ⚠ Failed to extract from {book_url}: {e}")
 continue

 if not links:
-log(f"  ✗ Could not extract book pages (Last error: {last_error})", file=sys.stderr)
+debug(f"  ✗ Could not extract book pages (Last error: {last_error})")
 exit_code = 1
 continue

 # Download pages
-log(f"  → Downloading {len(links)} pages...", flush=True)
+debug(f"  → Downloading {len(links)} pages...")
 with tempfile.TemporaryDirectory() as temp_dir:
 # download(session, n_threads, directory, links, scale, book_id)
 images = download(
@@ -1779,16 +1795,16 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 )

 if not images:
-log(f"  ✗ No pages downloaded", file=sys.stderr)
+debug(f"  ✗ No pages downloaded")
 exit_code = 1
 continue

-log(f"  ✓ Downloaded {len(images)} pages", flush=True)
+debug(f"  ✓ Downloaded {len(images)} pages")

 # Try to merge into PDF
 try:
 import img2pdf
-log(f"  → Merging pages into PDF...", flush=True)
+debug(f"  → Merging pages into PDF...")

 filename = title if title else f"book_{book_id_str}"
 filename = "".join(c for c in filename if c.isalnum() or c in (' ', '.', '-'))[:100]
@@ -1805,7 +1821,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 with open(output_path, 'wb') as f:
 f.write(pdf_content)

-log(f"  ✓ Successfully borrowed and saved to: {output_path}", flush=True)
+debug(f"  ✓ Successfully borrowed and saved to: {output_path}")
 downloaded_files.append(str(output_path))

 # Emit result for downstream cmdlets
@@ -1836,7 +1852,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 pipeline_context.emit(pipe_obj)
 exit_code = 0
 except ImportError:
-log(f"  ⚠ img2pdf not available - saving pages as collection", file=sys.stderr)
+debug(f"  ⚠ img2pdf not available - saving pages as collection")
 # Just copy images to output dir
 filename = title if title else f"book_{book_id_str}"
 filename = "".join(c for c in filename if c.isalnum() or c in (' ', '.', '-'))[:100]
@@ -1847,7 +1863,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 i += 1

 shutil.copytree(temp_dir, str(output_dir))
-log(f"  ✓ Successfully borrowed and saved to: {output_dir}", flush=True)
+debug(f"  ✓ Successfully borrowed and saved to: {output_dir}")
 downloaded_files.append(str(output_dir))

 # Emit result for downstream cmdlets
@@ -1877,7 +1893,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 exit_code = 0

 except Exception as e:
-log(f"  ✗ Borrow/download failed: {e}", file=sys.stderr)
+debug(f"  ✗ Borrow/download failed: {e}")
 import traceback
 traceback.print_exc()
 exit_code = 1
@@ -1885,11 +1901,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 continue  # Skip normal URL handling

 except ImportError as e:
-log(f"  ✗ Archive.org tools not available: {e}", file=sys.stderr)
+debug(f"  ✗ Archive.org tools not available: {e}")
 exit_code = 1
 continue
 except Exception as e:
-log(f"  ✗ Auto-borrow error: {e}", file=sys.stderr)
+debug(f"  ✗ Auto-borrow error: {e}")
 import traceback
 traceback.print_exc()
 exit_code = 1
@@ -1905,7 +1921,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 book_id = url.get('book_id', '')

 if not primary_url:
-log(f"Skipping libgen entry: no primary URL", file=sys.stderr)
+debug(f"Skipping libgen entry: no primary URL")
 exit_code = 1
 continue

@@ -1916,11 +1932,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 # Remove duplicates while preserving order
 mirrors_to_try = list(dict.fromkeys(mirrors_to_try))

-log(f"🔄 LibGen download with mirror fallback (book_id: {book_id})", flush=True)
+debug(f"🔄 LibGen download with mirror fallback (book_id: {book_id})")
-log(f"  Primary: {primary_url[:80]}...", flush=True)
+debug(f"  Primary: {primary_url[:80]}...")

 if len(mirrors_to_try) > 1:
-log(f"  {len(mirrors_to_try) - 1} alternative mirror(s) available", flush=True)
+debug(f"  {len(mirrors_to_try) - 1} alternative mirror(s) available")

 # Resolve cookies path
 final_cookies_path_libgen = None
@@ -1941,7 +1957,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 for mirror_idx, mirror_url in enumerate(mirrors_to_try, 1):
 try:
 if mirror_idx > 1:
-log(f"  → Trying mirror #{mirror_idx}: {mirror_url[:80]}...", flush=True)
+debug(f"  → Trying mirror #{mirror_idx}: {mirror_url[:80]}...")

 # Use libgen_service's download_from_mirror for proper libgen handling
 from helper.libgen_service import download_from_mirror
@@ -1954,12 +1970,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 success = download_from_mirror(
 mirror_url=mirror_url,
 output_path=file_path,
-log_info=lambda msg: log(f"  {msg}", flush=True),
+log_info=lambda msg: debug(f"  {msg}"),
-log_error=lambda msg: log(f"  ⚠ {msg}", file=sys.stderr)
+log_error=lambda msg: debug(f"  ⚠ {msg}")
 )

 if success and file_path.exists():
-log(f"  ✓ Downloaded successfully from mirror #{mirror_idx}", flush=True)
+debug(f"  ✓ Downloaded successfully from mirror #{mirror_idx}")
 successful_mirror = mirror_url
 download_succeeded = True

@@ -1984,9 +2000,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 except Exception as e:
 last_error = str(e)
 if mirror_idx == 1:
-log(f"  ⚠ Primary mirror failed: {e}", flush=True)
+debug(f"  ⚠ Primary mirror failed: {e}")
 else:
-log(f"  ⚠ Mirror #{mirror_idx} failed: {e}", flush=True)
+debug(f"  ⚠ Mirror #{mirror_idx} failed: {e}")

 if not download_succeeded:
 log(f"  ✗ All mirrors failed. Last error: {last_error}", file=sys.stderr)
@@ -1998,7 +2014,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 continue  # Skip to next URL

 except Exception as e:
-log(f"  ✗ LibGen mirror fallback error: {e}", file=sys.stderr)
+debug(f"  ✗ LibGen mirror fallback error: {e}")
 import traceback
 traceback.print_exc(file=sys.stderr)
 exit_code = 1
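The mirror-fallback hunks above de-duplicate the mirror list with dict.fromkeys and try each mirror in order until one produces a file on disk. A minimal sketch of that loop under those assumptions; fetch is an injected stand-in for this repo's download_from_mirror:

    from pathlib import Path
    from typing import Callable, Iterable, Optional

    def download_with_fallback(
        mirrors: Iterable[str],
        output_path: Path,
        fetch: Callable[[str, Path], bool],
    ) -> Optional[str]:
        # De-duplicate while preserving order, then try mirrors in turn.
        for mirror in dict.fromkeys(mirrors):
            try:
                if fetch(mirror, output_path) and output_path.exists():
                    return mirror  # the mirror that succeeded
            except Exception:
                continue  # a dead mirror just moves us to the next one
        return None  # all mirrors failed; caller logs the last error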
@@ -2010,20 +2026,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 if isinstance(url, dict) and url.get('__playlist_url'):
 playlist_url = url.get('__playlist_url')
 item_num = url.get('__playlist_item', 1)
-log(f"📍 Handling selected playlist item #{item_num}", flush=True)
+debug(f"📍 Handling selected playlist item #{item_num}")
 # Convert to actual URL and set playlist_items to download only this item
 url = playlist_url
 playlist_items = str(item_num)
 # Fall through to normal handling below
 else:
-log(f"Skipping invalid URL entry: {url}", file=sys.stderr)
+debug(f"Skipping invalid URL entry: {url}")
 continue

-log(f"Probing URL: {url}", flush=True)
+debug(f"Probing URL: {url}")

 # ====== TORRENT MODE - INTERCEPT BEFORE NORMAL DOWNLOAD ======
 if torrent_mode or url.lower().startswith('magnet:'):
-log(f"🧲 Torrent/magnet mode - spawning background worker...", flush=True)
+debug(f"🧲 Torrent/magnet mode - spawning background worker...")

 try:
 # Get API key from config
@@ -2051,9 +2067,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 description=f"Torrent/magnet download via AllDebrid",
 pipe=pipeline_context.get_current_command_text()
 )
-log(f"✓ Worker created (ID: {worker_id})", flush=True)
+debug(f"✓ Worker created (ID: {worker_id})")
 except Exception as e:
-log(f"⚠ Failed to create worker: {e}", file=sys.stderr)
+debug(f"⚠ Failed to create worker: {e}")
 worker_manager = None

 # Spawn background thread to handle the download
@@ -2075,7 +2091,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 )

 worker_thread.start()
-log(f"✓ Background worker started (ID: {worker_id})", flush=True)
+debug(f"✓ Background worker started (ID: {worker_id})")

 # Emit worker info so user can track it
 worker_info = {
@@ -2110,7 +2126,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 is_actual_playlist = False  # Track if we have a real multi-item playlist

 if probe_info:
-log(f"✓ Probed: {probe_info.get('title', url)} ({probe_info.get('extractor', 'unknown')})")
+debug(f"✓ Probed: {probe_info.get('title', url)} ({probe_info.get('extractor', 'unknown')})")

 # If it's a playlist, show the result table and skip download for now
 entries = probe_info.get("entries", [])
@@ -2118,9 +2134,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 is_actual_playlist = True  # We have a real playlist with multiple items
 # Playlist detected but NO selection provided
 # Always show table for user to select items
-log(f"📋 Found playlist with {len(entries)} items")
+debug(f"📋 Found playlist with {len(entries)} items")
 _show_playlist_table(url, probe_info)
-log(f"ℹ️ Playlist displayed. To select items, use @* or @1,3,5-8 syntax after piping results")
+debug(f"ℹ️ Playlist displayed. To select items, use @* or @1,3,5-8 syntax after piping results")
 playlists_displayed += 1
 continue  # Skip to next URL - don't download playlist without selection
 elif entries and playlist_items:
@@ -2130,13 +2146,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 expanded_items = _expand_playlist_selection(playlist_items, len(entries))
 playlist_items = expanded_items
 selected_playlist_entries = _select_playlist_entries(entries, playlist_items)
-log(f"📋 Found playlist with {len(entries)} items - downloading selected: {playlist_items}")
+debug(f"📋 Found playlist with {len(entries)} items - downloading selected: {playlist_items}")
 else:
-log(f"Single item: {probe_info.get('title', 'Unknown')}")
+debug(f"Single item: {probe_info.get('title', 'Unknown')}")

 # ====== FORMAT LISTING MODE ======
 if list_formats_mode and isinstance(url, str) and url.startswith(('http://', 'https://')):
-log(f"Fetching formats for: {url}", flush=True)
+debug(f"Fetching formats for: {url}")
 from helper.download import list_formats
 from result_table import ResultTable

@@ -2209,7 +2225,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 "source_url": url,
 "index": i,
 })
-log(f"Use @N syntax to select a format and download", flush=True)
+debug(f"Use @N syntax to select a format and download")
 else:
 log(f"✗ No formats available for this URL", file=sys.stderr)

@@ -2224,7 +2240,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 from result_table import ResultTable

 if is_url_supported_by_ytdlp(url):
-log(f"Checking available formats for: {url}", flush=True)
+debug(f"Checking available formats for: {url}")
 all_formats = list_formats(url, no_playlist=is_youtube_url, playlist_items=playlist_items)

 if all_formats:
@@ -2237,14 +2253,22 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 if 0 < idx <= len(formats):
 fmt = formats[idx-1]
 current_format_selector = fmt.get("format_id")
-log(f"Selected format #{idx}: {current_format_selector}")
+# If video-only format is selected, append +bestaudio to merge with best audio
+vcodec = fmt.get("vcodec")
+acodec = fmt.get("acodec")
+if vcodec and vcodec != "none" and (not acodec or acodec == "none"):
+current_format_selector = f"{current_format_selector}+bestaudio"
+debug(f"Video-only format selected, appending bestaudio: {current_format_selector}")
+
+debug(f"Selected format #{idx}: {current_format_selector}")
 playlist_items = None  # Clear so it doesn't affect download options
 else:
 log(f"Invalid format index: {idx}", file=sys.stderr)

 elif len(formats) > 1:
 # Multiple formats available
-log(f"📊 Found {len(formats)} available formats for: {probe_info.get('title', 'Unknown')}", flush=True)
+debug(f"📊 Found {len(formats)} available formats for: {probe_info.get('title', 'Unknown')}")

 # Always show table for format selection via @N syntax
 # Show table and wait for @N selection
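The +bestaudio addition above keys off yt-dlp's vcodec/acodec fields: a format with a video codec but no audio codec is video-only, and the selector "ID+bestaudio" asks yt-dlp to merge it with the best available audio stream. A minimal self-contained sketch of that decision, using the same field conventions the diff relies on:

    def finalize_format_selector(fmt: dict) -> str:
        """Append +bestaudio when a video-only format is chosen (yt-dlp selector)."""
        selector = fmt.get("format_id", "best")
        vcodec = fmt.get("vcodec")
        acodec = fmt.get("acodec")
        # Video codec present but audio codec absent means video-only.
        if vcodec and vcodec != "none" and (not acodec or acodec == "none"):
            selector = f"{selector}+bestaudio"
        return selector

    assert finalize_format_selector({"format_id": "137", "vcodec": "avc1", "acodec": "none"}) == "137+bestaudio"
    assert finalize_format_selector({"format_id": "22", "vcodec": "avc1", "acodec": "mp4a"}) == "22"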
@@ -2294,8 +2318,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 table.set_row_selection_args(i, ["-item", str(i + 1)])

 # Display table and emit formats so they can be selected with @N
-log(str(table), flush=True)
+debug(str(table))
-log(f"💡 Use @N syntax to select a format and download (e.g., @1)", flush=True)
+debug(f"💡 Use @N syntax to select a format and download (e.g., @1)")

 # Store table for @N expansion so CLI can reconstruct commands
 pipeline_context.set_current_stage_table(table)
@@ -2317,7 +2341,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 formats_displayed = True  # Mark that we displayed formats
 continue  # Skip download, user must select format via @N

-log(f"Downloading: {url}", flush=True)
+debug(f"Downloading: {url}")

 # Resolve cookies path if specified
 final_cookies_path = None
@@ -2362,19 +2386,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 # Check if this was a playlist download (is_actual_playlist tracks if we have a multi-item playlist)
 if is_actual_playlist:
 if not selected_playlist_entries:
-log(
-"⚠ Playlist metadata unavailable; cannot emit selected items for this stage.",
-file=sys.stderr,
-)
+debug("⚠ Playlist metadata unavailable; cannot emit selected items for this stage.")
 exit_code = 1
 continue

 matched_after, _ = _snapshot_playlist_paths(selected_playlist_entries, final_output_dir)
 if not matched_after:
-log(
-"⚠ No playlist files found for the selected items after download.",
-file=sys.stderr,
-)
+debug("⚠ No playlist files found for the selected items after download.")
 exit_code = 1
 continue

@@ -2389,9 +2407,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:

 emit_targets = new_playlist_files if new_playlist_files else matched_after
 if new_playlist_files:
-log(f"📋 Playlist download completed: {len(new_playlist_files)} new file(s)")
+debug(f"📋 Playlist download completed: {len(new_playlist_files)} new file(s)")
 else:
-log(f"📁 Reusing {len(emit_targets)} cached playlist file(s)", flush=True)
+debug(f"📁 Reusing {len(emit_targets)} cached playlist file(s)")

 for playlist_file in emit_targets:
 file_hash = _compute_file_hash(playlist_file)
@@ -2444,7 +2462,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 downloaded_files.append(file_path)
 pipeline_context.emit(pipe_obj)

-log(f"✓ Downloaded: {file_path}", flush=True)
+debug(f"✓ Downloaded: {file_path}")
 else:
 log(f"Download returned no result for {url}", file=sys.stderr)
 exit_code = 1
@@ -2458,20 +2476,56 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any], emit_results:
 # Success if we downloaded files or displayed playlists/formats
 if downloaded_files or files_downloaded_directly > 0:
 total_files = len(downloaded_files) + files_downloaded_directly
-log(f"✓ Successfully downloaded {total_files} file(s)", flush=True)
+debug(f"✓ Successfully downloaded {total_files} file(s)")

+# Create a result table for the downloaded files
+# This ensures that subsequent @N commands select from these files
+# instead of trying to expand the previous command (e.g. search-file)
+if downloaded_files:
+from result_table import ResultTable
+table = ResultTable("Downloaded Files")
+for i, file_path in enumerate(downloaded_files):
+# Ensure file_path is a Path object
+if isinstance(file_path, str):
+file_path = Path(file_path)
+
+row = table.add_row()
+row.add_column("#", str(i + 1))
+row.add_column("File", file_path.name)
+row.add_column("Path", str(file_path))
+try:
+size_mb = file_path.stat().st_size / (1024*1024)
+row.add_column("Size", f"{size_mb:.1f} MB")
+except OSError:
+row.add_column("Size", "?")
+
+# Set selection args to just the file path (or index if we want item selection)
+# For item selection fallback, we don't strictly need row args if source command is None
+# But setting them helps if we want to support command expansion later
+table.set_row_selection_args(i, [str(file_path)])
+
+# Register the table but DO NOT set a source command
+# This forces CLI to use item-based selection (filtering the pipe)
+# instead of command expansion
+pipeline_context.set_last_result_table_overlay(table, downloaded_files)
+pipeline_context.set_current_stage_table(table)
+
+# Also print the table so user sees what they got
+log(str(table), flush=True)
+
 if db:
 db.update_worker_status(worker_id, 'completed')
 return 0

 if playlists_displayed:
-log(f"✓ Displayed {playlists_displayed} playlist(s) for selection", flush=True)
+debug(f"✓ Displayed {playlists_displayed} playlist(s) for selection")
 if db:
 db.update_worker_status(worker_id, 'completed')
 db.close()
 return 0  # Success - playlists shown

 if formats_displayed:
-log(f"✓ Format selection table displayed - use @N to select and download", flush=True)
+debug(f"✓ Format selection table displayed - use @N to select and download")
 if db:
 db.update_worker_status(worker_id, 'completed')
 db.close()
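The Downloaded Files table added above normalizes each path, names the file, and formats its size, guarding stat() because a file can disappear between download and display. A minimal self-contained sketch of that per-row normalization (the dict keys mirror the columns used in the hunk; the ResultTable API itself is this repo's):

    from pathlib import Path

    def file_row(path_like, index: int) -> dict:
        """Normalize a downloaded path into the row fields shown above."""
        path = Path(path_like) if isinstance(path_like, str) else path_like
        try:
            size = f"{path.stat().st_size / (1024 * 1024):.1f} MB"
        except OSError:
            size = "?"  # file may have been moved before we stat it
        return {"#": str(index + 1), "File": path.name,
                "Path": str(path), "Size": size}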
@@ -6,13 +6,15 @@ import shutil as _shutil
 import subprocess as _subprocess
 import json
 import sys
+import platform

-from helper.logger import log
+from helper.logger import log, debug
 import uuid as _uuid
 import time as _time

-from downlow_helpers.progress import print_progress, print_final_progress, format_size
+from helper.progress import print_progress, print_final_progress
-from downlow_helpers.http_client import HTTPClient
+from helper.http_client import HTTPClient
+from helper.mpv_ipc import get_ipc_pipe_path, send_to_mpv
 import fnmatch as _fnmatch

 from . import register
@@ -21,7 +23,7 @@ import pipeline as ctx
 from helper import hydrus as hydrus_wrapper
 from ._shared import Cmdlet, CmdletArg, normalize_hash, looks_like_hash, create_pipe_object_result
 from config import resolve_output_dir, get_hydrus_url, get_hydrus_access_key
-from downlow_helpers.alldebrid import AllDebridClient
+from helper.alldebrid import AllDebridClient



@@ -248,138 +250,23 @@ def _is_playable_in_mpv(file_path_or_ext: str, mime_type: Optional[str] = None)
 return False


-def _get_fixed_ipc_pipe() -> str:
-"""Get the fixed IPC pipe name for persistent MPV connection.
-
-Uses a fixed name 'mpv-medeia-macina' so all playback sessions
-connect to the same MPV window/process instead of creating new instances.
-"""
-import platform
-if platform.system() == 'Windows':
-return "\\\\.\\pipe\\mpv-medeia-macina"
-else:
-return "/tmp/mpv-medeia-macina.sock"
-
-
-def _send_to_mpv_pipe(file_url: str, ipc_pipe: str, title: str, headers: Optional[Dict[str, str]] = None) -> bool:
-"""Send loadfile command to existing MPV via IPC pipe.
-
-Returns True if successfully sent to existing MPV, False if pipe unavailable.
-"""
-import json
-import socket
-import platform
-
-try:
-# Prepare commands
-# Use set_property for headers as loadfile options can be unreliable via IPC
-header_str = ""
-if headers:
-header_str = ",".join([f"{k}: {v}" for k, v in headers.items()])
-
-# Command 1: Set headers (or clear them)
-cmd_headers = {
-"command": ["set_property", "http-header-fields", header_str],
-"request_id": 0
-}
-
-# Command 2: Load file using memory:// M3U to preserve title
-# Sanitize title to avoid breaking M3U format
-safe_title = title.replace("\n", " ").replace("\r", "")
-m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{file_url}\n"
-
-cmd_load = {
-"command": ["loadfile", f"memory://{m3u_content}", "append-play"],
-"request_id": 1
-}
-
-if platform.system() == 'Windows':
-# Windows named pipes require special handling
-try:
-# Open in r+b to read response
-with open(ipc_pipe, 'r+b', buffering=0) as pipe:
-# Send headers
-pipe.write((json.dumps(cmd_headers) + "\n").encode('utf-8'))
-pipe.flush()
-pipe.readline()  # Consume response for headers
-
-# Send loadfile
-pipe.write((json.dumps(cmd_load) + "\n").encode('utf-8'))
-pipe.flush()
-
-# Read response
-response_line = pipe.readline()
-if response_line:
-resp = json.loads(response_line.decode('utf-8'))
-if resp.get('error') != 'success':
-log(f"[get-file] MPV error: {resp.get('error')}", file=sys.stderr)
-return False
-
-log(f"[get-file] Sent to existing MPV: {title}", file=sys.stderr)
-return True
-except (OSError, IOError):
-# Pipe not available
-return False
-else:
-# Unix socket for Linux/macOS
-if not hasattr(socket, 'AF_UNIX'):
-return False
-
-sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-sock.connect(ipc_pipe)
-
-# Send headers
-sock.sendall((json.dumps(cmd_headers) + "\n").encode('utf-8'))
-sock.recv(4096)  # Consume response
-
-# Send loadfile
-sock.sendall((json.dumps(cmd_load) + "\n").encode('utf-8'))
-
-# Read response
-try:
-response_data = sock.recv(4096)
-if response_data:
-resp = json.loads(response_data.decode('utf-8'))
-if resp.get('error') != 'success':
-log(f"[get-file] MPV error: {resp.get('error')}", file=sys.stderr)
-sock.close()
-return False
-except:
-pass
-sock.close()
-
-log(f"[get-file] Sent to existing MPV: {title}", file=sys.stderr)
-return True
-except (OSError, socket.error, ConnectionRefusedError):
-# Pipe doesn't exist or MPV not listening - will need to start new instance
-return False
-except Exception as e:
-log(f"[get-file] IPC error: {e}", file=sys.stderr)
-return False
-
-
 def _play_in_mpv(file_url: str, file_title: str, is_stream: bool = False, headers: Optional[Dict[str, str]] = None) -> bool:
-"""Play file in MPV using IPC pipe, creating new instance if needed.
+"""Play file in MPV using centralized IPC pipe, creating new instance if needed.

 Returns True on success, False on error.
 """
-ipc_pipe = _get_fixed_ipc_pipe()
-import json
-import socket
-import platform

 try:
 # First try to send to existing MPV instance
-if _send_to_mpv_pipe(file_url, ipc_pipe, file_title, headers):
+if send_to_mpv(file_url, file_title, headers):
-print(f"Added to MPV: {file_title}")
+debug(f"Added to MPV: {file_title}")
 return True

 # No existing MPV or pipe unavailable - start new instance
-log(f"[get-file] Starting new MPV instance (pipe: {ipc_pipe})", file=sys.stderr)
+ipc_pipe = get_ipc_pipe_path()
-cmd = ['mpv', file_url, f'--input-ipc-server={ipc_pipe}']
+debug(f"[get-file] Starting new MPV instance (pipe: {ipc_pipe})", file=sys.stderr)

-# Set title for new instance
+# Build command - start MPV without a file initially, just with IPC server
-cmd.append(f'--force-media-title={file_title}')
+cmd = ['mpv', f'--input-ipc-server={ipc_pipe}']

 if headers:
 # Format headers for command line
@@ -387,20 +274,40 @@ def _play_in_mpv(file_url: str, file_title: str, is_stream: bool = False, header
 header_str = ",".join([f"{k}: {v}" for k, v in headers.items()])
 cmd.append(f'--http-header-fields={header_str}')

+# Add --idle flag so MPV stays running and waits for playlist commands
+cmd.append('--idle')
+
 # Detach process to prevent freezing parent CLI
 kwargs = {}
 if platform.system() == 'Windows':
-# CREATE_NEW_CONSOLE might be better than CREATE_NO_WINDOW if MPV needs a window
-# But usually MPV creates its own window.
-# DETACHED_PROCESS (0x00000008) is also an option.
 kwargs['creationflags'] = 0x00000008  # DETACHED_PROCESS

 _subprocess.Popen(cmd, stdin=_subprocess.DEVNULL, stdout=_subprocess.DEVNULL, stderr=_subprocess.DEVNULL, **kwargs)

-print(f"{'Streaming' if is_stream else 'Playing'} in MPV: {file_title}")
+debug(f"[get-file] Started MPV instance (IPC: {ipc_pipe})", file=sys.stderr)
-log(f"[get-file] Started MPV with {file_title} (IPC: {ipc_pipe})", file=sys.stderr)
+# Give MPV time to start and open IPC pipe
+# Windows needs more time than Unix
+wait_time = 1.0 if platform.system() == 'Windows' else 0.5
+debug(f"[get-file] Waiting {wait_time}s for MPV to initialize IPC...", file=sys.stderr)
+_time.sleep(wait_time)
+
+# Try up to 3 times to send the file via IPC
+for attempt in range(3):
+debug(f"[get-file] Sending file via IPC (attempt {attempt + 1}/3)", file=sys.stderr)
+if send_to_mpv(file_url, file_title, headers):
+debug(f"{'Streaming' if is_stream else 'Playing'} in MPV: {file_title}")
+debug(f"[get-file] Added to new MPV instance (IPC: {ipc_pipe})", file=sys.stderr)
 return True
+
+if attempt < 2:
+# Wait before retrying
+_time.sleep(0.3)
+
+# IPC send failed after all retries
+log("Error: Could not send file to MPV via IPC after startup", file=sys.stderr)
+return False

 except FileNotFoundError:
 log("Error: MPV not found. Install mpv to play media files", file=sys.stderr)
 return False
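The removed _send_to_mpv_pipe above shows the protocol the new helper.mpv_ipc module centralizes: newline-delimited JSON commands over MPV's IPC socket, with a memory:// M3U wrapper so the playlist entry keeps a display title. A minimal Unix-socket sketch of one loadfile round trip, grounded in that removed code (AF_UNIX is unavailable on Windows, where a named pipe is used instead):

    import json
    import socket

    def mpv_loadfile(ipc_socket: str, url: str, title: str) -> bool:
        """Send a loadfile command over MPV's JSON IPC (Unix socket sketch)."""
        safe_title = title.replace("\n", " ").replace("\r", "")
        m3u = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{url}\n"
        cmd = {"command": ["loadfile", f"memory://{m3u}", "append-play"],
               "request_id": 1}
        try:
            with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
                sock.connect(ipc_socket)
                sock.sendall((json.dumps(cmd) + "\n").encode("utf-8"))
                resp = json.loads(sock.recv(4096).decode("utf-8"))
                return resp.get("error") == "success"
        except (OSError, ConnectionRefusedError):
            return False  # no MPV listening; caller spawns one with --idle and retries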
@@ -447,16 +354,16 @@ def _handle_search_result(result: Any, args: Sequence[str], config: Dict[str, An
 log("Error: No storage backend specified in result", file=sys.stderr)
 return 1

-log(f"[get-file] Retrieving file from storage: {storage_name}", file=sys.stderr)
+debug(f"[get-file] Retrieving file from storage: {storage_name}", file=sys.stderr)

 # Handle different storage backends
 if storage_name.lower() == 'hydrus':
 return _handle_hydrus_file(file_hash, file_title, config, args, mime_type=mime_type)
 elif storage_name.lower() == 'local':
-return _handle_local_file(file_path, file_title, args, file_hash=file_hash)
+return _handle_local_file(file_path, file_title, config, args, file_hash=file_hash)
 elif storage_name.lower() == 'download':
 # Downloads are local files
-return _handle_local_file(file_path, file_title, args, file_hash=file_hash)
+return _handle_local_file(file_path, file_title, config, args, file_hash=file_hash)
 elif storage_name.lower() == 'debrid':
 # Extract magnet_id from result (search-file stores it in full_metadata or as custom attribute)
 if not magnet_id:
@@ -516,7 +423,7 @@ def _handle_hydrus_file(file_hash: Optional[str], file_title: str, config: Dict[

 if force_browser:
 # User explicitly wants browser
-ipc_pipe = _get_fixed_ipc_pipe()
+ipc_pipe = get_ipc_pipe_path()
 result_dict = create_pipe_object_result(
 source='hydrus',
 identifier=file_hash,
@@ -536,44 +443,30 @@ def _handle_hydrus_file(file_hash: Optional[str], file_title: str, config: Dict[
 try:
 import webbrowser
 webbrowser.open(web_url)
-log(f"[get-file] Opened in browser: {file_title}", file=sys.stderr)
+debug(f"[get-file] Opened in browser: {file_title}", file=sys.stderr)
 except Exception:
 pass
 return 0
 elif force_mpv or (is_media and mpv_available):
 # Auto-play in MPV for media files (if available), or user requested it
 if _play_in_mpv(stream_url, file_title, is_stream=True, headers=headers):
-# Emit result as PipeObject-compatible dict for pipelining
+# Show pipe menu instead of emitting result for display
-ipc_pipe = _get_fixed_ipc_pipe()
+# This allows immediate @N selection from the playlist
-result_dict = create_pipe_object_result(
+from . import pipe
-source='hydrus',
+pipe._run(None, [], config)
-identifier=file_hash,
-file_path=stream_url,
-cmdlet_name='get-file',
-title=file_title,
-file_hash=file_hash,
-extra={
-'ipc': ipc_pipe,
-'action_type': 'streaming',
-'web_url': web_url,
-'hydrus_url': hydrus_url,
-'access_key': access_key
-}
-)
-ctx.emit(result_dict)
 return 0
 else:
 # Fall back to browser
 try:
 import webbrowser
 webbrowser.open(web_url)
-log(f"[get-file] Opened in browser instead", file=sys.stderr)
+debug(f"[get-file] Opened in browser instead", file=sys.stderr)
 except Exception:
 pass
 return 0
 else:
 # Not media, open in browser
-ipc_pipe = _get_fixed_ipc_pipe()
+ipc_pipe = get_ipc_pipe_path()
 result_dict = create_pipe_object_result(
 source='hydrus',
 identifier=file_hash,
@@ -593,7 +486,7 @@ def _handle_hydrus_file(file_hash: Optional[str], file_title: str, config: Dict[
 try:
 import webbrowser
 webbrowser.open(web_url)
-log(f"[get-file] Opened in browser: {file_title}", file=sys.stderr)
+debug(f"[get-file] Opened in browser: {file_title}", file=sys.stderr)
 except Exception:
 pass
 return 0
@@ -605,7 +498,7 @@ def _handle_hydrus_file(file_hash: Optional[str], file_title: str, config: Dict[
 return 1


-def _handle_local_file(file_path: Optional[str], file_title: str, args: Sequence[str], file_hash: Optional[str] = None) -> int:
+def _handle_local_file(file_path: Optional[str], file_title: str, config: Dict[str, Any], args: Sequence[str], file_hash: Optional[str] = None) -> int:
 """Handle file from local storage - auto-play in MPV if media, otherwise open with default app."""
 if not file_path:
 log("Error: No file path provided", file=sys.stderr)
@@ -641,7 +534,7 @@ def _handle_local_file(file_path: Optional[str], file_title: str, args: Sequence
 else:  # Linux
 sp.run(['xdg-open', file_path])
 ctx.emit(f"Opened: {file_title}")
-log(f"[get-file] Opened {file_title} with default app", file=sys.stderr)
+debug(f"[get-file] Opened {file_title} with default app", file=sys.stderr)
 return 0
 except Exception as e:
 log(f"Error opening file: {e}", file=sys.stderr)
@@ -649,21 +542,10 @@ def _handle_local_file(file_path: Optional[str], file_title: str, args: Sequence
 elif force_mpv or (is_media and mpv_available):
 # Auto-play in MPV for media files (if available), or user requested it
 if _play_in_mpv(file_path, file_title, is_stream=False):
-# Emit result as PipeObject-compatible dict for pipelining
+# Show pipe menu instead of emitting result for display
-ipc_pipe = _get_fixed_ipc_pipe()
+# This allows immediate @N selection from the playlist
-result_dict = create_pipe_object_result(
+from . import pipe
-source='local',
+pipe._run(None, [], config)
-identifier=str(Path(file_path).stem) if file_path else 'unknown',
-file_path=file_path,
-cmdlet_name='get-file',
-title=file_title,
-file_hash=file_hash,  # Include hash from search result if available
-extra={
-'ipc': ipc_pipe,  # MPV IPC pipe for Lua script control
-'action_type': 'playing'  # Distinguish from other get-file actions
-}
-)
-ctx.emit(result_dict)
 return 0
 else:
 # Fall back to default application
@@ -676,7 +558,7 @@ def _handle_local_file(file_path: Optional[str], file_title: str, args: Sequence
 os.startfile(file_path)
 else:  # Linux
 _subprocess.run(['xdg-open', file_path])
-log(f"[get-file] Opened with default app instead", file=sys.stderr)
+debug(f"[get-file] Opened with default app instead", file=sys.stderr)
 except Exception:
 pass
 return 0
@@ -694,7 +576,7 @@ def _handle_local_file(file_path: Optional[str], file_title: str, args: Sequence
 else:  # Linux
 sp.run(['xdg-open', file_path])
 print(f"Opened: {file_title}")
-log(f"[get-file] Opened {file_title} with default app", file=sys.stderr)
+debug(f"[get-file] Opened {file_title} with default app", file=sys.stderr)

 # Emit result for downstream processing
 result_dict = create_pipe_object_result(
@@ -751,7 +633,7 @@ def _handle_debrid_file(magnet_id: int, magnet_title: str, config: Dict[str, Any
 try:
 client = AllDebridClient(api_key)

-log(f"[get-file] Downloading magnet {magnet_id}: {magnet_title}", file=sys.stderr)
+debug(f"[get-file] Downloading magnet {magnet_id}: {magnet_title}", file=sys.stderr)

 # Fetch magnet files
 try:
@@ -1218,7 +1100,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

 # Normal file export (happens regardless of -metadata flag)
 try:
-from downlow_helpers.hydrus import hydrus_export as _hydrus_export
+from helper.hydrus import hydrus_export as _hydrus_export
 except Exception:
 _hydrus_export = None  # type: ignore
 if _hydrus_export is None:

@@ -49,6 +49,7 @@ class TagItem:
 hash_hex: Optional[str] = None
 source: str = "hydrus"
 service_name: Optional[str] = None
+file_path: Optional[str] = None

 def __post_init__(self):
 # Make ResultTable happy by adding standard fields
@@ -101,7 +102,9 @@ def _emit_tags_as_table(
 hash_hex: Optional[str],
 source: str = "hydrus",
 service_name: Optional[str] = None,
-config: Dict[str, Any] = None
+config: Dict[str, Any] = None,
+item_title: Optional[str] = None,
+file_path: Optional[str] = None
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Emit tags as TagItem objects and display via ResultTable.
|
"""Emit tags as TagItem objects and display via ResultTable.
|
||||||
|
|
||||||
@@ -111,7 +114,13 @@ def _emit_tags_as_table(
|
|||||||
from result_table import ResultTable
|
from result_table import ResultTable
|
||||||
|
|
||||||
# Create ResultTable with just tag column (no title)
|
# Create ResultTable with just tag column (no title)
|
||||||
table = ResultTable("Tags", max_columns=1)
|
table_title = "Tags"
|
||||||
|
if item_title:
|
||||||
|
table_title = f"Tags: {item_title}"
|
||||||
|
if hash_hex:
|
||||||
|
table_title += f" [{hash_hex[:8]}]"
|
||||||
|
|
||||||
|
table = ResultTable(table_title, max_columns=1)
|
||||||
table.set_source_command("get-tag", [])
|
table.set_source_command("get-tag", [])
|
||||||
|
|
||||||
# Create TagItem for each tag
|
# Create TagItem for each tag
|
||||||
@@ -123,6 +132,7 @@ def _emit_tags_as_table(
|
|||||||
hash_hex=hash_hex,
|
hash_hex=hash_hex,
|
||||||
source=source,
|
source=source,
|
||||||
service_name=service_name,
|
service_name=service_name,
|
||||||
|
file_path=file_path,
|
||||||
)
|
)
|
||||||
tag_items.append(tag_item)
|
tag_items.append(tag_item)
|
||||||
table.add_result(tag_item)
|
table.add_result(tag_item)
|
||||||
@@ -1069,6 +1079,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
|||||||
# Try Hydrus first (always prioritize if available and has hash)
|
# Try Hydrus first (always prioritize if available and has hash)
|
||||||
use_hydrus = False
|
use_hydrus = False
|
||||||
hydrus_meta = None # Cache the metadata from first fetch
|
hydrus_meta = None # Cache the metadata from first fetch
|
||||||
|
client = None
|
||||||
if hash_hex and hydrus_available:
|
if hash_hex and hydrus_available:
|
||||||
try:
|
try:
|
||||||
client = hydrus.get_client(config)
|
client = hydrus.get_client(config)
|
||||||
@@ -1093,6 +1104,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
|||||||
try:
|
try:
|
||||||
# Use cached metadata from above, don't fetch again
|
# Use cached metadata from above, don't fetch again
|
||||||
service_name = hydrus.get_tag_service_name(config)
|
service_name = hydrus.get_tag_service_name(config)
|
||||||
|
if client is None:
|
||||||
client = hydrus.get_client(config)
|
client = hydrus.get_client(config)
|
||||||
service_key = hydrus.get_tag_service_key(client, service_name)
|
service_key = hydrus.get_tag_service_key(client, service_name)
|
||||||
current = _extract_my_tags_from_hydrus_meta(hydrus_meta, service_key, service_name)
|
current = _extract_my_tags_from_hydrus_meta(hydrus_meta, service_key, service_name)
|
||||||
@@ -1148,10 +1160,13 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
|||||||
return 1
|
return 1
|
||||||
|
|
||||||
# Always output to ResultTable (pipeline mode only)
|
# Always output to ResultTable (pipeline mode only)
|
||||||
|
# Extract title for table header
|
||||||
|
item_title = get_field(result, "title", None) or get_field(result, "name", None) or get_field(result, "filename", None)
|
||||||
|
|
||||||
if source == "hydrus":
|
if source == "hydrus":
|
||||||
_emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config)
|
_emit_tags_as_table(current, hash_hex=hash_hex, source="hydrus", service_name=service_name, config=config, item_title=item_title)
|
||||||
else:
|
else:
|
||||||
_emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config)
|
_emit_tags_as_table(current, hash_hex=hash_hex, source="local", service_name=None, config=config, item_title=item_title, file_path=str(local_path) if local_path else None)
|
||||||
|
|
||||||
# If emit requested or store key provided, emit payload
|
# If emit requested or store key provided, emit payload
|
||||||
if emit_mode:
|
if emit_mode:
|
||||||
|
|||||||
14  cmdlets/output_json.py  (new file)
@@ -0,0 +1,14 @@
+from typing import Any, Dict, Sequence
+import json
+from ._shared import Cmdlet
+
+
+def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
+    """Output the current pipeline result as JSON."""
+    print(json.dumps(result, indent=2, default=str))
+    return 0
+
+
+CMDLET = Cmdlet(
+    name="output-json",
+    summary="Output the current pipeline result as JSON.",
+    usage="... | output-json",
+)
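The `default=str` argument is what keeps this cmdlet from crashing on pipeline objects: anything `json.dumps` cannot serialize natively is coerced through `str()` instead of raising `TypeError`. A minimal sketch of the same pattern outside the cmdlet (the sample payload is hypothetical):

```python
import json
from datetime import datetime
from pathlib import Path

# default=str converts non-JSON types (Path, datetime, ...) to strings
# instead of raising TypeError during serialization.
payload = {"title": "clip", "path": Path("/tmp/clip.mp4"), "added": datetime(2024, 1, 1)}
print(json.dumps(payload, indent=2, default=str))
```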
586  cmdlets/pipe.py
@@ -6,219 +6,44 @@ import socket
 import re
 import subprocess
 from ._shared import Cmdlet, CmdletArg, parse_cmdlet_args
-from helper.logger import log
+from helper.logger import log, debug
 from result_table import ResultTable
-from .get_file import _get_fixed_ipc_pipe
+from helper.mpv_ipc import get_ipc_pipe_path, MPVIPCClient
 import pipeline as ctx
+from helper.local_library import LocalLibrarySearchOptimizer
+from config import get_local_storage_path

-def _send_ipc_command(command: Dict[str, Any]) -> Optional[Any]:
+def _send_ipc_command(command: Dict[str, Any], silent: bool = False) -> Optional[Any]:
     """Send a command to the MPV IPC pipe and return the response."""
-    ipc_pipe = _get_fixed_ipc_pipe()
-    request = json.dumps(command) + "\n"
-
     try:
-        if platform.system() == 'Windows':
-            # Windows named pipe
-            # Opening in r+b mode to read response
-            try:
-                with open(ipc_pipe, 'r+b', buffering=0) as pipe:
-                    pipe.write(request.encode('utf-8'))
-                    pipe.flush()
-
-                    # Read response
-                    # We'll try to read a line. This might block if MPV is unresponsive.
-                    response_line = pipe.readline()
-                    if response_line:
-                        return json.loads(response_line.decode('utf-8'))
-            except FileNotFoundError:
-                return None  # MPV not running
-        else:
-            # Unix socket
-            af_unix = getattr(socket, 'AF_UNIX', None)
-            if af_unix is None:
-                log("Unix sockets not supported on this platform", file=sys.stderr)
-                return None
-
-            try:
-                sock = socket.socket(af_unix, socket.SOCK_STREAM)
-                sock.settimeout(2.0)
-                sock.connect(ipc_pipe)
-                sock.sendall(request.encode('utf-8'))
-
-                # Read response
-                response_data = b""
-                while True:
-                    try:
-                        chunk = sock.recv(4096)
-                        if not chunk:
-                            break
-                        response_data += chunk
-                        if b"\n" in chunk:
-                            break
-                    except socket.timeout:
-                        break
-
-                sock.close()
-
-                if response_data:
-                    # Parse lines, look for response to our request
-                    lines = response_data.decode('utf-8').strip().split('\n')
-                    for line in lines:
-                        try:
-                            resp = json.loads(line)
-                            # If it has 'error' field, it's a response
-                            if 'error' in resp:
-                                return resp
-                        except:
-                            pass
-            except (FileNotFoundError, ConnectionRefusedError):
-                return None  # MPV not running
-            except Exception as e:
-                log(f"Unix IPC Error: {e}", file=sys.stderr)
-                return None
-
+        ipc_pipe = get_ipc_pipe_path()
+        client = MPVIPCClient(socket_path=ipc_pipe)
+
+        if not client.connect():
+            return None  # MPV not running
+
+        response = client.send_command(command)
+        client.disconnect()
+        return response
     except Exception as e:
-        log(f"Windows IPC Error: {e}", file=sys.stderr)
-        return None
-
-    return None
-
-def _get_playlist() -> List[Dict[str, Any]]:
-    """Get the current playlist from MPV."""
+        if not silent:
+            debug(f"IPC Error: {e}", file=sys.stderr)
+        return None
+
+def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
+    """Get the current playlist from MPV. Returns None if MPV is not running."""
     cmd = {"command": ["get_property", "playlist"], "request_id": 100}
-    resp = _send_ipc_command(cmd)
-    if resp and resp.get("error") == "success":
+    resp = _send_ipc_command(cmd, silent=silent)
+    if resp is None:
+        return None
+    if resp.get("error") == "success":
         return resp.get("data", [])
     return []

-def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
-    """Manage and play items in the MPV playlist via IPC."""
-    parsed = parse_cmdlet_args(args, CMDLET)
-
-    # Handle positional index argument if provided
-    index_arg = parsed.get("index")
-
-    clear_mode = parsed.get("clear")
-    list_mode = parsed.get("list")
-
-    # Handle piped input (add to playlist)
-    if result:
-        # If result is a list of items, add them to playlist
-        items_to_add = []
-        if isinstance(result, list):
-            items_to_add = result
-        elif isinstance(result, dict):
-            items_to_add = [result]
-
-        added_count = 0
-        for i, item in enumerate(items_to_add):
-            # Extract URL/Path
-            target = None
-            title = None
-
-            if isinstance(item, dict):
-                target = item.get("target") or item.get("url") or item.get("path")
-                title = item.get("title") or item.get("name")
-            elif hasattr(item, "target"):
-                target = item.target
-                title = getattr(item, "title", None)
-            elif isinstance(item, str):
-                target = item
-
-            if target:
-                # Add to MPV playlist
-                # We use loadfile with append flag
-                # Configure 1080p limit for streams (bestvideo<=1080p + bestaudio)
-                options = {
-                    "ytdl-format": "bestvideo[height<=?1080]+bestaudio/best[height<=?1080]"
-                }
-
-                if title:
-                    options["force-media-title"] = title
-
-                cmd = {"command": ["loadfile", target, "append", options], "request_id": 200}
-                resp = _send_ipc_command(cmd)
-
-                if resp is None:
-                    # MPV not running (or died)
-                    # Start MPV with remaining items
-                    _start_mpv(items_to_add[i:])
-                    return 0
-                elif resp.get("error") == "success":
-                    added_count += 1
-                    if title:
-                        log(f"Queued: {title}")
-                    else:
-                        log(f"Queued: {target}")
-
-        if added_count > 0:
-            # If we added items, we might want to play the first one if nothing is playing?
-            # For now, just list the playlist
-            pass
-
-    # Get playlist from MPV
-    items = _get_playlist()
-
-    if not items:
-        log("MPV playlist is empty or MPV is not running.")
-        return 0
-
-    # If index is provided, perform action (Play or Clear)
-    if index_arg is not None:
-        try:
-            # Handle 1-based index
-            idx = int(index_arg) - 1
-
-            if idx < 0 or idx >= len(items):
-                log(f"Index {index_arg} out of range (1-{len(items)}).")
-                return 1
-
-            item = items[idx]
-            title = item.get("title") or item.get("filename") or "Unknown"
-
-            if clear_mode:
-                # Remove item
-                cmd = {"command": ["playlist-remove", idx], "request_id": 101}
-                resp = _send_ipc_command(cmd)
-                if resp and resp.get("error") == "success":
-                    log(f"Removed: {title}")
-                    # Refresh items for listing
-                    items = _get_playlist()
-                    list_mode = True
-                    index_arg = None
-                else:
-                    log(f"Failed to remove item: {resp.get('error') if resp else 'No response'}")
-                    return 1
-            else:
-                # Play item
-                cmd = {"command": ["playlist-play-index", idx], "request_id": 102}
-                resp = _send_ipc_command(cmd)
-                if resp and resp.get("error") == "success":
-                    log(f"Playing: {title}")
-                    return 0
-                else:
-                    log(f"Failed to play item: {resp.get('error') if resp else 'No response'}")
-                    return 1
-
-        except ValueError:
-            log(f"Invalid index: {index_arg}")
-            return 1
-
-    # List items (Default action or after clear)
-    if list_mode or index_arg is None:
-        if not items:
-            log("MPV playlist is empty.")
-            return 0
-
-        table = ResultTable("MPV Playlist")
-
-        for i, item in enumerate(items):
-            is_current = item.get("current", False)
-            title = item.get("title") or ""
+def _extract_title_from_item(item: Dict[str, Any]) -> str:
+    """Extract a clean title from an MPV playlist item, handling memory:// M3U hacks."""
+    title = item.get("title")
     filename = item.get("filename") or ""

     # Special handling for memory:// M3U playlists (used to pass titles via IPC)
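For reference, the protocol behind `_send_ipc_command` is MPV's JSON IPC: newline-delimited JSON objects exchanged over the socket or named pipe given to `--input-ipc-server`. A minimal standalone sketch, assuming a Unix socket (Windows needs the named-pipe variant that `MPVIPCClient` wraps); the socket path and request id are illustrative:

```python
import json
import socket

def mpv_command(sock_path: str, *args) -> dict:
    """Send one command to MPV's JSON IPC socket and return its reply."""
    req = json.dumps({"command": list(args), "request_id": 1}) + "\n"
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
        s.connect(sock_path)
        s.sendall(req.encode("utf-8"))
        buf = b""
        while b"\n" not in buf:  # replies are newline-delimited JSON
            chunk = s.recv(4096)
            if not chunk:
                break
            buf += chunk
    # Asynchronous events can arrive on the same stream; the command
    # reply is the line that carries an "error" field.
    for line in buf.decode("utf-8").splitlines():
        msg = json.loads(line)
        if "error" in msg:
            return msg
    return {}

# e.g. mpv_command("/tmp/mpvsocket", "get_property", "playlist")
```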
@@ -232,28 +57,327 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             if not title or title == "memory://":
                 title = extracted_title

-            # Extract actual URL
-            # Find the first line that looks like a URL and not a directive
+            # If we still don't have a title, try to find the URL in the M3U content
+            if not title:
                 lines = filename.splitlines()
                 for line in lines:
                     line = line.strip()
                     if line and not line.startswith('#') and not line.startswith('memory://'):
-                        filename = line
-                        break
+                        # Found the URL, use it as title
+                        return line
     except Exception:
         pass

+    return title or filename or "Unknown"
+
+def _queue_items(items: List[Any], clear_first: bool = False) -> None:
+    """Queue items to MPV, starting it if necessary.
+
+    Args:
+        items: List of items to queue
+        clear_first: If True, the first item will replace the current playlist
+    """
+    for i, item in enumerate(items):
+        # Extract URL/Path
+        target = None
+        title = None
+
+        if isinstance(item, dict):
+            target = item.get("target") or item.get("url") or item.get("path") or item.get("filename")
+            title = item.get("title") or item.get("name")
+        elif hasattr(item, "target"):
+            target = item.target
+            title = getattr(item, "title", None)
+        elif isinstance(item, str):
+            target = item
+
+        if target:
+            # Add to MPV playlist
+            # We use loadfile with append flag (or replace if clear_first is set)
+
+            # Use memory:// M3U hack to pass title to MPV
+            if title:
+                # Sanitize title for M3U (remove newlines)
+                safe_title = title.replace('\n', ' ').replace('\r', '')
+                m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"
+                target_to_send = f"memory://{m3u_content}"
+            else:
+                target_to_send = target
+
+            mode = "append"
+            if clear_first and i == 0:
+                mode = "replace"
+
+            cmd = {"command": ["loadfile", target_to_send, mode], "request_id": 200}
+            resp = _send_ipc_command(cmd)
+
+            if resp is None:
+                # MPV not running (or died)
+                # Start MPV with remaining items
+                _start_mpv(items[i:])
+                return
+            elif resp.get("error") == "success":
+                # Also set property for good measure
+                if title:
+                    title_cmd = {"command": ["set_property", "force-media-title", title], "request_id": 201}
+                    _send_ipc_command(title_cmd)
+                debug(f"Queued: {title or target}")
+            else:
+                error_msg = str(resp.get('error'))
+                debug(f"Failed to queue item: {error_msg}", file=sys.stderr)
+
+def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
+    """Manage and play items in the MPV playlist via IPC."""
+
+    parsed = parse_cmdlet_args(args, CMDLET)
+
+    # Handle positional index argument if provided
+    index_arg = parsed.get("index")
+
+    clear_mode = parsed.get("clear")
+    list_mode = parsed.get("list")
+    play_mode = parsed.get("play")
+    pause_mode = parsed.get("pause")
+    save_mode = parsed.get("save")
+    load_mode = parsed.get("load")
+
+    # Handle Save Playlist
+    if save_mode:
+        playlist_name = index_arg or f"Playlist {subprocess.check_output(['date', '/t'], shell=True).decode().strip()}"
+        # If index_arg was used for name, clear it so it doesn't trigger index logic
+        if index_arg:
+            index_arg = None
+
+        items = _get_playlist()
+        if not items:
+            debug("Cannot save: MPV playlist is empty or MPV is not running.")
+            return 1
+
+        # Clean up items for saving (remove current flag, etc)
+        clean_items = []
+        for item in items:
+            # If title was extracted from memory://, we should probably save the original filename
+            # if it's a URL, or reconstruct a clean object.
+            # Actually, _extract_title_from_item handles the display title.
+            # But for playback, we need the 'filename' (which might be memory://...)
+            # If we save 'memory://...', it will work when loaded back.
+            clean_items.append(item)
+
+        # Use config from context or load it
+        config_data = config if config else {}
+
+        storage_path = get_local_storage_path(config_data)
+        if not storage_path:
+            debug("Local storage path not configured.")
+            return 1
+
+        with LocalLibrarySearchOptimizer(storage_path) as db:
+            if db.save_playlist(playlist_name, clean_items):
+                debug(f"Playlist saved as '{playlist_name}'")
+                return 0
+            else:
+                debug(f"Failed to save playlist '{playlist_name}'")
+                return 1
+
+    # Handle Load Playlist
+    current_playlist_name = None
+    if load_mode:
+        # Use config from context or load it
+        config_data = config if config else {}
+
+        storage_path = get_local_storage_path(config_data)
+        if not storage_path:
+            debug("Local storage path not configured.")
+            return 1
+
+        with LocalLibrarySearchOptimizer(storage_path) as db:
+            if index_arg:
+                try:
+                    pl_id = int(index_arg)
+                    result = db.get_playlist_by_id(pl_id)
+                    if result is None:
+                        debug(f"Playlist ID {pl_id} not found.")
+                        return 1
+
+                    name, items = result
+                    current_playlist_name = name
+
+                    # Queue items (replacing current playlist)
+                    if items:
+                        _queue_items(items, clear_first=True)
+                    else:
+                        # Empty playlist, just clear
+                        _send_ipc_command({"command": ["playlist-clear"]}, silent=True)
+
+                    # Switch to list mode to show the result
+                    list_mode = True
+                    index_arg = None
+                    # Fall through to list logic
+
+                except ValueError:
+                    debug(f"Invalid playlist ID: {index_arg}")
+                    return 1
+            else:
+                playlists = db.get_playlists()
+
+                if not playlists:
+                    debug("No saved playlists found.")
+                    return 0
+
+                table = ResultTable("Saved Playlists")
+                for i, pl in enumerate(playlists):
+                    item_count = len(pl.get('items', []))
+                    row = table.add_row()
+                    # row.add_column("ID", str(pl['id']))  # Hidden as per user request
+                    row.add_column("Name", pl['name'])
+                    row.add_column("Items", str(item_count))
+                    row.add_column("Updated", pl['updated_at'])
+
+                    # Set the playlist items as the result object for this row
+                    # When user selects @N, they get the list of items
+                    # We also set the source command to .pipe -load <ID> so it loads it
+                    table.set_row_selection_args(i, ["-load", str(pl['id'])])
+
+                table.set_source_command(".pipe")
+
+                # Register results
+                ctx.set_last_result_table_overlay(table, [p['items'] for p in playlists])
+                ctx.set_current_stage_table(table)
+
+                print(table)
+                return 0
+
+    # Handle Play/Pause commands
+    if play_mode:
+        cmd = {"command": ["set_property", "pause", False], "request_id": 103}
+        resp = _send_ipc_command(cmd)
+        if resp and resp.get("error") == "success":
+            debug("Resumed playback")
+            return 0
+        else:
+            debug("Failed to resume playback (MPV not running?)", file=sys.stderr)
+            return 1
+
+    if pause_mode:
+        cmd = {"command": ["set_property", "pause", True], "request_id": 104}
+        resp = _send_ipc_command(cmd)
+        if resp and resp.get("error") == "success":
+            debug("Paused playback")
+            return 0
+        else:
+            debug("Failed to pause playback (MPV not running?)", file=sys.stderr)
+            return 1
+
+    # Handle Clear All command (no index provided)
+    if clear_mode and index_arg is None:
+        cmd = {"command": ["playlist-clear"], "request_id": 105}
+        resp = _send_ipc_command(cmd)
+        if resp and resp.get("error") == "success":
+            debug("Playlist cleared")
+            return 0
+        else:
+            debug("Failed to clear playlist (MPV not running?)", file=sys.stderr)
+            return 1
+
+    # Handle piped input (add to playlist)
+    if result:
+        # If result is a list of items, add them to playlist
+        items_to_add = []
+        if isinstance(result, list):
+            items_to_add = result
+        elif isinstance(result, dict):
+            items_to_add = [result]
+
+        _queue_items(items_to_add)
+
+        if items_to_add:
+            # If we added items, we might want to play the first one if nothing is playing?
+            # For now, just list the playlist
+            pass
+
+    # Get playlist from MPV
+    items = _get_playlist()
+
+    if items is None:
+        debug("MPV is not running. Starting new instance...")
+        _start_mpv([])
+        return 0
+
+    if not items:
+        debug("MPV playlist is empty.")
+        return 0
+
+    # If index is provided, perform action (Play or Clear)
+    if index_arg is not None:
+        try:
+            # Handle 1-based index
+            idx = int(index_arg) - 1
+
+            if idx < 0 or idx >= len(items):
+                debug(f"Index {index_arg} out of range (1-{len(items)}).")
+                return 1
+
+            item = items[idx]
+            title = _extract_title_from_item(item)
+
+            if clear_mode:
+                # Remove item
+                cmd = {"command": ["playlist-remove", idx], "request_id": 101}
+                resp = _send_ipc_command(cmd)
+                if resp and resp.get("error") == "success":
+                    debug(f"Removed: {title}")
+                    # Refresh items for listing
+                    items = _get_playlist() or []
+                    list_mode = True
+                    index_arg = None
+                else:
+                    debug(f"Failed to remove item: {resp.get('error') if resp else 'No response'}")
+                    return 1
+            else:
+                # Play item
+                cmd = {"command": ["playlist-play-index", idx], "request_id": 102}
+                resp = _send_ipc_command(cmd)
+                if resp and resp.get("error") == "success":
+                    # Ensure playback starts (unpause)
+                    unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
+                    _send_ipc_command(unpause_cmd)
+
+                    debug(f"Playing: {title}")
+                    return 0
+                else:
+                    debug(f"Failed to play item: {resp.get('error') if resp else 'No response'}")
+                    return 1
+
+        except ValueError:
+            debug(f"Invalid index: {index_arg}")
+            return 1
+
+    # List items (Default action or after clear)
+    if list_mode or index_arg is None:
+        if not items:
+            debug("MPV playlist is empty.")
+            return 0
+
+        # Use the loaded playlist name if available, otherwise default
+        # Note: current_playlist_name is defined in the load_mode block if a playlist was loaded
+        try:
+            table_title = current_playlist_name or "MPV Playlist"
+        except NameError:
+            table_title = "MPV Playlist"
+
+        table = ResultTable(table_title)
+
+        for i, item in enumerate(items):
+            is_current = item.get("current", False)
+            title = _extract_title_from_item(item)

             # Truncate if too long
-            if len(title) > 57:
-                title = title[:57] + "..."
-            if len(filename) > 27:
-                filename = filename[:27] + "..."
+            if len(title) > 80:
+                title = title[:77] + "..."

             row = table.add_row()
-            row.add_column("#", str(i + 1))
             row.add_column("Current", "*" if is_current else "")
             row.add_column("Title", title)
-            row.add_column("Filename", filename)

             table.set_row_selection_args(i, [str(i + 1)])
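The `memory://` trick used by `_queue_items` works because MPV accepts an in-memory M3U document as a `loadfile` target, and M3U's `#EXTINF` directive carries a display title, so a friendly name survives the IPC round trip without any extra property juggling. The construction in isolation (title and URL are placeholders):

```python
def m3u_target(title: str, url: str) -> str:
    """Wrap a URL in an in-memory M3U so MPV shows a friendly title."""
    safe_title = title.replace("\n", " ").replace("\r", "")  # M3U is line-oriented
    return f"memory://#EXTM3U\n#EXTINF:-1,{safe_title}\n{url}"

# Sent as: {"command": ["loadfile", m3u_target("My clip", "https://example.com/v.mp4"), "append"]}
```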
@@ -269,20 +393,18 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

 def _start_mpv(items: List[Any]) -> None:
     """Start MPV with a list of items."""
-    ipc_pipe = _get_fixed_ipc_pipe()
+    ipc_pipe = get_ipc_pipe_path()

-    cmd = ['mpv', f'--input-ipc-server={ipc_pipe}']
+    cmd = ['mpv', f'--input-ipc-server={ipc_pipe}', '--idle', '--force-window']
     cmd.append('--ytdl-format=bestvideo[height<=?1080]+bestaudio/best[height<=?1080]')

     # Add items
-    first_title_set = False
-
     for item in items:
         target = None
         title = None

         if isinstance(item, dict):
-            target = item.get("target") or item.get("url") or item.get("path")
+            target = item.get("target") or item.get("url") or item.get("path") or item.get("filename")
             title = item.get("title") or item.get("name")
         elif hasattr(item, "target"):
             target = item.target
@@ -291,21 +413,23 @@ def _start_mpv(items: List[Any]) -> None:
             target = item

         if target:
-            if not first_title_set and title:
-                cmd.append(f'--force-media-title={title}')
-                first_title_set = True
+            if title:
+                # Use memory:// M3U hack to pass title
+                safe_title = title.replace('\n', ' ').replace('\r', '')
+                m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{target}"
+                cmd.append(f"memory://{m3u_content}")
+            else:
                 cmd.append(target)

-    if len(cmd) > 3:  # mpv + ipc + format + at least one file
     try:
         kwargs = {}
         if platform.system() == 'Windows':
             kwargs['creationflags'] = 0x00000008  # DETACHED_PROCESS

         subprocess.Popen(cmd, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs)
-        log(f"Started MPV with {len(cmd)-3} items")
+        debug(f"Started MPV with {len(items)} items")
     except Exception as e:
-        log(f"Error starting MPV: {e}", file=sys.stderr)
+        debug(f"Error starting MPV: {e}", file=sys.stderr)

 CMDLET = Cmdlet(
     name=".pipe",
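The detached launch above is plain `subprocess` usage: redirect the standard streams to `DEVNULL` and, on Windows, set `DETACHED_PROCESS` so the player outlives the CLI. A minimal sketch under those assumptions (the pipe path is illustrative):

```python
import platform
import subprocess

def start_detached_mpv(ipc_path: str, targets: list[str]) -> None:
    """Launch MPV detached from the CLI, with a JSON IPC server enabled."""
    cmd = ["mpv", f"--input-ipc-server={ipc_path}", "--idle", "--force-window", *targets]
    kwargs = {}
    if platform.system() == "Windows":
        kwargs["creationflags"] = 0x00000008  # DETACHED_PROCESS: survive parent exit
    subprocess.Popen(cmd, stdin=subprocess.DEVNULL,
                     stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, **kwargs)

# start_detached_mpv("/tmp/mpvsocket", ["https://example.com/v.mp4"])
```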
@@ -322,13 +446,33 @@ CMDLET = Cmdlet(
         CmdletArg(
             name="clear",
             type="flag",
-            description="Remove the selected item from the playlist"
+            description="Remove the selected item, or clear entire playlist if no index provided"
         ),
         CmdletArg(
             name="list",
             type="flag",
             description="List items (default)"
         ),
+        CmdletArg(
+            name="play",
+            type="flag",
+            description="Resume playback"
+        ),
+        CmdletArg(
+            name="pause",
+            type="flag",
+            description="Pause playback"
+        ),
+        CmdletArg(
+            name="save",
+            type="flag",
+            description="Save current playlist to database"
+        ),
+        CmdletArg(
+            name="load",
+            type="flag",
+            description="List saved playlists"
+        ),
     ],
     exec=_run
 )
@@ -9,6 +9,7 @@ from __future__ import annotations
 import contextlib
 import hashlib
 import importlib
+import json
 import sys
 import time
 import httpx
@@ -17,8 +18,9 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional, Sequence, Tuple
 from urllib.parse import urlsplit, quote, urljoin

-from helper.logger import log
+from helper.logger import log, debug
 from helper.http_client import HTTPClient
+from helper.utils import ensure_directory, unique_path, unique_preserve_order

 from . import register
 from ._shared import Cmdlet, CmdletArg, SharedArgs, create_pipe_object_result, normalize_result_input
@@ -70,6 +72,38 @@ USER_AGENT = (
 DEFAULT_VIEWPORT: ViewportSize = {"width": 1280, "height": 1200}
 ARCHIVE_TIMEOUT = 30.0

+# Configurable selectors for specific websites
+SITE_SELECTORS: Dict[str, List[str]] = {
+    "twitter.com": [
+        "article[role='article']",
+        "div[data-testid='tweet']",
+        "div[data-testid='cellInnerDiv'] article",
+    ],
+    "x.com": [
+        "article[role='article']",
+        "div[data-testid='tweet']",
+        "div[data-testid='cellInnerDiv'] article",
+    ],
+    "instagram.com": [
+        "article[role='presentation']",
+        "article[role='article']",
+        "div[role='dialog'] article",
+        "section main article",
+    ],
+    "reddit.com": [
+        "shreddit-post",
+        "div[data-testid='post-container']",
+        "div[data-click-id='background']",
+        "article",
+    ],
+    "rumble.com": [
+        "rumble-player, iframe.rumble",
+        "div.video-item--main",
+        "main article",
+    ],
+}
+
+
 class ScreenshotError(RuntimeError):
     """Raised when screenshot capture or upload fails."""
@@ -113,39 +147,6 @@ class ScreenshotResult:
 # Helper Functions
 # ============================================================================

-def _ensure_directory(path: Path) -> None:
-    """Ensure directory exists."""
-    if not isinstance(path, Path):
-        path = Path(path)
-    path.mkdir(parents=True, exist_ok=True)
-
-
-def _unique_path(path: Path) -> Path:
-    """Get unique path by appending numbers if file exists."""
-    if not path.exists():
-        return path
-    stem = path.stem
-    suffix = path.suffix
-    parent = path.parent
-    counter = 1
-    while True:
-        new_path = parent / f"{stem}_{counter}{suffix}"
-        if not new_path.exists():
-            return new_path
-        counter += 1
-
-
-def _unique_preserve_order(items: Sequence[str]) -> List[str]:
-    """Remove duplicates while preserving order."""
-    seen = set()
-    result = []
-    for item in items:
-        if item not in seen:
-            seen.add(item)
-            result.append(item)
-    return result
-
-
 def _slugify_url(url: str) -> str:
     """Convert URL to filesystem-safe slug."""
     parsed = urlsplit(url)
@@ -180,36 +181,11 @@ def _selectors_for_url(url: str) -> List[str]:
     """Return a list of likely content selectors for known platforms."""
     u = url.lower()
     sels: List[str] = []
-    # Twitter/X
-    if "twitter.com" in u or "x.com" in u:
-        sels.extend([
-            "article[role='article']",
-            "div[data-testid='tweet']",
-            "div[data-testid='cellInnerDiv'] article",
-        ])
-    # Instagram
-    if "instagram.com" in u:
-        sels.extend([
-            "article[role='presentation']",
-            "article[role='article']",
-            "div[role='dialog'] article",
-            "section main article",
-        ])
-    # Reddit
-    if "reddit.com" in u:
-        sels.extend([
-            "shreddit-post",
-            "div[data-testid='post-container']",
-            "div[data-click-id='background']",
-            "article",
-        ])
-    # Rumble (video post)
-    if "rumble.com" in u:
-        sels.extend([
-            "rumble-player, iframe.rumble",
-            "div.video-item--main",
-            "main article",
-        ])
+    for domain, selectors in SITE_SELECTORS.items():
+        if domain in u:
+            sels.extend(selectors)
     return sels or ["article"]
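Moving the per-site selectors into the `SITE_SELECTORS` mapping makes the lookup purely data-driven: adding a platform is now a dict entry instead of another `if` branch. The same lookup in isolation (example domain and selectors only):

```python
SITE_SELECTORS = {"example.com": ["main article", "div.post"]}

def selectors_for(url: str) -> list[str]:
    """Collect selectors for every configured domain the URL mentions."""
    u = url.lower()
    sels = [s for domain, rules in SITE_SELECTORS.items() if domain in u for s in rules]
    return sels or ["article"]  # generic fallback

assert selectors_for("https://example.com/post/1") == ["main article", "div.post"]
```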
@@ -321,7 +297,7 @@ def _archive_url(url: str, timeout: float) -> Tuple[List[str], List[str]]:

 def _prepare_output_path(options: ScreenshotOptions) -> Path:
     """Prepare and validate output path for screenshot."""
-    _ensure_directory(options.output_dir)
+    ensure_directory(options.output_dir)
     explicit_format = _normalise_format(options.output_format) if options.output_format else None
     inferred_format: Optional[str] = None
     if options.output_path is not None:
@@ -344,20 +320,23 @@ def _prepare_output_path(options: ScreenshotOptions) -> Path:
         if current_suffix != expected:
             path = path.with_suffix(expected)
     options.output_format = final_format
-    return _unique_path(path)
+    return unique_path(path)


-def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warnings: List[str]) -> None:
+def _capture(options: ScreenshotOptions, destination: Path, warnings: List[str]) -> None:
     """Capture screenshot using Playwright."""
+    debug(f"[_capture] Starting capture for {options.url} -> {destination}")
     playwright = None
     browser = None
     context = None
     try:
-        log("Starting Playwright...", flush=True)
+        debug("Starting Playwright...", flush=True)
         playwright = sync_playwright().start()
         log("Launching Chromium browser...", flush=True)
         format_name = _normalise_format(options.output_format)
         headless = options.headless or format_name == "pdf"
+        debug(f"[_capture] Format: {format_name}, Headless: {headless}")

         if format_name == "pdf" and not options.headless:
             warnings.append("pdf output requires headless Chromium; overriding headless mode")
         browser = playwright.chromium.launch(
@@ -413,11 +392,14 @@ def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warn
         log("Attempting platform-specific content capture...", flush=True)
         try:
             _platform_preprocess(options.url, page, warnings)
-        except Exception:
+        except Exception as e:
+            debug(f"[_capture] Platform preprocess failed: {e}")
             pass
         selectors = list(options.target_selectors or [])
         if not selectors:
             selectors = _selectors_for_url(options.url)

+        debug(f"[_capture] Trying selectors: {selectors}")
         for sel in selectors:
             try:
                 log(f"Trying selector: {sel}", flush=True)
@@ -466,6 +448,7 @@ def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warn
         page.screenshot(**screenshot_kwargs)
         log(f"Screenshot saved to {destination}", flush=True)
     except Exception as exc:
+        debug(f"[_capture] Exception: {exc}")
         raise ScreenshotError(f"Failed to capture screenshot: {exc}") from exc
     finally:
         log("Cleaning up browser resources...", flush=True)
@@ -483,20 +466,22 @@ def _capture_with_playwright(options: ScreenshotOptions, destination: Path, warn

 def _capture_screenshot(options: ScreenshotOptions) -> ScreenshotResult:
     """Capture a screenshot for the given options."""
+    debug(f"[_capture_screenshot] Preparing capture for {options.url}")
     destination = _prepare_output_path(options)
     warnings: List[str] = []
-    _capture_with_playwright(options, destination, warnings)
+    _capture(options, destination, warnings)

-    known_urls = _unique_preserve_order([options.url, *options.known_urls])
+    known_urls = unique_preserve_order([options.url, *options.known_urls])
     archive_urls: List[str] = []
     if options.archive:
+        debug(f"[_capture_screenshot] Archiving enabled for {options.url}")
         archives, archive_warnings = _archive_url(options.url, options.archive_timeout)
         archive_urls.extend(archives)
         warnings.extend(archive_warnings)
         if archives:
-            known_urls = _unique_preserve_order([*known_urls, *archives])
+            known_urls = unique_preserve_order([*known_urls, *archives])

-    applied_tags = _unique_preserve_order(list(tag for tag in options.tags if tag.strip()))
+    applied_tags = unique_preserve_order(list(tag for tag in options.tags if tag.strip()))

     return ScreenshotResult(
         path=destination,
@@ -530,6 +515,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     """
     from ._shared import parse_cmdlet_args

+    debug(f"[_run] screen-shot invoked with args: {args}")
+
     # Help check
     try:
         if any(str(a).lower() in {"-?", "/?", "--help", "-h", "help", "--cmdlet"} for a in args):
@@ -581,6 +568,8 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         log(f"No URLs to process for screen-shot cmdlet", file=sys.stderr)
         return 1

+    debug(f"[_run] URLs to process: {urls_to_process}")
+
     # ========================================================================
     # OUTPUT DIRECTORY RESOLUTION - Priority chain
     # ========================================================================
@@ -617,7 +606,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         screenshot_dir = Path.home() / "Videos"
         log(f"[screen_shot] Using default directory: {screenshot_dir}", flush=True)

-    _ensure_directory(screenshot_dir)
+    ensure_directory(screenshot_dir)

     # ========================================================================
     # PREPARE SCREENSHOT OPTIONS
@@ -156,7 +156,7 @@ CMDLET = Cmdlet(
         CmdletArg("size", description="Filter by size: >100MB, <50MB, =10MB"),
         CmdletArg("type", description="Filter by type: audio, video, image, document"),
         CmdletArg("duration", description="Filter by duration: >10:00, <1:30:00"),
-        CmdletArg("limit", type="integer", description="Limit results (default: 100)"),
+        CmdletArg("limit", type="integer", description="Limit results (default: 45)"),
         CmdletArg("storage", description="Search storage backend: hydrus, local, debrid (default: all searchable)"),
         CmdletArg("provider", description="Search provider: libgen, openlibrary, soulseek, debrid, local (overrides -storage)"),
     ],
@@ -190,7 +190,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
     type_filter: Optional[str] = None
     storage_backend: Optional[str] = None
     provider_name: Optional[str] = None
-    limit = 100
+    limit = 45

     # Simple argument parsing
     i = 0
@@ -216,7 +216,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
         elif low in {"-type", "--type"} and i + 1 < len(args_list):
             type_filter = args_list[i + 1].lower()
             i += 2
-        elif not query and not arg.startswith("-"):
+        elif not arg.startswith("-"):
+            if query:
+                query += " " + arg
+            else:
                 query = arg
             i += 1
         else:
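With this change every bare positional argument is folded into the query rather than only the first, so `search-file big buck bunny` now searches the whole phrase. The accumulation pattern in isolation (sample argv only; assumes two-token `-flag value` pairs, as the parser above consumes them):

```python
def collect_query(args: list[str]) -> str:
    """Join non-flag positional arguments into one query, skipping flag values."""
    query_parts: list[str] = []
    i = 0
    while i < len(args):
        if args[i].startswith("-"):
            i += 2  # skip the flag and its value, as the cmdlet parser does
        else:
            query_parts.append(args[i])
            i += 1
    return " ".join(query_parts)

assert collect_query(["big", "buck", "bunny", "-limit", "10"]) == "big buck bunny"
```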
@@ -246,6 +249,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

     try:
         results_list = []
+        import result_table
+        import importlib
+        importlib.reload(result_table)
+        from result_table import ResultTable
+
+        # Create ResultTable for display
+        table_title = f"Search: {query}"
+        if provider_name:
+            table_title += f" [{provider_name}]"
+        elif storage_backend:
+            table_title += f" [{storage_backend}]"
+
+        table = ResultTable(table_title)
+        table.set_source_command("search-file", args_list)
+
         # Try to search using provider (libgen, soulseek, debrid, openlibrary)
         if provider_name:
@@ -261,10 +278,17 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             debug(f"[search_file] Provider search returned {len(search_result)} results")

             for item in search_result:
+                # Add to table
+                table.add_result(item)
+
+                # Emit to pipeline
                 item_dict = item.to_dict()
                 results_list.append(item_dict)
                 ctx.emit(item_dict)

+            # Set the result table in context for TUI/CLI display
+            ctx.set_last_result_table(table, results_list)
+
             debug(f"[search_file] Emitted {len(results_list)} results")

             # Write results to worker stdout
@@ -313,6 +337,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
             # Emit results and collect for workers table
             if results:
                 for item in results:
+                    # Add to table
+                    table.add_result(item)
+
                     if isinstance(item, dict):
                         normalized = _ensure_storage_columns(item)
                         results_list.append(normalized)
@@ -326,6 +353,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
                     results_list.append(item_dict)
                     ctx.emit(item_dict)

+                # Set the result table in context for TUI/CLI display
+                ctx.set_last_result_table(table, results_list)
+
                 # Write results to worker stdout
                 db.append_worker_stdout(worker_id, json.dumps(results_list, indent=2))
             else:
@@ -54,8 +54,15 @@ def _progress_callback(status: Dict[str, Any]) -> None:
     if event == "downloading":
         percent = status.get("_percent_str", "?")
         speed = status.get("_speed_str", "?")
-        debug(f"Downloading {percent} at {speed}")
+        eta = status.get("_eta_str", "?")
+        # Print progress to stdout with carriage return to update in place
+        sys.stdout.write(f"\r[download] {percent} at {speed} ETA {eta}   ")
+        sys.stdout.flush()
     elif event == "finished":
+        # Clear the progress line
+        sys.stdout.write("\r" + " " * 70 + "\r")
+        sys.stdout.flush()
+        # Log finished message (visible)
         debug(f"✓ Download finished: {status.get('filename')}")
     elif event in ("postprocessing", "processing"):
         debug(f"Post-processing: {status.get('postprocessor')}")
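This callback is yt-dlp's standard progress-hook mechanism: a function registered under the `progress_hooks` option receives a status dict whose `status` key is `downloading`, `finished`, etc., with preformatted display fields like `_percent_str`, `_speed_str`, and `_eta_str`. A minimal wiring sketch (the URL is a placeholder):

```python
import sys
from yt_dlp import YoutubeDL

def hook(status: dict) -> None:
    # yt-dlp supplies preformatted strings intended for display.
    if status.get("status") == "downloading":
        line = f"\r{status.get('_percent_str', '?')} at {status.get('_speed_str', '?')}"
        sys.stdout.write(line)
        sys.stdout.flush()
    elif status.get("status") == "finished":
        sys.stdout.write("\rdone" + " " * 40 + "\n")

with YoutubeDL({"quiet": True, "progress_hooks": [hook]}) as ydl:
    ydl.download(["https://example.com/video"])  # placeholder URL
```

Note the interplay with the `"quiet": True` / `"noprogress": True` options set elsewhere in this diff: suppressing yt-dlp's own console output is what makes the hand-rolled carriage-return progress line readable.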
@@ -100,8 +107,8 @@ def list_formats(url: str, no_playlist: bool = False, playlist_items: Optional[s
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
ydl_opts = {
|
ydl_opts = {
|
||||||
"quiet": False,
|
"quiet": True,
|
||||||
"no_warnings": False,
|
"no_warnings": True,
|
||||||
"socket_timeout": 30,
|
"socket_timeout": 30,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -154,9 +161,9 @@ def _build_ytdlp_options(opts: DownloadOptions) -> Dict[str, Any]:
|
|||||||
|
|
||||||
base_options: Dict[str, Any] = {
|
base_options: Dict[str, Any] = {
|
||||||
"outtmpl": outtmpl,
|
"outtmpl": outtmpl,
|
||||||
"quiet": False,
|
"quiet": True,
|
||||||
"no_warnings": False,
|
"no_warnings": True,
|
||||||
"noprogress": False,
|
"noprogress": True,
|
||||||
"socket_timeout": 30,
|
"socket_timeout": 30,
|
||||||
"retries": 10,
|
"retries": 10,
|
||||||
"fragment_retries": 10,
|
"fragment_retries": 10,
|
||||||
@@ -622,7 +629,7 @@ def download_media(
|
|||||||
_ensure_yt_dlp_ready()
|
_ensure_yt_dlp_ready()
|
||||||
|
|
||||||
ytdl_options = _build_ytdlp_options(opts)
|
ytdl_options = _build_ytdlp_options(opts)
|
||||||
log(f"Starting yt-dlp download: {opts.url}")
|
debug(f"Starting yt-dlp download: {opts.url}")
|
||||||
if debug_logger is not None:
|
if debug_logger is not None:
|
||||||
debug_logger.write_record("ytdlp-start", {"url": opts.url})
|
debug_logger.write_record("ytdlp-start", {"url": opts.url})
|
||||||
|
|
||||||
@@ -700,7 +707,7 @@ def download_media(
|
|||||||
or entry.get("url")
|
or entry.get("url")
|
||||||
)
|
)
|
||||||
|
|
||||||
log(f"✓ Downloaded: {media_path.name} ({len(tags)} tags)")
|
debug(f"✓ Downloaded: {media_path.name} ({len(tags)} tags)")
|
||||||
if debug_logger is not None:
|
if debug_logger is not None:
|
||||||
debug_logger.write_record(
|
debug_logger.write_record(
|
||||||
"downloaded",
|
"downloaded",
|
||||||
|
|||||||
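The quieted yt-dlp options above pair with the new carriage-return progress display; a minimal standalone sketch of the same hook pattern (the URL is a placeholder):

import sys
import yt_dlp

def progress_hook(status):
    # yt-dlp reports status["status"] as "downloading", "finished", etc.
    if status["status"] == "downloading":
        percent = status.get("_percent_str", "?")
        speed = status.get("_speed_str", "?")
        eta = status.get("_eta_str", "?")
        # \r rewrites the same terminal line instead of scrolling
        sys.stdout.write(f"\r[download] {percent} at {speed} ETA {eta}   ")
        sys.stdout.flush()
    elif status["status"] == "finished":
        sys.stdout.write("\r" + " " * 70 + "\r")
        sys.stdout.flush()

ydl_opts = {
    "quiet": True,        # suppress yt-dlp's own console output
    "no_warnings": True,
    "noprogress": True,   # disable the built-in progress bar
    "progress_hooks": [progress_hook],
}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://example.com/video"])  # placeholder URL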
@@ -24,6 +24,7 @@ from typing import Any, Dict, Optional
 import sys
 import shutil
 import requests
+import re

 from helper.logger import log, debug

@@ -49,6 +50,10 @@ class StorageBackend(ABC):
             Exception: If upload fails
         """

+    @abstractmethod
+    def get_name(self) -> str:
+        """Get the unique name of this backend."""
+
     def search(self, query: str, **kwargs: Any) -> list[Dict[str, Any]]:
         """Search for files in backends that support it.
@@ -124,7 +129,7 @@ class LocalStorageBackend(StorageBackend):
         try:
             # Compute file hash
             file_hash = sha256_file(file_path)
-            log(f"File hash: {file_hash}", file=sys.stderr)
+            debug(f"File hash: {file_hash}", file=sys.stderr)

             dest_dir = Path(location).expanduser()
             dest_dir.mkdir(parents=True, exist_ok=True)
@@ -147,13 +152,13 @@ class LocalStorageBackend(StorageBackend):

             if move_file:
                 shutil.move(str(file_path), dest_file)
-                log(f"✅ Local move: {dest_file}", file=sys.stderr)
+                debug(f"Local move: {dest_file}", file=sys.stderr)
             else:
                 shutil.copy2(file_path, dest_file)
-                log(f"✅ Local copy: {dest_file}", file=sys.stderr)
+                debug(f"Local copy: {dest_file}", file=sys.stderr)
             return str(dest_file)
         except Exception as exc:
-            log(f"❌ Local copy failed: {exc}", file=sys.stderr)
+            debug(f"Local copy failed: {exc}", file=sys.stderr)
             raise

     def search(self, query: str, **kwargs: Any) -> list[Dict[str, Any]]:
@@ -199,7 +204,6 @@ class LocalStorageBackend(StorageBackend):

         # Try database search first (much faster than filesystem scan)
         try:
-            debug(f"Connecting to local library DB at {search_dir}")
             db = LocalLibraryDB(search_dir)
             cursor = db.connection.cursor()

@@ -260,8 +264,9 @@ class LocalStorageBackend(StorageBackend):
                 all_tags = [row[0] for row in cursor.fetchall()]

                 results.append({
-                    "name": file_path.name,
-                    "title": file_path.name,
+                    "name": file_path.stem,
+                    "title": file_path.stem,
+                    "ext": file_path.suffix.lstrip('.'),
                     "path": path_str,
                     "target": path_str,
                     "origin": "local",
@@ -283,23 +288,64 @@ class LocalStorageBackend(StorageBackend):
             # 2. Simple tags (without namespace) containing the query
             # NOTE: Does NOT match namespaced tags (e.g., "joe" won't match "channel:Joe Mullan")
             # Use explicit namespace search for that (e.g., "channel:joe*")
-            query_pattern = f"%{query_lower}%"
-            debug(f"Performing filename/tag search: {query_pattern}")

-            cursor.execute("""
+            # Split query into terms for AND logic
+            terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
+            if not terms:
+                terms = [query_lower]
+
+            debug(f"Performing filename/tag search for terms: {terms}")
+
+            # Fetch more results than requested to allow for filtering
+            fetch_limit = (limit or 45) * 50
+
+            # 1. Filename search (AND logic)
+            conditions = ["LOWER(f.file_path) LIKE ?" for _ in terms]
+            params = [f"%{t}%" for t in terms]
+            where_clause = " AND ".join(conditions)
+
+            cursor.execute(f"""
                 SELECT DISTINCT f.id, f.file_path, f.file_size
                 FROM files f
-                WHERE LOWER(f.file_path) LIKE ?
+                WHERE {where_clause}
                 ORDER BY f.file_path
                 LIMIT ?
-            """, (query_pattern, limit or 1000))
+            """, (*params, fetch_limit))

             rows = cursor.fetchall()
-            debug(f"Found {len(rows)} filename matches in DB")
+            debug(f"Found {len(rows)} filename matches in DB (before whole-word filter)")

+            # Compile regex for whole word matching (only if single term, otherwise skip)
+            word_regex = None
+            if len(terms) == 1:
+                term = terms[0]
+                # Check if term contains wildcard characters
+                has_wildcard = '*' in term or '?' in term
+
+                if has_wildcard:
+                    # Use fnmatch for wildcard patterns (e.g., "sie*" matches "SiebeliebenWohl...")
+                    try:
+                        from fnmatch import translate
+                        word_regex = re.compile(translate(term), re.IGNORECASE)
+                    except Exception:
+                        word_regex = None
+                else:
+                    # Use word boundary for exact terms (backwards compatibility)
+                    try:
+                        word_regex = re.compile(r'\b' + re.escape(term) + r'\b', re.IGNORECASE)
+                    except Exception:
+                        word_regex = None
+
             seen_files = set()
             for file_id, file_path_str, size_bytes in rows:
                 if not file_path_str or file_path_str in seen_files:
                     continue
+
+                # Apply whole word filter on filename if single term
+                if word_regex:
+                    p = Path(file_path_str)
+                    if not word_regex.search(p.name):
+                        continue
                 seen_files.add(file_path_str)

                 file_path = Path(file_path_str)
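A standalone sketch of the dynamic WHERE-clause construction used above, runnable against an in-memory SQLite table shaped like the files table referenced in the diff (table layout and sample rows invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE files (id INTEGER PRIMARY KEY, file_path TEXT, file_size INTEGER)")
conn.executemany("INSERT INTO files (file_path, file_size) VALUES (?, ?)", [
    ("/media/Joe Mullan - Interview.mp3", 100),
    ("/media/joe_cooking_show.mkv", 200),
    ("/media/unrelated.txt", 10),
])

query = "joe interview"
# Split the query into terms; every term must match (AND logic)
terms = [t.strip() for t in query.lower().replace(',', ' ').split() if t.strip()]
conditions = ["LOWER(file_path) LIKE ?" for _ in terms]
params = [f"%{t}%" for t in terms]
sql = f"SELECT file_path FROM files WHERE {' AND '.join(conditions)} LIMIT ?"

rows = conn.execute(sql, (*params, 10)).fetchall()
print(rows)  # only the row containing both "joe" and "interview"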
@@ -315,8 +361,9 @@ class LocalStorageBackend(StorageBackend):
                 tags = [row[0] for row in cursor.fetchall()]

                 results.append({
-                    "name": file_path.name,
-                    "title": file_path.name,
+                    "name": file_path.stem,
+                    "title": file_path.stem,
+                    "ext": file_path.suffix.lstrip('.'),
                     "path": path_str,
                     "target": path_str,
                     "origin": "local",
@@ -326,6 +373,12 @@ class LocalStorageBackend(StorageBackend):
                 })

             # Also search for simple tags (without namespace) containing the query
+            # Only perform tag search if single term, or if we want to support multi-term tag search
+            # For now, fallback to single pattern search for tags if multiple terms
+            # (searching for a tag that contains "term1 term2" or "term1,term2")
+            # This is less useful for AND logic across multiple tags, but consistent with previous behavior
+            query_pattern = f"%{query_lower}%"
+
             cursor.execute("""
                 SELECT DISTINCT f.id, f.file_path, f.file_size
                 FROM files f
@@ -354,8 +407,9 @@ class LocalStorageBackend(StorageBackend):
                 tags = [row[0] for row in cursor.fetchall()]

                 results.append({
-                    "name": file_path.name,
-                    "title": file_path.name,
+                    "name": file_path.stem,
+                    "title": file_path.stem,
+                    "ext": file_path.suffix.lstrip('.'),
                     "path": path_str,
                     "target": path_str,
                     "origin": "local",
@@ -392,8 +446,9 @@ class LocalStorageBackend(StorageBackend):
                 tags = [row[0] for row in cursor.fetchall()]

                 results.append({
-                    "name": file_path.name,
-                    "title": file_path.name,
+                    "name": file_path.stem,
+                    "title": file_path.stem,
+                    "ext": file_path.suffix.lstrip('.'),
                     "path": path_str,
                     "target": path_str,
                     "origin": "local",
@@ -417,6 +472,11 @@ class LocalStorageBackend(StorageBackend):
         recursive = kwargs.get("recursive", True)
         pattern = "**/*" if recursive else "*"

+        # Split query into terms for AND logic
+        terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
+        if not terms:
+            terms = [query_lower]
+
         count = 0
         for file_path in search_dir.glob(pattern):
             if not file_path.is_file():
@@ -425,14 +485,26 @@ class LocalStorageBackend(StorageBackend):
             if lower_name.endswith('.tags') or lower_name.endswith('.metadata') \
                     or lower_name.endswith('.notes') or lower_name.endswith('.tags.txt'):
                 continue
-            if not (match_all or query_lower in lower_name):
-                continue
+
+            if not match_all:
+                # Check if ALL terms are present in the filename
+                # For single terms with wildcards, use fnmatch; otherwise use substring matching
+                if len(terms) == 1 and ('*' in terms[0] or '?' in terms[0]):
+                    # Wildcard pattern matching for single term
+                    from fnmatch import fnmatch
+                    if not fnmatch(lower_name, terms[0]):
+                        continue
+                else:
+                    # Substring matching for all terms (AND logic)
+                    if not all(term in lower_name for term in terms):
+                        continue

             size_bytes = file_path.stat().st_size
             path_str = str(file_path)
             results.append({
-                "name": file_path.name,
-                "title": file_path.name,
+                "name": file_path.stem,
+                "title": file_path.stem,
+                "ext": file_path.suffix.lstrip('.'),
                 "path": path_str,
                 "target": path_str,
                 "origin": "local",
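A quick illustration of the fnmatch fallback above (patterns and filenames invented):

from fnmatch import fnmatch

# Single-term wildcard queries use shell-style patterns...
print(fnmatch("siebelieben_wohl.mkv", "sie*"))   # True
print(fnmatch("siebelieben_wohl.mkv", "sie"))    # False - no wildcard means exact match
# ...while multi-term queries fall back to substring AND matching
name = "joe mullan interview.mp3"
print(all(term in name for term in ["joe", "interview"]))  # True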
@@ -545,7 +617,7 @@ class HydrusStorageBackend(StorageBackend):
             raise Exception(f"Hydrus response missing file hash: {response}")

         file_hash = hydrus_hash
-        log(f"✅ File uploaded to Hydrus: {file_hash}", file=sys.stderr)
+        log(f"Hydrus: {file_hash}", file=sys.stderr)

         # Add tags if provided
         if tags:
@@ -637,7 +709,8 @@ class HydrusStorageBackend(StorageBackend):
         # Fetch metadata for the found files
         results = []
         query_lower = query.lower().strip()
-        search_terms = set(query_lower.split())  # For substring matching
+        # Split by comma or space for AND logic
+        search_terms = set(query_lower.replace(',', ' ').split())  # For substring matching

         if file_ids:
             metadata = client.fetch_file_metadata(file_ids=file_ids)
@@ -706,8 +779,23 @@ class HydrusStorageBackend(StorageBackend):
                 })
             else:
                 # Free-form search: check if search terms match the title or tags
-                # Match if ANY search term is found in title or tags (OR logic)
-                if query_lower == "*" or any(term in all_tags_str or term in title.lower() for term in search_terms):
+                # Match if ALL search terms are found in title or tags (AND logic)
+                # AND use whole word matching
+
+                # Combine title and tags for searching
+                searchable_text = (title + " " + all_tags_str).lower()
+
+                match = True
+                if query_lower != "*":
+                    for term in search_terms:
+                        # Regex for whole word: \bterm\b
+                        # Escape term to handle special chars
+                        pattern = r'\b' + re.escape(term) + r'\b'
+                        if not re.search(pattern, searchable_text):
+                            match = False
+                            break
+
+                if match:
                     results.append({
                         "hash": hash_hex,
                         "hash_hex": hash_hex,
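The whole-word matching above relies on re.escape plus \b anchors; a minimal demonstration (sample strings invented):

import re

searchable_text = "joe mullan live talk"

def whole_word(term: str, text: str) -> bool:
    # \b anchors the term at word boundaries; re.escape protects
    # any regex metacharacters in the user's query
    return re.search(r'\b' + re.escape(term) + r'\b', text) is not None

print(whole_word("joe", searchable_text))   # True  - whole word match
print(whole_word("jo", searchable_text))    # False - substring, not a whole word
print(whole_word("live", searchable_text))  # True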
@@ -820,6 +908,11 @@ class DebridStorageBackend(StorageBackend):
         # "*" means "match all" - include all magnets
         match_all = query_lower == "*"

+        # Split query into terms for AND logic
+        terms = [t.strip() for t in query_lower.replace(',', ' ').split() if t.strip()]
+        if not terms:
+            terms = [query_lower]
+
         for magnet in magnets:
             filename = magnet.get('filename', '').lower()
             status_code = magnet.get('statusCode', 0)
@@ -830,7 +923,8 @@ class DebridStorageBackend(StorageBackend):
                 continue

             # Match query against filename (or match all if query is "*")
-            if not match_all and query_lower not in filename:
-                continue
+            if not match_all:
+                if not all(term in filename for term in terms):
+                    continue

             matching_magnet_ids.append(magnet_id)
@@ -920,6 +1014,102 @@ class DebridStorageBackend(StorageBackend):
         return result


+class MatrixStorageBackend(StorageBackend):
+    """File storage backend for Matrix (Element) chat rooms."""
+
+    def get_name(self) -> str:
+        return "matrix"
+
+    def upload(self, file_path: Path, **kwargs: Any) -> str:
+        """Upload file to Matrix room.
+
+        Requires 'config' in kwargs with 'storage.matrix' settings:
+        - homeserver: URL of homeserver (e.g. https://matrix.org)
+        - user_id: User ID (e.g. @user:matrix.org)
+        - access_token: Access token (preferred) OR password
+        - room_id: Room ID to upload to (e.g. !roomid:matrix.org)
+        """
+        config = kwargs.get('config', {})
+        if not config:
+            raise ValueError("Config required for Matrix upload")
+
+        matrix_conf = config.get('storage', {}).get('matrix', {})
+        if not matrix_conf:
+            raise ValueError("Matrix storage not configured in config.json")
+
+        homeserver = matrix_conf.get('homeserver')
+        # user_id = matrix_conf.get('user_id')  # Not strictly needed if we have token
+        access_token = matrix_conf.get('access_token')
+        room_id = matrix_conf.get('room_id')
+
+        if not homeserver or not room_id:
+            raise ValueError("Matrix homeserver and room_id required")
+
+        # Ensure homeserver has protocol
+        if not homeserver.startswith('http'):
+            homeserver = f"https://{homeserver}"
+
+        # Login if no access token (optional implementation, for now assume token)
+        if not access_token:
+            raise ValueError("Matrix access_token required (login not yet implemented)")
+
+        # 1. Upload Media
+        upload_url = f"{homeserver}/_matrix/media/r3/upload"
+        headers = {
+            "Authorization": f"Bearer {access_token}",
+            "Content-Type": "application/octet-stream"  # Or guess mime type
+        }
+
+        import mimetypes
+        mime_type, _ = mimetypes.guess_type(file_path)
+        if mime_type:
+            headers["Content-Type"] = mime_type
+
+        filename = file_path.name
+
+        try:
+            with open(file_path, 'rb') as f:
+                resp = requests.post(upload_url, headers=headers, data=f, params={"filename": filename})
+
+            if resp.status_code != 200:
+                raise Exception(f"Matrix upload failed: {resp.text}")
+
+            content_uri = resp.json().get('content_uri')
+            if not content_uri:
+                raise Exception("No content_uri returned from Matrix upload")
+
+            # 2. Send Message
+            send_url = f"{homeserver}/_matrix/client/r0/rooms/{room_id}/send/m.room.message"
+
+            # Determine msgtype
+            msgtype = "m.file"
+            if mime_type:
+                if mime_type.startswith("image/"): msgtype = "m.image"
+                elif mime_type.startswith("video/"): msgtype = "m.video"
+                elif mime_type.startswith("audio/"): msgtype = "m.audio"
+
+            payload = {
+                "msgtype": msgtype,
+                "body": filename,
+                "url": content_uri,
+                "info": {
+                    "mimetype": mime_type,
+                    "size": file_path.stat().st_size
+                }
+            }
+
+            resp = requests.post(send_url, headers=headers, json=payload)
+            if resp.status_code != 200:
+                raise Exception(f"Matrix send message failed: {resp.text}")
+
+            event_id = resp.json().get('event_id')
+            return f"matrix://{room_id}/{event_id}"
+
+        except Exception as e:
+            log(f"❌ Matrix upload error: {e}", file=sys.stderr)
+            raise
+
+
 class FileStorage:
     """Unified file storage interface supporting multiple backend services.

@@ -966,6 +1156,9 @@ class FileStorage:
         if debrid_api_key:
             self._backends["debrid"] = DebridStorageBackend(api_key=debrid_api_key)

+        # Include Matrix backend
+        self._backends["matrix"] = MatrixStorageBackend()
+
     def __getitem__(self, backend_name: str) -> StorageBackend:
         """Get a storage backend by name.

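A hedged usage sketch for the new backend; the config shape follows the upload() docstring above, but all concrete values are placeholders:

from pathlib import Path

config = {
    "storage": {
        "matrix": {
            "homeserver": "https://matrix.example.org",   # placeholder
            "access_token": "syt_example_token",          # placeholder
            "room_id": "!example:matrix.example.org",     # placeholder
        }
    }
}

backend = MatrixStorageBackend()
# upload() reads storage.matrix from the config passed via kwargs;
# assumes song.mp3 exists in the working directory
result = backend.upload(Path("song.mp3"), config=config)
print(result)  # matrix://<room_id>/<event_id>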
@@ -1411,7 +1411,7 @@ def get_client(config: dict[str, Any]) -> HydrusClient:
         cached_client = _hydrus_client_cache[cache_key]
         # If cached client has a session key, reuse it (don't re-acquire)
         if hasattr(cached_client, '_session_key') and cached_client._session_key:
-            debug(f"Reusing cached session key for {hydrus_url}")
+            # debug(f"Reusing cached session key for {hydrus_url}")
             return cached_client
         # If no session key in cache, try to get one
         try:
@@ -231,6 +231,16 @@ class LocalLibraryDB:
             )
         """)

+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS playlists (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                name TEXT UNIQUE NOT NULL,
+                items TEXT NOT NULL,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        """)
+
         # Worker tracking tables (drop legacy workers table if still present)
         self._ensure_worker_tables(cursor)

@@ -1386,6 +1396,104 @@ class LocalLibrarySearchOptimizer:
         """Fast tag-based search using database."""
         if not self.db:
             return []

+        try:
+            cursor = self.db.connection.cursor()
+            cursor.execute("""
+                SELECT f.file_path
+                FROM files f
+                JOIN tags t ON f.id = t.file_id
+                WHERE t.tag LIKE ?
+                LIMIT ?
+            """, (f"%{tag}%", limit))
+
+            return [Path(row[0]) for row in cursor.fetchall()]
+        except Exception as e:
+            logger.error(f"Tag search failed: {e}")
+            return []
+
+    def save_playlist(self, name: str, items: List[Dict[str, Any]]) -> bool:
+        """Save a playlist to the database."""
+        if not self.db:
+            return False
+        try:
+            cursor = self.db.connection.cursor()
+            items_json = json.dumps(items)
+            cursor.execute("""
+                INSERT INTO playlists (name, items, updated_at)
+                VALUES (?, ?, CURRENT_TIMESTAMP)
+                ON CONFLICT(name) DO UPDATE SET
+                    items = excluded.items,
+                    updated_at = CURRENT_TIMESTAMP
+            """, (name, items_json))
+            self.db.connection.commit()
+            return True
+        except Exception as e:
+            logger.error(f"Failed to save playlist {name}: {e}")
+            return False
+
+    def get_playlists(self) -> List[Dict[str, Any]]:
+        """Get all saved playlists."""
+        if not self.db:
+            return []
+        try:
+            cursor = self.db.connection.cursor()
+            cursor.execute("SELECT id, name, items, updated_at FROM playlists ORDER BY updated_at DESC")
+            results = []
+            for row in cursor.fetchall():
+                try:
+                    items = json.loads(row['items'])
+                except json.JSONDecodeError:
+                    items = []
+                results.append({
+                    'id': row['id'],
+                    'name': row['name'],
+                    'items': items,
+                    'updated_at': row['updated_at']
+                })
+            return results
+        except Exception as e:
+            logger.error(f"Failed to get playlists: {e}")
+            return []
+
+    def get_playlist(self, name: str) -> Optional[List[Dict[str, Any]]]:
+        """Get a specific playlist by name."""
+        if not self.db:
+            return None
+        try:
+            cursor = self.db.connection.cursor()
+            cursor.execute("SELECT items FROM playlists WHERE name = ?", (name,))
+            row = cursor.fetchone()
+            if row:
+                try:
+                    return json.loads(row['items'])
+                except json.JSONDecodeError:
+                    return []
+            return None
+        except Exception as e:
+            logger.error(f"Failed to get playlist {name}: {e}")
+            return None
+
+    def get_playlist_by_id(self, playlist_id: int) -> Optional[Tuple[str, List[Dict[str, Any]]]]:
+        """Get a specific playlist by ID. Returns (name, items)."""
+        if not self.db:
+            return None
+        try:
+            cursor = self.db.connection.cursor()
+            cursor.execute("SELECT name, items FROM playlists WHERE id = ?", (playlist_id,))
+            row = cursor.fetchone()
+            if row:
+                try:
+                    items = json.loads(row['items'])
+                    return (row['name'], items)
+                except json.JSONDecodeError:
+                    return (row['name'], [])
+            return None
+        except Exception as e:
+            logger.error(f"Failed to get playlist ID {playlist_id}: {e}")
+            return None
+
+        if not self.db:
+            return []
         return self.db.search_by_tag(tag, limit)

     def search_by_hash(self, file_hash: str) -> Optional[Path]:
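The playlist persistence above leans on SQLite's UPSERT; a self-contained sketch of the same pattern (in-memory database, invented rows):

import json
import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute("""
    CREATE TABLE playlists (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT UNIQUE NOT NULL,
        items TEXT NOT NULL,
        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
""")

def save_playlist(name, items):
    # INSERT ... ON CONFLICT(name) DO UPDATE keeps exactly one row per name
    conn.execute("""
        INSERT INTO playlists (name, items, updated_at)
        VALUES (?, ?, CURRENT_TIMESTAMP)
        ON CONFLICT(name) DO UPDATE SET
            items = excluded.items,
            updated_at = CURRENT_TIMESTAMP
    """, (name, json.dumps(items)))
    conn.commit()

save_playlist("favs", [{"title": "a"}])
save_playlist("favs", [{"title": "a"}, {"title": "b"}])  # updates, no duplicate row
row = conn.execute("SELECT items FROM playlists WHERE name = ?", ("favs",)).fetchone()
print(json.loads(row["items"]))  # [{'title': 'a'}, {'title': 'b'}]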
290 helper/mpv_ipc.py (new file)
@@ -0,0 +1,290 @@
+"""MPV IPC client for cross-platform communication.
+
+This module provides a cross-platform interface to communicate with mpv
+using either named pipes (Windows) or Unix domain sockets (Linux/macOS).
+
+This is the central hub for all Python-mpv IPC communication. The Lua script
+should use the Python CLI, which uses this module to manage mpv connections.
+"""
+
+import json
+import os
+import platform
+import socket
+import time as _time
+from typing import Any, Dict, Optional, List
+
+from helper.logger import debug
+
+
+# Fixed pipe name for persistent MPV connection across all Python sessions
+FIXED_IPC_PIPE_NAME = "mpv-medeia-macina"
+
+
+class MPVIPCError(Exception):
+    """Raised when MPV IPC communication fails."""
+    pass
+
+
+def get_ipc_pipe_path() -> str:
+    """Get the fixed IPC pipe/socket path for persistent MPV connection.
+
+    Uses a fixed name so all playback sessions connect to the same MPV
+    window/process instead of creating new instances.
+
+    Returns:
+        Path to IPC pipe (Windows) or socket (Linux/macOS)
+    """
+    system = platform.system()
+
+    if system == "Windows":
+        return f"\\\\.\\pipe\\{FIXED_IPC_PIPE_NAME}"
+    elif system == "Darwin":  # macOS
+        return f"/tmp/{FIXED_IPC_PIPE_NAME}.sock"
+    else:  # Linux and others
+        return f"/tmp/{FIXED_IPC_PIPE_NAME}.sock"
+
+
+class MPVIPCClient:
+    """Client for communicating with mpv via IPC socket/pipe.
+
+    This is the unified interface for all Python code to communicate with mpv.
+    It handles platform-specific differences (Windows named pipes vs Unix sockets).
+    """
+
+    def __init__(self, socket_path: Optional[str] = None, timeout: float = 5.0):
+        """Initialize MPV IPC client.
+
+        Args:
+            socket_path: Path to IPC socket/pipe. If None, uses the fixed persistent path.
+            timeout: Socket timeout in seconds.
+        """
+        self.timeout = timeout
+        self.socket_path = socket_path or get_ipc_pipe_path()
+        self.sock = None
+        self.is_windows = platform.system() == "Windows"
+
+    def connect(self) -> bool:
+        """Connect to mpv IPC socket.
+
+        Returns:
+            True if connection successful, False otherwise.
+        """
+        try:
+            if self.is_windows:
+                # Windows named pipes
+                try:
+                    # Try to open the named pipe
+                    self.sock = open(self.socket_path, 'r+b', buffering=0)
+                    return True
+                except (OSError, IOError) as exc:
+                    debug(f"Failed to connect to MPV named pipe: {exc}")
+                    return False
+            else:
+                # Unix domain socket (Linux, macOS)
+                if not os.path.exists(self.socket_path):
+                    debug(f"IPC socket not found: {self.socket_path}")
+                    return False
+
+                self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+                self.sock.settimeout(self.timeout)
+                self.sock.connect(self.socket_path)
+                return True
+        except Exception as exc:
+            debug(f"Failed to connect to MPV IPC: {exc}")
+            self.sock = None
+            return False
+
+    def send_command(self, command_data: Dict[str, Any] | List[Any]) -> Optional[Dict[str, Any]]:
+        """Send a command to mpv and get response.
+
+        Args:
+            command_data: Command dict (e.g. {"command": [...]}) or list (e.g. ["loadfile", ...])
+
+        Returns:
+            Response dict with 'error' key (value 'success' on success), or None on error.
+        """
+        if not self.sock:
+            if not self.connect():
+                return None
+
+        try:
+            # Format command as JSON (mpv IPC protocol)
+            if isinstance(command_data, list):
+                request = {"command": command_data}
+            else:
+                request = command_data
+
+            # Add request_id if not present to match response
+            if "request_id" not in request:
+                request["request_id"] = int(_time.time() * 1000) % 100000
+
+            payload = json.dumps(request) + "\n"
+
+            # Send command
+            if self.is_windows:
+                self.sock.write(payload.encode('utf-8'))
+                self.sock.flush()
+            else:
+                self.sock.sendall(payload.encode('utf-8'))
+
+            # Receive response
+            # We need to read lines until we find the one with matching request_id
+            # or until timeout/error. MPV might send events in between.
+            start_time = _time.time()
+            while _time.time() - start_time < self.timeout:
+                response_data = b""
+                if self.is_windows:
+                    try:
+                        response_data = self.sock.readline()
+                    except (OSError, IOError):
+                        return None
+                else:
+                    try:
+                        # This is simplistic for Unix socket (might not get full line)
+                        # But for now assuming MPV sends line-buffered JSON
+                        chunk = self.sock.recv(4096)
+                        if not chunk:
+                            break
+                        response_data = chunk
+                        # TODO: Handle partial lines if needed
+                    except socket.timeout:
+                        return None
+
+                if not response_data:
+                    break
+
+                try:
+                    lines = response_data.decode('utf-8').strip().split('\n')
+                    for line in lines:
+                        if not line: continue
+                        resp = json.loads(line)
+
+                        # Check if this is the response to our request
+                        if resp.get("request_id") == request.get("request_id"):
+                            return resp
+
+                        # If it's an error without request_id (shouldn't happen for commands)
+                        if "error" in resp and "request_id" not in resp:
+                            # Might be an event or async error
+                            pass
+                except json.JSONDecodeError:
+                    pass
+
+            return None
+        except Exception as exc:
+            debug(f"Error sending command to MPV: {exc}")
+            self.disconnect()
+            return None
+
+    def disconnect(self) -> None:
+        """Disconnect from mpv IPC socket."""
+        if self.sock:
+            try:
+                self.sock.close()
+            except Exception:
+                pass
+            self.sock = None
+
+    def __del__(self) -> None:
+        """Cleanup on object destruction."""
+        self.disconnect()
+
+    def __enter__(self):
+        """Context manager entry."""
+        self.connect()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit."""
+        self.disconnect()
+
+
+def send_to_mpv(file_url: str, title: str, headers: Optional[Dict[str, str]] = None,
+                append: bool = True) -> bool:
+    """Send a file to be played in the existing MPV instance via IPC.
+
+    This attempts to send to an existing MPV instance. If it fails, the calling
+    code should start a new MPV instance with the IPC pipe.
+
+    Args:
+        file_url: URL or path to file to play
+        title: Display title for the file
+        headers: Optional HTTP headers (dict)
+        append: If True, append to playlist; if False, replace
+
+    Returns:
+        True if successfully sent to existing MPV, False if pipe unavailable.
+    """
+    # Try to connect using the robust client
+    client = get_mpv_client()
+    if not client:
+        return False
+
+    try:
+        # Command 1: Set headers if provided
+        if headers:
+            header_str = ",".join([f"{k}: {v}" for k, v in headers.items()])
+            cmd_headers = {
+                "command": ["set_property", "http-header-fields", header_str],
+                "request_id": 0
+            }
+            client.send_command(cmd_headers)
+
+        # Command 2: Load file
+        # Use memory:// M3U to preserve title in playlist if provided
+        # This is required for YouTube URLs and proper playlist display
+        if title:
+            # Sanitize title for M3U (remove newlines)
+            safe_title = title.replace("\n", " ").replace("\r", "")
+            # M3U format: #EXTM3U\n#EXTINF:-1,Title\nURL
+            m3u_content = f"#EXTM3U\n#EXTINF:-1,{safe_title}\n{file_url}\n"
+            target = f"memory://{m3u_content}"
+        else:
+            target = file_url
+
+        load_mode = "append-play" if append else "replace"
+        cmd_load = {
+            "command": ["loadfile", target, load_mode],
+            "request_id": 1
+        }
+
+        resp = client.send_command(cmd_load)
+        if not resp or resp.get('error') != 'success':
+            debug(f"MPV loadfile failed: {resp}")
+            return False
+
+        # Command 3: Set title (metadata for display) - still useful for window title
+        if title:
+            safe_title_prop = title.replace('"', '\\"')
+            cmd_title = {
+                "command": ["set_property", "force-media-title", safe_title_prop],
+                "request_id": 2
+            }
+            client.send_command(cmd_title)
+
+        debug(f"Sent to existing MPV: {title}")
+        return True
+
+    except Exception as e:
+        debug(f"Error in send_to_mpv: {e}")
+        return False
+    finally:
+        client.disconnect()
+
+
+def get_mpv_client(socket_path: Optional[str] = None) -> Optional[MPVIPCClient]:
+    """Get an MPV IPC client, attempting to connect.
+
+    Args:
+        socket_path: Custom socket path (uses default if None)
+
+    Returns:
+        Connected MPVIPCClient or None if connection fails.
+    """
+    client = MPVIPCClient(socket_path=socket_path)
+    if client.connect():
+        return client
+    return None
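A short usage sketch for the new module; it assumes an mpv instance is already listening on the fixed pipe (started with --input-ipc-server set to the path from get_ipc_pipe_path()):

from helper.mpv_ipc import MPVIPCClient, get_ipc_pipe_path, send_to_mpv

print(get_ipc_pipe_path())  # e.g. /tmp/mpv-medeia-macina.sock on Linux

# Raw command round-trip via the context manager
with MPVIPCClient() as client:
    resp = client.send_command(["get_property", "playlist-count"])
    if resp and resp.get("error") == "success":
        print("playlist entries:", resp.get("data"))

# Higher-level helper: queue a file with a display title
ok = send_to_mpv("/music/track.flac", "My Track", append=True)  # path is a placeholder
if not ok:
    print("no running mpv; caller should spawn one with the IPC pipe")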
@@ -1660,7 +1660,7 @@ class FileProvider(ABC):
         self.name = self.__class__.__name__.replace("FileProvider", "").lower()

     @abstractmethod
-    def upload(self, file_path: str) -> str:
+    def upload(self, file_path: str, **kwargs: Any) -> str:
         """Upload a file and return the URL."""
         pass

@@ -1677,7 +1677,7 @@ class ZeroXZeroFileProvider(FileProvider):
         self.name = "0x0"
         self.base_url = "https://0x0.st"

-    def upload(self, file_path: str) -> str:
+    def upload(self, file_path: str, **kwargs: Any) -> str:
         """Upload file to 0x0.st."""
         from helper.http_client import HTTPClient
         import os
@@ -1707,9 +1707,137 @@ class ZeroXZeroFileProvider(FileProvider):
         return True


+class MatrixFileProvider(FileProvider):
+    """File provider for Matrix (Element) chat rooms."""
+
+    def __init__(self, config: Optional[Dict[str, Any]] = None):
+        super().__init__(config)
+        self.name = "matrix"
+
+    def validate(self) -> bool:
+        """Check if Matrix is configured."""
+        if not self.config: return False
+        matrix_conf = self.config.get('storage', {}).get('matrix', {})
+        return bool(matrix_conf.get('homeserver') and matrix_conf.get('room_id') and (matrix_conf.get('access_token') or matrix_conf.get('password')))
+
+    def upload(self, file_path: str, **kwargs: Any) -> str:
+        """Upload file to Matrix room."""
+        import requests
+        import mimetypes
+        from pathlib import Path
+        import json
+
+        debug(f"[Matrix] Starting upload for: {file_path}")
+        debug(f"[Matrix] kwargs: {kwargs}")
+
+        path = Path(file_path)
+        if not path.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+
+        matrix_conf = self.config.get('storage', {}).get('matrix', {})
+        homeserver = matrix_conf.get('homeserver')
+        access_token = matrix_conf.get('access_token')
+        room_id = matrix_conf.get('room_id')
+
+        if not homeserver.startswith('http'):
+            homeserver = f"https://{homeserver}"
+
+        # 1. Upload Media
+        # Use v3 API
+        upload_url = f"{homeserver}/_matrix/media/v3/upload"
+        headers = {
+            "Authorization": f"Bearer {access_token}",
+            "Content-Type": "application/octet-stream"
+        }
+
+        mime_type, _ = mimetypes.guess_type(path)
+        if mime_type:
+            headers["Content-Type"] = mime_type
+
+        filename = path.name
+
+        debug(f"[Matrix] Uploading media to {upload_url} with mime_type: {mime_type}")
+
+        with open(path, 'rb') as f:
+            resp = requests.post(upload_url, headers=headers, data=f, params={"filename": filename})
+
+        if resp.status_code != 200:
+            raise Exception(f"Matrix upload failed: {resp.text}")
+
+        content_uri = resp.json().get('content_uri')
+        if not content_uri:
+            raise Exception("No content_uri returned from Matrix upload")
+
+        debug(f"[Matrix] Media uploaded, content_uri: {content_uri}")
+
+        # 2. Send Message
+        # Use v3 API
+        send_url = f"{homeserver}/_matrix/client/v3/rooms/{room_id}/send/m.room.message"
+
+        # Determine msgtype with better fallback for audio
+        msgtype = "m.file"
+        ext = path.suffix.lower()
+
+        # Explicit check for common audio extensions to force m.audio
+        # This prevents audio files being treated as generic files or video
+        AUDIO_EXTS = {'.mp3', '.flac', '.wav', '.m4a', '.aac', '.ogg', '.opus', '.wma', '.mka', '.alac'}
+        VIDEO_EXTS = {'.mp4', '.mkv', '.webm', '.mov', '.avi', '.flv', '.mpg', '.mpeg', '.ts', '.m4v', '.wmv'}
+        IMAGE_EXTS = {'.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff'}
+
+        if ext in AUDIO_EXTS:
+            msgtype = "m.audio"
+        elif ext in VIDEO_EXTS:
+            msgtype = "m.video"
+        elif ext in IMAGE_EXTS:
+            msgtype = "m.image"
+        elif mime_type:
+            if mime_type.startswith("audio/"): msgtype = "m.audio"
+            elif mime_type.startswith("video/"): msgtype = "m.video"
+            elif mime_type.startswith("image/"): msgtype = "m.image"
+
+        debug(f"[Matrix] Determined msgtype: {msgtype} (ext: {ext}, mime: {mime_type})")
+
+        info = {
+            "mimetype": mime_type,
+            "size": path.stat().st_size
+        }
+
+        # Try to get duration for audio/video
+        if msgtype in ("m.audio", "m.video"):
+            try:
+                # Try mutagen first (lightweight)
+                # Use dynamic import to avoid top-level dependency if not installed
+                # Note: mutagen.File is available at package level at runtime but type checkers might miss it
+                import mutagen  # type: ignore
+                m = mutagen.File(str(path))  # type: ignore
+                if m and m.info and hasattr(m.info, 'length'):
+                    duration_ms = int(m.info.length * 1000)
+                    info['duration'] = duration_ms
+                    debug(f"[Matrix] Extracted duration: {duration_ms}ms")
+            except Exception as e:
+                debug(f"[Matrix] Failed to extract duration: {e}")
+
+        payload = {
+            "msgtype": msgtype,
+            "body": filename,
+            "url": content_uri,
+            "info": info
+        }
+
+        debug(f"[Matrix] Sending message payload: {json.dumps(payload, indent=2)}")
+
+        resp = requests.post(send_url, headers=headers, json=payload)
+        if resp.status_code != 200:
+            raise Exception(f"Matrix send message failed: {resp.text}")
+
+        event_id = resp.json().get('event_id')
+        return f"https://matrix.to/#/{room_id}/{event_id}"
+
+
 # File provider registry
 _FILE_PROVIDERS = {
     "0x0": ZeroXZeroFileProvider,
+    "matrix": MatrixFileProvider,
 }

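A hedged sketch of dispatching through the registry; how callers obtain the config dict is assumed (loaded from config.json elsewhere):

# Pick a provider by name and upload; "matrix" requires configured
# storage.matrix settings, while "0x0" needs none
provider_cls = _FILE_PROVIDERS["matrix"]
provider = provider_cls(config)           # config dict assumed from config.json
if provider.validate():                   # checks homeserver/room_id/token presence
    url = provider.upload("/tmp/demo.png")  # placeholder path
    print(url)                            # https://matrix.to/#/<room_id>/<event_id>
else:
    print("matrix provider not configured")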
@@ -7,7 +7,7 @@ disables Hydrus features if the API is unavailable.
 import logging
 import sys

-from helper.logger import log
+from helper.logger import log, debug
 from typing import Tuple, Optional, Dict, Any
 from pathlib import Path

@@ -28,6 +28,11 @@ _MPV_AVAILABLE: Optional[bool] = None
 _MPV_UNAVAILABLE_REASON: Optional[str] = None
 _MPV_CHECK_COMPLETE = False

+# Global state for Matrix availability
+_MATRIX_AVAILABLE: Optional[bool] = None
+_MATRIX_UNAVAILABLE_REASON: Optional[str] = None
+_MATRIX_CHECK_COMPLETE = False
+

 def check_hydrus_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
     """Check if Hydrus API is available by pinging it.
@@ -80,20 +85,16 @@ def initialize_hydrus_health_check(config: Dict[str, Any]) -> None:
         _HYDRUS_CHECK_COMPLETE = True

         if is_available:
-            log("✅ Hydrus: ENABLED - All Hydrus features available", file=sys.stderr)
+            debug("✅ Hydrus: ENABLED - All Hydrus features available", file=sys.stderr)
         else:
-            log(f"⚠️ Hydrus: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)
-            log("- Export functionality disabled", file=sys.stderr)
-            log("- Hydrus library features disabled", file=sys.stderr)
-            log("- Hydrus tag operations disabled", file=sys.stderr)
-            log("→ Local storage and All-Debrid features still available", file=sys.stderr)
+            debug(f"⚠️ Hydrus: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)

     except Exception as e:
         logger.error(f"[Startup] Failed to initialize Hydrus health check: {e}", exc_info=True)
         _HYDRUS_AVAILABLE = False
         _HYDRUS_UNAVAILABLE_REASON = str(e)
         _HYDRUS_CHECK_COMPLETE = True
-        log(f"⚠️ Hydrus: DISABLED - Error during health check: {e}", file=sys.stderr)
+        debug(f"⚠️ Hydrus: DISABLED - Error during health check: {e}", file=sys.stderr)


 def check_debrid_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
@@ -176,13 +177,10 @@ def initialize_debrid_health_check(config: Dict[str, Any]) -> None:
         _DEBRID_CHECK_COMPLETE = True

         if is_available:
-            log("✅ Debrid: ENABLED - All Debrid features available", file=sys.stderr)
+            debug("✅ Debrid: ENABLED - All Debrid features available", file=sys.stderr)
             logger.info("[Startup] Debrid health check PASSED")
         else:
-            log(f"⚠️ Debrid: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)
-            log("- Debrid export disabled", file=sys.stderr)
-            log("- Debrid library features disabled", file=sys.stderr)
-            log("→ Local storage and Hydrus features still available", file=sys.stderr)
+            debug(f"⚠️ Debrid: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)
             logger.warning(f"[Startup] Debrid health check FAILED: {reason}")

     except Exception as e:
@@ -190,7 +188,7 @@ def initialize_debrid_health_check(config: Dict[str, Any]) -> None:
         _DEBRID_AVAILABLE = False
         _DEBRID_UNAVAILABLE_REASON = str(e)
         _DEBRID_CHECK_COMPLETE = True
-        log(f"⚠️ Debrid: DISABLED - Error during health check: {e}", file=sys.stderr)
+        debug(f"⚠️ Debrid: DISABLED - Error during health check: {e}", file=sys.stderr)


 def check_mpv_availability() -> Tuple[bool, Optional[str]]:
@@ -263,11 +261,11 @@ def initialize_mpv_health_check() -> None:
         _MPV_CHECK_COMPLETE = True

         if is_available:
-            log("✅ MPV: ENABLED - All MPV features available", file=sys.stderr)
+            debug("✅ MPV: ENABLED - All MPV features available", file=sys.stderr)
             logger.info("[Startup] MPV health check PASSED")
         else:
-            log(f"⚠️ MPV: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)
-            log("→ Hydrus features still available", file=sys.stderr)
+            debug(f"⚠️ MPV: DISABLED - {reason or 'Connection failed'}", file=sys.stderr)
+            debug("→ Hydrus features still available", file=sys.stderr)
             logger.warning(f"[Startup] MPV health check FAILED: {reason}")

     except Exception as e:
@@ -275,7 +273,77 @@ def initialize_mpv_health_check() -> None:
         _MPV_AVAILABLE = False
         _MPV_UNAVAILABLE_REASON = str(e)
         _MPV_CHECK_COMPLETE = True
-        log(f"⚠️ MPV: DISABLED - Error during health check: {e}", file=sys.stderr)
+        debug(f"⚠️ MPV: DISABLED - Error during health check: {e}", file=sys.stderr)
+
+
+def check_matrix_availability(config: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
+    """Check if Matrix homeserver is reachable and credentials are valid.
+
+    Args:
+        config: Application configuration dictionary
+
+    Returns:
+        Tuple of (is_available: bool, reason: Optional[str])
+    """
+    try:
+        import requests
+        matrix_conf = config.get('storage', {}).get('matrix', {})
+        homeserver = matrix_conf.get('homeserver')
+        access_token = matrix_conf.get('access_token')
+
+        if not homeserver:
+            return False, "Not configured"
+
+        if not homeserver.startswith('http'):
+            homeserver = f"https://{homeserver}"
+
+        # Check versions endpoint (no auth required)
+        try:
+            resp = requests.get(f"{homeserver}/_matrix/client/versions", timeout=5)
+            if resp.status_code != 200:
+                return False, f"Homeserver returned {resp.status_code}"
+        except Exception as e:
+            return False, f"Homeserver unreachable: {e}"
+
+        # Check auth if token provided (whoami)
+        if access_token:
+            try:
+                headers = {"Authorization": f"Bearer {access_token}"}
+                resp = requests.get(f"{homeserver}/_matrix/client/v3/account/whoami", headers=headers, timeout=5)
+                if resp.status_code != 200:
+                    return False, f"Authentication failed: {resp.status_code}"
+            except Exception as e:
+                return False, f"Auth check failed: {e}"
+
+        return True, None
+
+    except Exception as e:
+        return False, str(e)
+
+
+def initialize_matrix_health_check(config: Dict[str, Any]) -> None:
+    """Initialize Matrix health check at startup."""
+    global _MATRIX_AVAILABLE, _MATRIX_UNAVAILABLE_REASON, _MATRIX_CHECK_COMPLETE
+
+    logger.info("[Startup] Starting Matrix health check...")
+
+    try:
+        is_available, reason = check_matrix_availability(config)
+        _MATRIX_AVAILABLE = is_available
+        _MATRIX_UNAVAILABLE_REASON = reason
+        _MATRIX_CHECK_COMPLETE = True
+
+        if is_available:
+            debug("Matrix: ENABLED - Homeserver reachable", file=sys.stderr)
+        else:
+            if reason != "Not configured":
+                debug(f"Matrix: DISABLED - {reason}", file=sys.stderr)
+
+    except Exception as e:
+        logger.error(f"[Startup] Failed to initialize Matrix health check: {e}", exc_info=True)
+        _MATRIX_AVAILABLE = False
+        _MATRIX_UNAVAILABLE_REASON = str(e)
+        _MATRIX_CHECK_COMPLETE = True
+
+
 def is_hydrus_available() -> bool:
@@ -423,3 +491,52 @@ def enable_mpv_features() -> None:
     _MPV_AVAILABLE = True
     _MPV_UNAVAILABLE_REASON = None
     logger.info("[MPV] Features manually enabled")
+
+
+def is_matrix_available() -> bool:
+    """Check if Matrix is available (from cached health check).
+
+    Returns:
+        True if Matrix is available, False otherwise
+    """
+    return _MATRIX_AVAILABLE is True
+
+
+def get_matrix_unavailable_reason() -> Optional[str]:
+    """Get the reason why Matrix is unavailable.
+
+    Returns:
+        String explaining why Matrix is unavailable, or None if available
+    """
+    return _MATRIX_UNAVAILABLE_REASON if not is_matrix_available() else None
+
+
+def is_matrix_check_complete() -> bool:
+    """Check if the Matrix health check has been completed.
+
+    Returns:
+        True if health check has run, False if still pending
+    """
+    return _MATRIX_CHECK_COMPLETE
+
+
+def disable_matrix_features() -> None:
+    """Manually disable all Matrix features (for testing/fallback).
+
+    This can be called if Matrix connectivity is lost after startup.
+    """
+    global _MATRIX_AVAILABLE, _MATRIX_UNAVAILABLE_REASON
+    _MATRIX_AVAILABLE = False
+    _MATRIX_UNAVAILABLE_REASON = "Manually disabled or lost connection"
+    logger.warning("[Matrix] Features manually disabled")
+
+
+def enable_matrix_features() -> None:
+    """Manually enable Matrix features (for testing/fallback).
+
+    This can be called if Matrix connectivity is restored after startup.
+    """
+    global _MATRIX_AVAILABLE, _MATRIX_UNAVAILABLE_REASON
+    _MATRIX_AVAILABLE = True
+    _MATRIX_UNAVAILABLE_REASON = None
+    logger.info("[Matrix] Features manually enabled")
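A sketch of how the new Matrix health check slots into startup, mirroring the existing Hydrus/Debrid/MPV pattern (the startup call site and the uploading caller are assumptions):

# At application startup (call site assumed):
initialize_matrix_health_check(config)

# Later, feature code consults the cached result instead of re-probing
if is_matrix_available():
    upload_to_matrix(path)  # hypothetical caller
else:
    reason = get_matrix_unavailable_reason()
    print(f"matrix disabled: {reason}")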
92
metadata.py
92
metadata.py
@@ -5,7 +5,7 @@ import sys
 import shutil
 import sqlite3
 import requests
-from helper.logger import log
+from helper.logger import log, debug
 from urllib.parse import urlsplit, urlunsplit, unquote
 from collections import deque
 from pathlib import Path
@@ -1312,7 +1312,7 @@ def _read_sidecar_metadata(sidecar_path: Path) -> tuple[Optional[str], List[str]
 
 
 
-def rename_by_metadata(file_path: Path, tags: Iterable[str]) -> Optional[Path]:
+def rename(file_path: Path, tags: Iterable[str]) -> Optional[Path]:
     """Rename a file based on title: tag in the tags list.
 
     If a title: tag is present, renames the file and any .tags/.metadata sidecars.
@@ -1350,13 +1350,13 @@ def rename_by_metadata(file_path: Path, tags: Iterable[str]) -> Optional[Path]:
         if new_path.exists():
             try:
                 new_path.unlink()
-                log(f"[rename_by_metadata] Replaced existing file: {new_name}", file=sys.stderr)
+                debug(f"Replaced existing file: {new_name}", file=sys.stderr)
             except Exception as e:
-                log(f"[rename_by_metadata] Warning: Could not replace target file {new_name}: {e}", file=sys.stderr)
+                debug(f"Warning: Could not replace target file {new_name}: {e}", file=sys.stderr)
                 return None
 
         file_path.rename(new_path)
-        log(f"[rename_by_metadata] Renamed file: {old_name} → {new_name}", file=sys.stderr)
+        debug(f"Renamed file: {old_name} → {new_name}", file=sys.stderr)
 
         # Rename the .tags sidecar if it exists
         old_tags_path = file_path.parent / (old_name + '.tags')
@@ -1369,21 +1369,21 @@ def rename_by_metadata(file_path: Path, tags: Iterable[str]) -> Optional[Path]:
                 pass
             else:
                 old_tags_path.rename(new_tags_path)
-                log(f"[rename_by_metadata] Renamed sidecar: {old_tags_path.name} → {new_tags_path.name}", file=sys.stderr)
+                debug(f"Renamed sidecar: {old_tags_path.name} → {new_tags_path.name}", file=sys.stderr)
 
         # Rename the .metadata sidecar if it exists
         old_metadata_path = file_path.parent / (old_name + '.metadata')
         if old_metadata_path.exists():
             new_metadata_path = file_path.parent / (new_name + '.metadata')
             if new_metadata_path.exists():
-                log(f"[rename_by_metadata] Warning: Target metadata already exists: {new_metadata_path.name}", file=sys.stderr)
+                debug(f"Warning: Target metadata already exists: {new_metadata_path.name}", file=sys.stderr)
             else:
                 old_metadata_path.rename(new_metadata_path)
-                log(f"[rename_by_metadata] Renamed metadata: {old_metadata_path.name} → {new_metadata_path.name}", file=sys.stderr)
+                debug(f"Renamed metadata: {old_metadata_path.name} → {new_metadata_path.name}", file=sys.stderr)
 
         return new_path
     except Exception as exc:
-        log(f"[rename_by_metadata] Warning: Failed to rename file: {exc}", file=sys.stderr)
+        debug(f"Warning: Failed to rename file: {exc}", file=sys.stderr)
         return None
 
 
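The rename above (rename_by_metadata → rename) is a pure naming change; the helper still derives the new filename from a title: tag and moves the file together with its .tags/.metadata sidecars. A rough sketch of the title lookup step, assuming tags are plain namespace:value strings (the sanitization details are illustrative, not the module's actual rules):

from typing import Iterable, Optional

def title_from_tags(tags: Iterable[str]) -> Optional[str]:
    # First "title:" tag wins; assumed "namespace:value" format.
    for tag in tags:
        if tag.lower().startswith("title:"):
            return tag.split(":", 1)[1].strip()
    return None

# title_from_tags(["artist:Beatles", "title:Abbey Road"]) -> "Abbey Road"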
@@ -1419,10 +1419,10 @@ def write_tags(media_path: Path, tags: Iterable[str], known_urls: Iterable[str],
 
             if db_tags:
                 db.add_tags(media_path, db_tags)
-                log(f"Added tags to database for {media_path.name}")
+                debug(f"Added tags to database for {media_path.name}")
             return
         except Exception as e:
-            log(f"Failed to add tags to database: {e}", file=sys.stderr)
+            debug(f"Failed to add tags to database: {e}", file=sys.stderr)
             # Fall through to sidecar creation as fallback
 
     # Create sidecar path
@@ -1449,7 +1449,7 @@ def write_tags(media_path: Path, tags: Iterable[str], known_urls: Iterable[str],
 
     if lines:
         sidecar.write_text("\n".join(lines) + "\n", encoding="utf-8")
-        log(f"Wrote tags to {sidecar}")
+        debug(f"Tags: {sidecar}")
         # Clean up legacy files
         for legacy_path in [media_path.with_name(media_path.name + '.tags'),
                             media_path.with_name(media_path.name + '.tags.txt')]:
@@ -1464,7 +1464,7 @@ def write_tags(media_path: Path, tags: Iterable[str], known_urls: Iterable[str],
         except FileNotFoundError:
             pass
     except OSError as exc:
-        log(f"Failed to write tag sidecar {sidecar}: {exc}", file=sys.stderr)
+        debug(f"Failed to write tag sidecar {sidecar}: {exc}", file=sys.stderr)
 
 
 def write_metadata(media_path: Path, hash_value: Optional[str] = None, known_urls: Optional[Iterable[str]] = None, relationships: Optional[Iterable[str]] = None, db=None) -> None:
@@ -1503,10 +1503,10 @@ def write_metadata(media_path: Path, hash_value: Optional[str] = None, known_url
 
             if db_tags:
                 db.add_tags(media_path, db_tags)
-                log(f"Added metadata to database for {media_path.name}")
+                debug(f"Added metadata to database for {media_path.name}")
             return
         except Exception as e:
-            log(f"Failed to add metadata to database: {e}", file=sys.stderr)
+            debug(f"Failed to add metadata to database: {e}", file=sys.stderr)
             # Fall through to sidecar creation as fallback
 
     # Create sidecar path
@@ -1535,7 +1535,7 @@ def write_metadata(media_path: Path, hash_value: Optional[str] = None, known_url
     # Write metadata file
     if lines:
         sidecar.write_text("\n".join(lines) + "\n", encoding="utf-8")
-        log(f"Wrote metadata to {sidecar}")
+        debug(f"Wrote metadata to {sidecar}")
     else:
         # Remove if no content
         try:
@@ -1543,7 +1543,7 @@ def write_metadata(media_path: Path, hash_value: Optional[str] = None, known_url
         except FileNotFoundError:
             pass
     except OSError as exc:
-        log(f"Failed to write metadata sidecar {sidecar}: {exc}", file=sys.stderr)
+        debug(f"Failed to write metadata sidecar {sidecar}: {exc}", file=sys.stderr)
 
 
 def extract_title(tags: Iterable[str]) -> Optional[str]:
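write_tags and write_metadata share one control flow: attempt the database write first and fall back to a plaintext sidecar only when the DB path fails (or no db handle was passed). A compressed sketch of that shape, with stand-in names rather than the module's exact signatures:

from pathlib import Path
from typing import Iterable

def persist_tags(media_path: Path, tags: Iterable[str], db=None) -> None:
    tags = list(tags)
    if db is not None:
        try:
            db.add_tags(media_path, tags)   # DB is the primary store
            return
        except Exception:
            pass                            # fall through to the sidecar fallback
    sidecar = media_path.with_name(media_path.name + ".tags")
    sidecar.write_text("\n".join(tags) + "\n", encoding="utf-8")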
@@ -1892,7 +1892,7 @@ def extract_ytdlp_tags(entry: Dict[str, Any]) -> List[str]:
     Example:
         >>> entry = {'artist': 'The Beatles', 'album': 'Abbey Road', 'duration': 5247}
         >>> tags = extract_ytdlp_tags(entry)
-        >>> log(tags)
+        >>> debug(tags)
         ['artist:The Beatles', 'album:Abbey Road']
     """
     tags: List[str] = []
@@ -1986,7 +1986,7 @@ def dedup_tags_by_namespace(tags: List[str], keep_first: bool = True) -> List[st
         ...     'album:Abbey Road', 'artist:Beatles'
         ... ]
         >>> dedup = dedup_tags_by_namespace(tags)
-        >>> log(dedup)
+        >>> debug(dedup)
         ['artist:Beatles', 'album:Abbey Road', 'tag:rock']
     """
     if not tags:
@@ -2053,7 +2053,7 @@ def merge_multiple_tag_lists(
         >>> list1 = ['artist:Beatles', 'album:Abbey Road']
         >>> list2 = ['artist:Beatles', 'album:Abbey Road', 'tag:rock']
         >>> merged = merge_multiple_tag_lists([list1, list2])
-        >>> log(merged)
+        >>> debug(merged)
         ['artist:Beatles', 'album:Abbey Road', 'tag:rock']
     """
     if not sources:
@@ -2137,7 +2137,7 @@ def read_tags_from_file(file_path: Path) -> List[str]:
 
     Example:
         >>> tags = read_tags_from_file(Path('file.txt.tags'))
-        >>> log(tags)
+        >>> debug(tags)
         ['artist:Beatles', 'album:Abbey Road']
     """
     file_path = Path(file_path)
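Per its doctest, dedup_tags_by_namespace keeps one tag per namespace prefix, preferring the first occurrence when keep_first is true. A minimal sketch consistent with that contract (the real implementation may order or normalize differently):

from typing import Dict, List

def dedup_by_namespace(tags: List[str], keep_first: bool = True) -> List[str]:
    kept: Dict[str, str] = {}
    for tag in tags:
        ns = tag.split(":", 1)[0] if ":" in tag else tag
        if keep_first and ns in kept:
            continue                # earlier tag in this namespace wins
        kept[ns] = tag              # keep_first=False lets later tags overwrite
    return list(kept.values())

# dedup_by_namespace(['artist:The Beatles', 'tag:rock', 'artist:Beatles'])
# -> ['artist:The Beatles', 'tag:rock']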
@@ -2271,7 +2271,7 @@ def embed_metadata_in_file(
     # Check if FFmpeg is available
     ffmpeg_path = shutil.which('ffmpeg')
     if not ffmpeg_path:
-        log(f"⚠️ FFmpeg not found; cannot embed metadata in {file_path.name}", file=sys.stderr)
+        debug(f"⚠️ FFmpeg not found; cannot embed metadata in {file_path.name}", file=sys.stderr)
         return False
 
     # Create temporary file for output
@@ -2294,18 +2294,18 @@
             # Replace original with temp file
             file_path.unlink()
             temp_file.rename(file_path)
-            log(f"✅ Embedded metadata in file: {file_path.name}", file=sys.stderr)
+            debug(f"✅ Embedded metadata in file: {file_path.name}", file=sys.stderr)
             return True
         else:
             # Clean up temp file if it exists
             if temp_file.exists():
                 temp_file.unlink()
-            log(f"❌ FFmpeg metadata embedding failed for {file_path.name}", file=sys.stderr)
+            debug(f"❌ FFmpeg metadata embedding failed for {file_path.name}", file=sys.stderr)
             if result.stderr:
                 # Safely decode stderr, ignoring invalid UTF-8 bytes
                 try:
                     stderr_text = result.stderr.decode('utf-8', errors='replace')[:200]
-                    log(f"FFmpeg stderr: {stderr_text}", file=sys.stderr)
+                    debug(f"FFmpeg stderr: {stderr_text}", file=sys.stderr)
                 except Exception:
                     pass
             return False
@@ -2315,7 +2315,7 @@
             temp_file.unlink()
         except Exception:
             pass
-        log(f"❌ Error embedding metadata: {exc}", file=sys.stderr)
+        debug(f"❌ Error embedding metadata: {exc}", file=sys.stderr)
         return False
 
 
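embed_metadata_in_file writes FFmpeg output to a temporary file and swaps it over the original only after a zero exit code, so a failed run never clobbers the source. A bare-bones sketch of that temp-then-rename pattern; the module's actual FFmpeg arguments are not shown in this diff, so the -metadata/-codec copy invocation below is just one common shape:

import shutil
import subprocess
from pathlib import Path

def embed_title(file_path: Path, title: str) -> bool:
    ffmpeg = shutil.which("ffmpeg")
    if not ffmpeg:
        return False
    # Keep the same extension so FFmpeg picks the right container for the temp output.
    temp_file = file_path.with_name(file_path.stem + ".tmp" + file_path.suffix)
    result = subprocess.run(
        [ffmpeg, "-y", "-i", str(file_path),
         "-metadata", f"title={title}", "-codec", "copy", str(temp_file)],
        capture_output=True,
    )
    if result.returncode == 0:
        file_path.unlink()           # swap only after FFmpeg succeeded
        temp_file.rename(file_path)
        return True
    if temp_file.exists():
        temp_file.unlink()           # failure leaves the original untouched
    return False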
@@ -2402,7 +2402,7 @@ def normalize_tags_from_source(
     Example:
         >>> entry = {'artist': 'Beatles', 'album': 'Abbey Road'}
         >>> tags = normalize_tags_from_source(entry, 'ytdlp')
-        >>> log(tags)
+        >>> debug(tags)
         ['artist:Beatles', 'album:Abbey Road']
     """
     if source_type == 'auto':
@@ -2600,10 +2600,10 @@ def imdb(imdb_id: str = typer.Argument(..., help="IMDb identifier (ttXXXXXXX)"))
     """Lookup an IMDb title."""
     try:
         result = imdb_tag(imdb_id)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(help="Lookup a MusicBrainz entity")
@@ -2614,10 +2614,10 @@ def musicbrainz(
     """Lookup a MusicBrainz entity."""
     try:
         result = fetch_musicbrainz_tags(mbid, entity)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="remote-tags", help="Normalize a remote metadata payload")
@@ -2633,10 +2633,10 @@ def remote_tags(payload: Optional[str] = typer.Option(None, "--payload", help="J
         if context and not isinstance(context, dict):
             raise ValueError("context must be an object")
         result = build_remote_bundle(metadata, existing, context)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="remote-fetch", help="Resolve remote metadata bundle")
@@ -2645,10 +2645,10 @@ def remote_fetch(payload: Optional[str] = typer.Option(None, "--payload", help="
     try:
         payload_data = _load_payload(payload)
         result = resolve_remote_metadata(payload_data)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="expand-tag", help="Expand metadata references into tags")
@@ -2657,10 +2657,10 @@ def expand_tag(payload: Optional[str] = typer.Option(None, "--payload", help="JS
     try:
         payload_data = _load_payload(payload)
         result = expand_metadata_tag(payload_data)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="hydrus-fetch", help="Fetch Hydrus metadata for a file")
@@ -2669,10 +2669,10 @@ def hydrus_fetch(payload: Optional[str] = typer.Option(None, "--payload", help="
     try:
         payload_data = _load_payload(payload)
         result = fetch_hydrus_metadata(payload_data)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="hydrus-fetch-url", help="Fetch Hydrus metadata using a source URL")
@@ -2681,10 +2681,10 @@ def hydrus_fetch_url(payload: Optional[str] = typer.Option(None, "--payload", he
     try:
         payload_data = _load_payload(payload)
         result = fetch_hydrus_metadata_by_url(payload_data)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="sync-sidecar", help="Synchronise .tags sidecar with supplied data")
@@ -2693,10 +2693,10 @@ def sync_sidecar_cmd(payload: Optional[str] = typer.Option(None, "--payload", he
     try:
         payload_data = _load_payload(payload)
         result = sync_sidecar(payload_data)
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 @app.command(name="update-tag", help="Update or rename a tag")
@@ -2705,10 +2705,10 @@ def update_tag_cmd(payload: Optional[str] = typer.Option(None, "--payload", help
     try:
         payload_data = _load_payload(payload)
         result = apply_tag_mutation(payload_data, 'update')
-        log(json.dumps(result, ensure_ascii=False), flush=True)
+        debug(json.dumps(result, ensure_ascii=False), flush=True)
     except Exception as exc:
         error_payload = {"error": str(exc)}
-        log(json.dumps(error_payload, ensure_ascii=False), flush=True)
+        debug(json.dumps(error_payload, ensure_ascii=False), flush=True)
         raise typer.Exit(code=1)
 
 def main(argv: Optional[List[str]] = None) -> int:
@@ -3102,7 +3102,7 @@ def fetch_openlibrary_metadata_tags(isbn: Optional[str] = None, olid: Optional[s
             metadata_tags.append(subject_clean)
 
     except Exception as e:
-        log(f"⚠ Failed to fetch OpenLibrary metadata: {e}")
+        debug(f"⚠ Failed to fetch OpenLibrary metadata: {e}")
 
     return metadata_tags
 
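All of the Typer commands in this block follow one contract: parse the optional --payload JSON string, call a single worker function, and emit exactly one JSON object (the result, or {"error": ...} before a non-zero exit). A stripped-down sketch of that shape — do_work stands in for imdb_tag, sync_sidecar, and the other workers:

import json
from typing import Optional

import typer

app = typer.Typer()

def do_work(payload: dict) -> dict:
    # Placeholder for the real worker (imdb_tag, resolve_remote_metadata, ...).
    return {"ok": True, "echo": payload}

@app.command(name="example")
def example(payload: Optional[str] = typer.Option(None, "--payload")) -> None:
    try:
        payload_data = json.loads(payload) if payload else {}
        result = do_work(payload_data)
        print(json.dumps(result, ensure_ascii=False), flush=True)
    except Exception as exc:
        print(json.dumps({"error": str(exc)}, ensure_ascii=False), flush=True)
        raise typer.Exit(code=1)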
189  result_table.py
@@ -251,8 +251,22 @@ class ResultTable:
     def _add_search_result(self, row: ResultRow, result: Any) -> None:
         """Extract and add SearchResult fields to row."""
         # Core fields
-        if hasattr(result, 'title') and result.title:
-            row.add_column("Title", result.title)
+        title = getattr(result, 'title', '')
+        origin = getattr(result, 'origin', '').lower()
+
+        # Handle extension separation for local files
+        extension = ""
+        if title and origin == 'local':
+            path_obj = Path(title)
+            if path_obj.suffix:
+                extension = path_obj.suffix.lstrip('.')
+                title = path_obj.stem
+
+        if title:
+            row.add_column("Title", title)
+
+        # Extension column
+        row.add_column("Ext", extension)
 
         if hasattr(result, 'origin') and result.origin:
             row.add_column("Source", result.origin)
@@ -263,18 +277,6 @@ class ResultTable:
         if hasattr(result, 'media_kind') and result.media_kind:
             row.add_column("Type", result.media_kind)
 
-        # Target (file path or URL)
-        if hasattr(result, 'target') and result.target:
-            # Truncate long paths for display
-            target_str = str(result.target)
-            if len(target_str) > 60:
-                target_str = "..." + target_str[-57:]
-            row.add_column("Target", target_str)
-
-        # Hash
-        if hasattr(result, 'hash_hex') and result.hash_hex:
-            row.add_column("Hash", result.hash_hex[:16] + "...")  # First 16 chars
-
         # Tags summary
         if hasattr(result, 'tag_summary') and result.tag_summary:
             tags_str = str(result.tag_summary)
@@ -305,6 +307,7 @@ class ResultTable:
 
         Shows only essential columns:
         - Title (required)
+        - Ext (extension)
         - Origin (source backend)
         - Size (formatted MB, integer only)
 
@@ -313,9 +316,23 @@ class ResultTable:
         """
         # Title (required - use origin as fallback)
         title = getattr(item, 'title', None) or getattr(item, 'origin', 'Unknown')
+        origin = getattr(item, 'origin', '').lower()
+
+        # Handle extension separation for local files
+        extension = ""
+        if title and origin == 'local':
+            # Try to split extension
+            path_obj = Path(title)
+            if path_obj.suffix:
+                extension = path_obj.suffix.lstrip('.')
+                title = path_obj.stem
+
         if title:
             row.add_column("Title", title[:90] + ("..." if len(title) > 90 else ""))
 
+        # Extension column - always add to maintain column order
+        row.add_column("Ext", extension)
+
         # Storage (source backend - hydrus, local, debrid, etc)
         if hasattr(item, 'origin') and item.origin:
             row.add_column("Storage", item.origin)
@@ -364,9 +381,6 @@ class ResultTable:
                 file_str = "..." + file_str[-57:]
             row.add_column("Path", file_str)
 
-        if hasattr(obj, 'file_hash') and obj.file_hash:
-            row.add_column("Hash", obj.file_hash[:16] + "...")
-
         # Tags
         if hasattr(obj, 'tags') and obj.tags:
             tags_str = ", ".join(obj.tags[:3])  # First 3 tags
@@ -406,7 +420,10 @@ class ResultTable:
         # Helper to determine if a field should be hidden from display
         def is_hidden_field(field_name: Any) -> bool:
             # Hide internal/metadata fields
-            hidden_fields = {'__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra'}
+            hidden_fields = {
+                '__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra',
+                'target', 'hash', 'hash_hex', 'file_hash'
+            }
             if isinstance(field_name, str):
                 if field_name.startswith('__'):
                     return True
@@ -417,6 +434,30 @@ class ResultTable:
         # Strip out hidden metadata fields (prefixed with __)
         visible_data = {k: v for k, v in data.items() if not is_hidden_field(k)}
+
+        # Handle extension separation for local files
+        origin = str(visible_data.get('origin', '') or visible_data.get('source', '')).lower()
+
+        # Debug logging
+        # print(f"DEBUG: Processing dict result. Origin: {origin}, Keys: {list(visible_data.keys())}")
+
+        if origin == 'local':
+            # Find title field
+            title_field = next((f for f in ['title', 'name', 'filename'] if f in visible_data), None)
+            if title_field:
+                title_val = str(visible_data[title_field])
+                path_obj = Path(title_val)
+                if path_obj.suffix:
+                    extension = path_obj.suffix.lstrip('.')
+                    visible_data[title_field] = path_obj.stem
+                    visible_data['ext'] = extension
+                    # print(f"DEBUG: Split extension. Title: {visible_data[title_field]}, Ext: {extension}")
+                else:
+                    visible_data['ext'] = ""
+
+        # Ensure 'ext' is present so it gets picked up by priority_groups in correct order
+        if 'ext' not in visible_data:
+            visible_data['ext'] = ""
 
         # Track which fields we've already added to avoid duplicates
         added_fields = set()
         column_count = 0  # Track total columns added
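The extension split added in the hunks above leans on pathlib: any bare title can be treated as a relative path, so .suffix/.stem separate the extension cleanly, and titles without a dot fall through with ext left empty. For example:

from pathlib import Path

for title in ["Abbey Road.flac", "notes.tar.gz", "no extension"]:
    p = Path(title)
    ext = p.suffix.lstrip('.')               # '' when there is no suffix
    stem = p.stem if p.suffix else title
    print(f"{title!r} -> title={stem!r}, ext={ext!r}")

# 'Abbey Road.flac' -> title='Abbey Road', ext='flac'
# 'notes.tar.gz'    -> title='notes.tar', ext='gz'  (only the last suffix splits)
# 'no extension'    -> title='no extension', ext=''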
@@ -467,10 +508,9 @@ class ResultTable:
         # Priority field groups - uses first matching field in each group
         priority_groups = [
             ('title | name | filename', ['title', 'name', 'filename']),
-            ('origin | source', ['origin', 'source']),
+            ('ext', ['ext']),
+            ('origin | source | store', ['origin', 'source', 'store']),
             ('type | media_kind | kind', ['type', 'media_kind', 'kind']),
-            ('target | path | url', ['target', 'path', 'url']),
-            ('hash | hash_hex | file_hash', ['hash', 'hash_hex', 'file_hash']),
             ('tags | tag_summary', ['tags', 'tag_summary']),
             ('detail | description', ['detail', 'description']),
         ]
@@ -485,7 +525,12 @@ class ResultTable:
                     if len(value_str) > 60:
                         value_str = value_str[:57] + "..."
 
-                    row.add_column(field.replace('_', ' ').title(), value_str)
+                    # Special case for Origin/Source -> Store to match user preference
+                    col_name = field.replace('_', ' ').title()
+                    if field in ['origin', 'source']:
+                        col_name = "Store"
+
+                    row.add_column(col_name, value_str)
                     added_fields.add(field)
                     column_count += 1
                     break  # Use first match in this group, skip rest
@@ -509,106 +554,6 @@ class ResultTable:
             # Don't display it
             added_fields.add('_selection_args')
 
-        # Helper to determine if a field should be hidden from display
-        def is_hidden_field(field_name: Any) -> bool:
-            # Hide internal/metadata fields
-            hidden_fields = {'__', 'id', 'action', 'parent_id', 'is_temp', 'file_path', 'extra'}
-            if isinstance(field_name, str):
-                if field_name.startswith('__'):
-                    return True
-                if field_name in hidden_fields:
-                    return True
-            return False
-
-        # Strip out hidden metadata fields (prefixed with __)
-        visible_data = {k: v for k, v in data.items() if not is_hidden_field(k)}
-
-        # Track which fields we've already added to avoid duplicates
-        added_fields = set()
-        column_count = 0  # Track total columns added
-
-        # Helper function to format values
-        def format_value(value: Any) -> str:
-            if isinstance(value, list):
-                formatted = ", ".join(str(v) for v in value[:3])
-                if len(value) > 3:
-                    formatted += f", +{len(value) - 3} more"
-                return formatted
-            return str(value)
-
-        # Special handling for 'columns' field from search providers
-        # If present, use it to populate row columns dynamically
-        if 'columns' in visible_data and isinstance(visible_data['columns'], list) and visible_data['columns']:
-            try:
-                for col_name, col_value in visible_data['columns']:
-                    # Skip the "#" column as ResultTable already adds row numbers
-                    if col_name == '#':
-                        continue
-                    if column_count >= self.max_columns:
-                        break
-                    col_value_str = format_value(col_value)
-                    if len(col_value_str) > 60:
-                        col_value_str = col_value_str[:57] + "..."
-                    row.add_column(col_name, col_value_str)
-                    added_fields.add(col_name.lower())
-                    column_count += 1
-                # Mark 'columns' as handled so we don't add it as a field
-                added_fields.add('columns')
-                # Also mark common fields that shouldn't be re-displayed if they're in columns
-                # This prevents showing both "Store" (from columns) and "Origin" (from data fields)
-                added_fields.add('origin')
-                added_fields.add('source')
-                added_fields.add('target')
-                added_fields.add('path')
-                added_fields.add('media_kind')
-                added_fields.add('detail')
-                added_fields.add('annotations')
-                added_fields.add('full_metadata')  # Don't display full metadata as column
-            except Exception:
-                # Fall back to regular field handling if columns format is unexpected
-                pass
-
-        # Only add priority groups if we haven't already filled columns from 'columns' field
-        if column_count == 0:
-            # Priority field groups - uses first matching field in each group
-            priority_groups = [
-                ('title | name | filename', ['title', 'name', 'filename']),
-                ('origin | source', ['origin', 'source']),
-                ('type | media_kind | kind', ['type', 'media_kind', 'kind']),
-                ('target | path | url', ['target', 'path', 'url']),
-                ('hash | hash_hex | file_hash', ['hash', 'hash_hex', 'file_hash']),
-                ('tags | tag_summary', ['tags', 'tag_summary']),
-                ('detail | description', ['detail', 'description']),
-            ]
-
-            # Add priority field groups first - use first match in each group
-            for _group_label, field_options in priority_groups:
-                if column_count >= self.max_columns:
-                    break
-                for field in field_options:
-                    if field in visible_data and field not in added_fields:
-                        value_str = format_value(visible_data[field])
-                        if len(value_str) > 60:
-                            value_str = value_str[:57] + "..."
-
-                        row.add_column(field.replace('_', ' ').title(), value_str)
-                        added_fields.add(field)
-                        column_count += 1
-                        break  # Use first match in this group, skip rest
-
-        # Add remaining fields only if we haven't hit max_columns (and no explicit columns were set)
-        if column_count < self.max_columns:
-            for key, value in visible_data.items():
-                if column_count >= self.max_columns:
-                    break
-                if key not in added_fields:  # Only add if not already added
-                    value_str = format_value(value)
-                    if len(value_str) > 40:
-                        value_str = value_str[:37] + "..."
-                    row.add_column(key.replace('_', ' ').title(), value_str)
-                    added_fields.add(key)  # Track in added_fields to prevent re-adding
-                    column_count += 1
-
     def _add_generic_object(self, row: ResultRow, obj: Any) -> None:
         """Extract and add fields from generic objects."""
         if hasattr(obj, '__dict__'):
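The surviving priority-group loop is a first-match-wins scan: each tuple names a display slot plus the candidate fields that can fill it, and the first candidate present in the row data claims the column (with origin/source rendered under the friendlier "Store" header). A reduced sketch of that selection logic:

from typing import Any, Dict, List, Tuple

def pick_columns(data: Dict[str, Any],
                 groups: List[Tuple[str, List[str]]],
                 max_columns: int = 6) -> List[Tuple[str, Any]]:
    columns: List[Tuple[str, Any]] = []
    used = set()
    for _label, candidates in groups:
        if len(columns) >= max_columns:
            break
        for field in candidates:
            if field in data and field not in used:
                name = "Store" if field in ("origin", "source") else field.title()
                columns.append((name, data[field]))
                used.add(field)
                break                # first match in the group wins
    return columns

# pick_columns({'title': 'x', 'source': 'local', 'ext': 'mp3'},
#              [('title', ['title']), ('ext', ['ext']), ('store', ['origin', 'source'])])
# -> [('Title', 'x'), ('Ext', 'mp3'), ('Store', 'local')]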
23  test_search.py  Normal file
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+import sys
+sys.path.insert(0, '.')
+from helper.file_storage import LocalStorageBackend
+from config import get_local_storage_path
+import json
+
+config = json.load(open('config.json'))
+# Get the location string properly
+location = get_local_storage_path(config)
+if isinstance(location, dict):
+    location = location.get('path') or str(location)
+
+backend = LocalStorageBackend(config)
+
+# Test searches
+for query in ['sie*', 'sie', '*']:
+    print(f"\n=== Searching for: {query} ===")
+    results = backend.search(query, location=str(location), limit=5)
+    print(f"Found {len(results)} results")
+    for r in results:
+        print(f"  - {r.get('title')} ({r.get('ext')}) @ {r.get('path')}")
+