df
Some checks failed
smoke-mm / Install & smoke test mm --help (push) Has been cancelled
@@ -32,7 +32,9 @@ def register_native_commands(registry: Dict[str, CmdletFn]) -> None:
"""Import native command modules and register their CMDLET exec functions."""
base_dir = os.path.dirname(__file__)
for filename in os.listdir(base_dir):
if not (filename.endswith(".py") and not filename.startswith("_") and filename != "__init__.py"):
if not (
filename.endswith(".py") and not filename.startswith("_") and filename != "__init__.py"
):
continue

mod_name = filename[:-3]
@@ -43,5 +45,6 @@ def register_native_commands(registry: Dict[str, CmdletFn]) -> None:
_register_cmdlet_object(cmdlet_obj, registry)
except Exception as exc:
import sys

print(f"Error importing native command '{mod_name}': {exc}", file=sys.stderr)
continue

@@ -7,32 +7,37 @@ from SYS.logger import log
from result_table import ResultTable
import pipeline as ctx

ADJECTIVE_FILE = os.path.join(os.path.dirname(os.path.dirname(__file__)), "cmdnat", "adjective.json")
ADJECTIVE_FILE = os.path.join(
os.path.dirname(os.path.dirname(__file__)), "cmdnat", "adjective.json"
)

def _load_adjectives() -> Dict[str, List[str]]:
try:
if os.path.exists(ADJECTIVE_FILE):
with open(ADJECTIVE_FILE, 'r', encoding='utf-8') as f:
with open(ADJECTIVE_FILE, "r", encoding="utf-8") as f:
return json.load(f)
except Exception as e:
log(f"Error loading adjectives: {e}", file=sys.stderr)
return {}

def _save_adjectives(data: Dict[str, List[str]]) -> bool:
try:
with open(ADJECTIVE_FILE, 'w', encoding='utf-8') as f:
with open(ADJECTIVE_FILE, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)
return True
except Exception as e:
log(f"Error saving adjectives: {e}", file=sys.stderr)
return False

def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
data = _load_adjectives()

# Parse arguments manually first to handle positional args
# We expect: .adjective [category] [tag] [-add] [-delete]

# If no args, list categories
if not args:
table = ResultTable("Adjective Categories")
@@ -41,10 +46,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
row.add_column("#", str(i + 1))
row.add_column("Category", category)
row.add_column("Tag Amount", str(len(tags)))

# Selection expands to: .adjective "Category Name"
table.set_row_selection_args(i, [category])

table.set_source_command(".adjective")
ctx.set_last_result_table_overlay(table, list(data.keys()))
ctx.set_current_stage_table(table)
@@ -55,7 +60,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

# We have args. First arg is likely category.
category = args[0]

# Check if we are adding a new category (implicit if it doesn't exist)
if category not in data:
# If only category provided, create it
@@ -65,12 +70,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
log(f"Created new category: {category}")
# If more args, we might be trying to add to a non-existent category
elif "-add" in args:
data[category] = []
# Continue to add logic

data[category] = []
# Continue to add logic

# Handle operations within category
remaining_args = list(args[1:])

# Check for -add flag
if "-add" in remaining_args:
# .adjective category -add tag
@@ -82,7 +87,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
tag = remaining_args[add_idx + 1]
elif add_idx > 0:
tag = remaining_args[add_idx - 1]

if tag:
if tag not in data[category]:
data[category].append(tag)
@@ -93,7 +98,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
else:
log("Error: No tag specified to add")
return 1

# Check for -delete flag
elif "-delete" in remaining_args:
# .adjective category -delete tag
@@ -104,7 +109,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
tag = remaining_args[del_idx + 1]
elif del_idx > 0:
tag = remaining_args[del_idx - 1]

if tag:
if tag in data[category]:
data[category].remove(tag)
@@ -115,7 +120,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
else:
log("Error: No tag specified to delete")
return 1

# List tags in category (Default action if no flags or after modification)
tags = data.get(category, [])
table = ResultTable(f"Tags in '{category}'")
@@ -123,20 +128,21 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
row = table.add_row()
row.add_column("#", str(i + 1))
row.add_column("Tag", tag)

# Selection expands to: .adjective "Category" "Tag"
# This allows typing @N -delete to delete it
table.set_row_selection_args(i, [category, tag])

table.set_source_command(".adjective")
ctx.set_last_result_table_overlay(table, tags)
ctx.set_current_stage_table(table)
from rich_display import stdout_console

stdout_console().print(table)

return 0

CMDLET = Cmdlet(
name=".adjective",
alias=["adj"],
@@ -148,5 +154,5 @@ CMDLET = Cmdlet(
CmdletArg(name="add", type="flag", description="Add tag"),
CmdletArg(name="delete", type="flag", description="Delete tag"),
],
exec=_run
exec=_run,
)
@@ -9,55 +9,55 @@ CMDLET = Cmdlet(
usage=".config [key] [value]",
arg=[
CmdletArg(
name="key",
description="Configuration key to update (dot-separated)",
required=False
name="key", description="Configuration key to update (dot-separated)", required=False
),
CmdletArg(
name="value",
description="New value for the configuration key",
required=False
)
]
CmdletArg(name="value", description="New value for the configuration key", required=False),
],
)

def flatten_config(config: Dict[str, Any], parent_key: str = '', sep: str = '.') -> List[Dict[str, Any]]:

def flatten_config(
config: Dict[str, Any], parent_key: str = "", sep: str = "."
) -> List[Dict[str, Any]]:
items = []
for k, v in config.items():
if k.startswith('_'): # Skip internal keys
if k.startswith("_"): # Skip internal keys
continue

new_key = f"{parent_key}{sep}{k}" if parent_key else k
if isinstance(v, dict):
items.extend(flatten_config(v, new_key, sep=sep))
else:
items.append({
"Key": new_key,
"Value": str(v),
"Type": type(v).__name__,
"_selection_args": [new_key]
})
items.append(
{
"Key": new_key,
"Value": str(v),
"Type": type(v).__name__,
"_selection_args": [new_key],
}
)
return items

def set_nested_config(config: Dict[str, Any], key: str, value: str) -> bool:
keys = key.split('.')
keys = key.split(".")
d = config

# Navigate to the parent dict
for k in keys[:-1]:
if k not in d or not isinstance(d[k], dict):
d[k] = {}
d = d[k]

last_key = keys[-1]

# Try to preserve type if key exists
if last_key in d:
current_val = d[last_key]
if isinstance(current_val, bool):
if value.lower() in ('true', 'yes', '1', 'on'):
if value.lower() in ("true", "yes", "1", "on"):
d[last_key] = True
elif value.lower() in ('false', 'no', '0', 'off'):
elif value.lower() in ("false", "no", "0", "off"):
d[last_key] = False
else:
# Fallback to boolean conversion of string (usually True for non-empty)
@@ -80,53 +80,57 @@ def set_nested_config(config: Dict[str, Any], key: str, value: str) -> bool:
d[last_key] = value
else:
# New key, try to infer type
if value.lower() in ('true', 'false'):
d[last_key] = (value.lower() == 'true')
if value.lower() in ("true", "false"):
d[last_key] = value.lower() == "true"
elif value.isdigit():
d[last_key] = int(value)
else:
d[last_key] = value

return True

def _run(piped_result: Any, args: List[str], config: Dict[str, Any]) -> int:
# Reload config to ensure we have the latest on disk
# We don't use the passed 'config' because we want to edit the file
# and 'config' might contain runtime objects (like worker manager)
# But load_config() returns a fresh dict from disk (or cache)
# We should use load_config()

current_config = load_config()

# Parse args
# We handle args manually because of the potential for spaces in values
# and the @ expansion logic in CLI.py passing args

if not args:
# List mode
items = flatten_config(current_config)
# Sort by key
items.sort(key=lambda x: x['Key'])

items.sort(key=lambda x: x["Key"])

# Emit items for ResultTable
import pipeline as ctx

for item in items:
ctx.emit(item)
return 0

# Update mode
key = args[0]

if len(args) < 2:
print(f"Error: Value required for key '{key}'")
return 1

value = " ".join(args[1:])

# Remove quotes if present
if (value.startswith('"') and value.endswith('"')) or (value.startswith("'") and value.endswith("'")):
if (value.startswith('"') and value.endswith('"')) or (
value.startswith("'") and value.endswith("'")
):
value = value[1:-1]

try:
set_nested_config(current_config, key, value)
save_config(current_config)
@@ -136,4 +140,5 @@ def _run(piped_result: Any, args: List[str], config: Dict[str, Any]) -> int:
print(f"Error updating config: {e}")
return 1

CMDLET.exec = _run
@@ -38,7 +38,9 @@ def _find_cmd_metadata(name: str, metadata: Dict[str, Dict[str, Any]]) -> Option
return None

def _render_list(metadata: Dict[str, Dict[str, Any]], filter_text: Optional[str], args: Sequence[str]) -> None:
def _render_list(
metadata: Dict[str, Dict[str, Any]], filter_text: Optional[str], args: Sequence[str]
) -> None:
table = ResultTable("Help")
table.set_source_command(".help", list(args))
@@ -169,6 +169,7 @@ def _extract_file_path(item: Any) -> Optional[str]:
Returns a filesystem path string only if it exists.
"""

def _maybe_local_path(value: Any) -> Optional[str]:
if value is None:
return None
@@ -225,7 +226,11 @@ def _extract_url(item: Any) -> Optional[str]:
if isinstance(item, dict):
for key in ("url", "source_url", "path", "target"):
raw = item.get(key)
if isinstance(raw, str) and raw.strip() and raw.strip().startswith(("http://", "https://")):
if (
isinstance(raw, str)
and raw.strip()
and raw.strip().startswith(("http://", "https://"))
):
return raw.strip()
except Exception:
pass
@@ -264,7 +269,9 @@ def _extract_hash_from_hydrus_file_url(url: str) -> Optional[str]:
return None

def _maybe_download_hydrus_file(item: Any, config: Dict[str, Any], output_dir: Path) -> Optional[str]:
def _maybe_download_hydrus_file(
item: Any, config: Dict[str, Any], output_dir: Path
) -> Optional[str]:
"""If the item looks like a Hydrus file (hash + Hydrus URL), download it using Hydrus access key headers.

This avoids 401 from Hydrus when the URL is /get_files/file?hash=... without headers.
@@ -307,18 +314,24 @@ def _maybe_download_hydrus_file(item: Any, config: Dict[str, Any], output_dir: P
is_hydrus_url = False
if url:
parsed = urlparse(url)
is_hydrus_url = (parsed.path or "").endswith("/get_files/file") and _extract_hash_from_hydrus_file_url(url) == file_hash
is_hydrus_url = (parsed.path or "").endswith(
"/get_files/file"
) and _extract_hash_from_hydrus_file_url(url) == file_hash
hydrus_instances: set[str] = set()
try:
store_cfg = (config or {}).get("store") if isinstance(config, dict) else None
if isinstance(store_cfg, dict):
hydrus_cfg = store_cfg.get("hydrusnetwork")
if isinstance(hydrus_cfg, dict):
hydrus_instances = {str(k).strip().lower() for k in hydrus_cfg.keys() if str(k).strip()}
hydrus_instances = {
str(k).strip().lower() for k in hydrus_cfg.keys() if str(k).strip()
}
except Exception:
hydrus_instances = set()

store_hint = store_name.lower() in {"hydrus", "hydrusnetwork"} or (store_name.lower() in hydrus_instances)
store_hint = store_name.lower() in {"hydrus", "hydrusnetwork"} or (
store_name.lower() in hydrus_instances
)
if not (is_hydrus_url or store_hint):
return None
@@ -402,7 +415,11 @@ def _resolve_upload_path(item: Any, config: Dict[str, Any]) -> Optional[str]:
base_tmp = None
if isinstance(config, dict):
base_tmp = config.get("temp")
output_dir = Path(str(base_tmp)).expanduser() if base_tmp else (Path(tempfile.gettempdir()) / "Medios-Macina")
output_dir = (
Path(str(base_tmp)).expanduser()
if base_tmp
else (Path(tempfile.gettempdir()) / "Medios-Macina")
)
output_dir = output_dir / "matrix" / "hydrus"
hydrus_path = _maybe_download_hydrus_file(item, config, output_dir)
if hydrus_path:
@@ -423,11 +440,20 @@ def _resolve_upload_path(item: Any, config: Dict[str, Any]) -> Optional[str]:
base_tmp = None
if isinstance(config, dict):
base_tmp = config.get("temp")
output_dir = Path(str(base_tmp)).expanduser() if base_tmp else (Path(tempfile.gettempdir()) / "Medios-Macina")
output_dir = (
Path(str(base_tmp)).expanduser()
if base_tmp
else (Path(tempfile.gettempdir()) / "Medios-Macina")
)
output_dir = output_dir / "matrix"
output_dir.mkdir(parents=True, exist_ok=True)
result = _download_direct_file(url, output_dir, quiet=True)
if result and hasattr(result, "path") and isinstance(result.path, Path) and result.path.exists():
if (
result
and hasattr(result, "path")
and isinstance(result.path, Path)
and result.path.exists()
):
return str(result.path)
except Exception as exc:
debug(f"[matrix] Failed to download URL for upload: {exc}")
@@ -467,6 +493,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
return 1

from Provider.matrix import Matrix

try:
provider = Matrix(config)
except Exception as exc:
@@ -490,7 +517,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
file_path = _resolve_upload_path(item, config)
if not file_path:
any_failed = True
log("Matrix upload requires a local file (path) or a direct URL on the selected item", file=sys.stderr)
log(
"Matrix upload requires a local file (path) or a direct URL on the selected item",
file=sys.stderr,
)
continue

media_path = Path(file_path)
@@ -561,6 +591,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
pass

from Provider.matrix import Matrix

try:
provider = Matrix(config)
except Exception as exc:
@@ -581,7 +612,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

# Diagnostics if a configured filter yields no rows (provider filtered before name lookups for speed).
if not rooms and not _has_flag(args, "-all"):
configured_ids_dbg = [str(v).strip() for v in _parse_config_room_filter_ids(config) if str(v).strip()]
configured_ids_dbg = [
str(v).strip() for v in _parse_config_room_filter_ids(config) if str(v).strip()
]
if configured_ids_dbg:
try:
joined_ids = provider.list_joined_room_ids()
@@ -592,7 +625,10 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:

if not rooms:
if _parse_config_room_filter_ids(config) and not _has_flag(args, "-all"):
log("No joined rooms matched the configured Matrix room filter (use: .matrix -all)", file=sys.stderr)
log(
"No joined rooms matched the configured Matrix room filter (use: .matrix -all)",
file=sys.stderr,
)
else:
log("No joined rooms found.", file=sys.stderr)
return 0
@@ -630,15 +666,31 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
ctx.set_pending_pipeline_tail([[".matrix", "-send"]], ".matrix")
return 0

CMDLET = Cmdlet(
name=".matrix",
alias=["matrix", "rooms"],
summary="Send selected items to a Matrix room",
usage="@N | .matrix",
arg=[
CmdletArg(name="send", type="bool", description="(internal) Send to selected room(s)", required=False),
CmdletArg(name="all", type="bool", description="Ignore config room filter and show all joined rooms", required=False),
CmdletArg(name="text", type="string", description="Send a follow-up text message after each upload (caption-like)", required=False),
CmdletArg(
name="send",
type="bool",
description="(internal) Send to selected room(s)",
required=False,
),
CmdletArg(
name="all",
type="bool",
description="Ignore config room filter and show all joined rooms",
required=False,
),
CmdletArg(
name="text",
type="string",
description="Send a follow-up text message after each upload (caption-like)",
required=False,
),
],
exec=_run
exec=_run,
)
@@ -112,11 +112,7 @@ def _get_active_table(piped_result: Any) -> Optional[Any]:
if piped_result.__class__.__name__ == "ResultTable":
return piped_result

return (
ctx.get_display_table()
or ctx.get_current_stage_table()
or ctx.get_last_result_table()
)
return ctx.get_display_table() or ctx.get_current_stage_table() or ctx.get_last_result_table()

def _run(piped_result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
cmdnat/pipe.py
@@ -18,7 +18,6 @@ from API.folder import LocalLibrarySearchOptimizer
|
||||
from config import get_local_storage_path, get_hydrus_access_key, get_hydrus_url
|
||||
|
||||
|
||||
|
||||
_ALLDEBRID_UNLOCK_CACHE: Dict[str, str] = {}
|
||||
|
||||
|
||||
@@ -69,10 +68,14 @@ def _try_enable_mpv_file_logging(mpv_log_path: str, *, attempts: int = 3) -> boo
|
||||
# Try to set log-file and verbose level.
|
||||
r1 = _send_ipc_command({"command": ["set_property", "options/log-file", mpv_log_path]})
|
||||
r2 = _send_ipc_command({"command": ["set_property", "options/msg-level", "all=v"]})
|
||||
ok = bool((r1 and r1.get("error") == "success") or (r2 and r2.get("error") == "success"))
|
||||
ok = bool(
|
||||
(r1 and r1.get("error") == "success") or (r2 and r2.get("error") == "success")
|
||||
)
|
||||
|
||||
# Emit a predictable line so the file isn't empty if logging is active.
|
||||
_send_ipc_command({"command": ["print-text", f"medeia: log enabled -> {mpv_log_path}"]}, silent=True)
|
||||
_send_ipc_command(
|
||||
{"command": ["print-text", f"medeia: log enabled -> {mpv_log_path}"]}, silent=True
|
||||
)
|
||||
except Exception:
|
||||
ok = False
|
||||
|
||||
@@ -86,6 +89,7 @@ def _try_enable_mpv_file_logging(mpv_log_path: str, *, attempts: int = 3) -> boo
|
||||
|
||||
try:
|
||||
import time
|
||||
|
||||
time.sleep(0.15)
|
||||
except Exception:
|
||||
break
|
||||
@@ -168,6 +172,7 @@ def _ensure_lyric_overlay(mpv: MPV) -> None:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def _send_ipc_command(command: Dict[str, Any], silent: bool = False) -> Optional[Any]:
|
||||
"""Send a command to the MPV IPC pipe and return the response."""
|
||||
try:
|
||||
@@ -189,11 +194,12 @@ def _get_playlist(silent: bool = False) -> Optional[List[Dict[str, Any]]]:
|
||||
return resp.get("data", [])
|
||||
return []
|
||||
|
||||
|
||||
def _extract_title_from_item(item: Dict[str, Any]) -> str:
|
||||
"""Extract a clean title from an MPV playlist item, handling memory:// M3U hacks."""
|
||||
title = item.get("title")
|
||||
filename = item.get("filename") or ""
|
||||
|
||||
|
||||
# Special handling for memory:// M3U playlists (used to pass titles via IPC)
|
||||
if "memory://" in filename and "#EXTINF:" in filename:
|
||||
try:
|
||||
@@ -204,18 +210,18 @@ def _extract_title_from_item(item: Dict[str, Any]) -> str:
|
||||
extracted_title = match.group(1).strip()
|
||||
if not title or title == "memory://":
|
||||
title = extracted_title
|
||||
|
||||
|
||||
# If we still don't have a title, try to find the URL in the M3U content
|
||||
if not title:
|
||||
lines = filename.splitlines()
|
||||
for line in lines:
|
||||
line = line.strip()
|
||||
if line and not line.startswith('#') and not line.startswith('memory://'):
|
||||
if line and not line.startswith("#") and not line.startswith("memory://"):
|
||||
# Found the URL, use it as title
|
||||
return line
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
return title or filename or "Unknown"
|
||||
|
||||
|
||||
@@ -225,7 +231,7 @@ def _extract_target_from_memory_uri(text: str) -> Optional[str]:
|
||||
return None
|
||||
for line in text.splitlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith('#') or line.startswith('memory://'):
|
||||
if not line or line.startswith("#") or line.startswith("memory://"):
|
||||
continue
|
||||
return line
|
||||
return None
|
||||
@@ -233,11 +239,11 @@ def _extract_target_from_memory_uri(text: str) -> Optional[str]:
|
||||
|
||||
def _find_hydrus_instance_for_hash(hash_str: str, file_storage: Any) -> Optional[str]:
|
||||
"""Find which Hydrus instance serves a specific file hash.
|
||||
|
||||
|
||||
Args:
|
||||
hash_str: SHA256 hash (64 hex chars)
|
||||
file_storage: FileStorage instance with Hydrus backends
|
||||
|
||||
|
||||
Returns:
|
||||
Instance name (e.g., 'home') or None if not found
|
||||
"""
|
||||
@@ -248,7 +254,7 @@ def _find_hydrus_instance_for_hash(hash_str: str, file_storage: Any) -> Optional
|
||||
backend_class = type(backend).__name__
|
||||
if backend_class != "HydrusNetwork":
|
||||
continue
|
||||
|
||||
|
||||
try:
|
||||
# Query metadata to see if this instance has the file
|
||||
metadata = backend.get_metadata(hash_str)
|
||||
@@ -257,44 +263,44 @@ def _find_hydrus_instance_for_hash(hash_str: str, file_storage: Any) -> Optional
|
||||
except Exception:
|
||||
# This instance doesn't have the file or had an error
|
||||
continue
|
||||
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _find_hydrus_instance_by_url(url: str, file_storage: Any) -> Optional[str]:
|
||||
"""Find which Hydrus instance matches a given URL.
|
||||
|
||||
|
||||
Args:
|
||||
url: Full URL (e.g., http://localhost:45869/get_files/file?hash=...)
|
||||
file_storage: FileStorage instance with Hydrus backends
|
||||
|
||||
|
||||
Returns:
|
||||
Instance name (e.g., 'home') or None if not found
|
||||
"""
|
||||
from urllib.parse import urlparse
|
||||
|
||||
|
||||
parsed_target = urlparse(url)
|
||||
target_netloc = parsed_target.netloc.lower()
|
||||
|
||||
|
||||
# Check each Hydrus backend's URL
|
||||
for backend_name in file_storage.list_backends():
|
||||
backend = file_storage[backend_name]
|
||||
backend_class = type(backend).__name__
|
||||
if backend_class != "HydrusNetwork":
|
||||
continue
|
||||
|
||||
|
||||
# Get the backend's base URL from its client
|
||||
try:
|
||||
backend_url = backend._client.base_url
|
||||
parsed_backend = urlparse(backend_url)
|
||||
backend_netloc = parsed_backend.netloc.lower()
|
||||
|
||||
|
||||
# Match by netloc (host:port)
|
||||
if target_netloc == backend_netloc:
|
||||
return backend_name
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@@ -324,17 +330,19 @@ def _normalize_playlist_path(text: Optional[str]) -> Optional[str]:
|
||||
pass
|
||||
|
||||
# Normalize slashes for Windows paths and lowercase for comparison
|
||||
real = real.replace('\\', '/')
|
||||
real = real.replace("\\", "/")
|
||||
return real.lower()
|
||||
|
||||
|
||||
def _infer_store_from_playlist_item(item: Dict[str, Any], file_storage: Optional[Any] = None) -> str:
|
||||
def _infer_store_from_playlist_item(
|
||||
item: Dict[str, Any], file_storage: Optional[Any] = None
|
||||
) -> str:
|
||||
"""Infer a friendly store label from an MPV playlist entry.
|
||||
|
||||
|
||||
Args:
|
||||
item: MPV playlist item dict
|
||||
file_storage: Optional FileStorage instance for querying specific backend instances
|
||||
|
||||
|
||||
Returns:
|
||||
Store label (e.g., 'home', 'work', 'local', 'youtube', etc.)
|
||||
"""
|
||||
@@ -423,7 +431,7 @@ def _infer_store_from_playlist_item(item: Dict[str, Any], file_storage: Optional
|
||||
return hydrus_instance
|
||||
return "hydrus"
|
||||
|
||||
parts = host_stripped.split('.')
|
||||
parts = host_stripped.split(".")
|
||||
if len(parts) >= 2:
|
||||
return parts[-2] or host_stripped
|
||||
return host_stripped
|
||||
@@ -440,7 +448,9 @@ def _build_hydrus_header(config: Dict[str, Any]) -> Optional[str]:
|
||||
return f"Hydrus-Client-API-Access-Key: {key}"
|
||||
|
||||
|
||||
def _build_ytdl_options(config: Optional[Dict[str, Any]], hydrus_header: Optional[str]) -> Optional[str]:
|
||||
def _build_ytdl_options(
|
||||
config: Optional[Dict[str, Any]], hydrus_header: Optional[str]
|
||||
) -> Optional[str]:
|
||||
"""Compose ytdl-raw-options string including cookies and optional Hydrus header."""
|
||||
opts: List[str] = []
|
||||
cookies_path = None
|
||||
@@ -454,7 +464,7 @@ def _build_ytdl_options(config: Optional[Dict[str, Any]], hydrus_header: Optiona
|
||||
cookies_path = None
|
||||
|
||||
if cookies_path:
|
||||
opts.append(f"cookies={cookies_path.replace('\\', '/')}" )
|
||||
opts.append(f"cookies={cookies_path.replace('\\', '/')}")
|
||||
else:
|
||||
opts.append("cookies-from-browser=chrome")
|
||||
if hydrus_header:
|
||||
@@ -484,9 +494,11 @@ def _is_hydrus_path(path: str, hydrus_url: Optional[str]) -> bool:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _ensure_ytdl_cookies(config: Optional[Dict[str, Any]] = None) -> None:
|
||||
"""Ensure yt-dlp options are set correctly for this session."""
|
||||
from pathlib import Path
|
||||
|
||||
cookies_path = None
|
||||
try:
|
||||
from tool.ytdlp import YtDlpTool
|
||||
@@ -498,7 +510,7 @@ def _ensure_ytdl_cookies(config: Optional[Dict[str, Any]] = None) -> None:
|
||||
cookies_path = None
|
||||
if cookies_path:
|
||||
# Check if file exists and has content (use forward slashes for path checking)
|
||||
check_path = cookies_path.replace('\\', '/')
|
||||
check_path = cookies_path.replace("\\", "/")
|
||||
file_obj = Path(cookies_path)
|
||||
if file_obj.exists():
|
||||
file_size = file_obj.stat().st_size
|
||||
@@ -508,6 +520,7 @@ def _ensure_ytdl_cookies(config: Optional[Dict[str, Any]] = None) -> None:
|
||||
else:
|
||||
debug("No cookies file configured")
|
||||
|
||||
|
||||
def _monitor_mpv_logs(duration: float = 3.0) -> None:
|
||||
"""Monitor MPV logs for a short duration to capture errors."""
|
||||
try:
|
||||
@@ -516,16 +529,17 @@ def _monitor_mpv_logs(duration: float = 3.0) -> None:
|
||||
if not client.connect():
|
||||
debug("Failed to connect to MPV for log monitoring", file=sys.stderr)
|
||||
return
|
||||
|
||||
|
||||
# Request log messages
|
||||
client.send_command({"command": ["request_log_messages", "warn"]})
|
||||
|
||||
|
||||
# On Windows named pipes, avoid blocking the CLI; skip log read entirely
|
||||
if client.is_windows:
|
||||
client.disconnect()
|
||||
return
|
||||
|
||||
import time
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
# Unix sockets already have timeouts set; read until duration expires
|
||||
@@ -585,14 +599,18 @@ def _tail_text_file(path: str, *, max_lines: int = 120, max_bytes: int = 65536)
|
||||
return lines
|
||||
except Exception:
|
||||
return []
|
||||
def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[Dict[str, Any]]) -> Optional[tuple[str, Optional[str]]]:
|
||||
|
||||
|
||||
def _get_playable_path(
|
||||
item: Any, file_storage: Optional[Any], config: Optional[Dict[str, Any]]
|
||||
) -> Optional[tuple[str, Optional[str]]]:
|
||||
"""Extract a playable path/URL from an item, handling different store types.
|
||||
|
||||
|
||||
Args:
|
||||
item: Item to extract path from (dict, PipeObject, or string)
|
||||
file_storage: FileStorage instance for querying backends
|
||||
config: Config dict for Hydrus URL
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple of (path, title) or None if no valid path found
|
||||
"""
|
||||
@@ -600,7 +618,7 @@ def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[
|
||||
title: Optional[str] = None
|
||||
store: Optional[str] = None
|
||||
file_hash: Optional[str] = None
|
||||
|
||||
|
||||
# Extract fields from item - prefer a disk path ('path'), but accept 'url' as fallback for providers
|
||||
if isinstance(item, dict):
|
||||
path = item.get("path")
|
||||
@@ -614,13 +632,25 @@ def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[
|
||||
title = item.get("title") or item.get("file_title")
|
||||
store = item.get("store")
|
||||
file_hash = item.get("hash")
|
||||
elif hasattr(item, "path") or hasattr(item, "url") or hasattr(item, "source_url") or hasattr(item, "store") or hasattr(item, "hash"):
|
||||
elif (
|
||||
hasattr(item, "path")
|
||||
or hasattr(item, "url")
|
||||
or hasattr(item, "source_url")
|
||||
or hasattr(item, "store")
|
||||
or hasattr(item, "hash")
|
||||
):
|
||||
# Handle PipeObject / dataclass objects - prefer path, but fall back to url/source_url attributes
|
||||
path = getattr(item, "path", None)
|
||||
if not path:
|
||||
path = getattr(item, "url", None) or getattr(item, "source_url", None) or getattr(item, "target", None)
|
||||
path = (
|
||||
getattr(item, "url", None)
|
||||
or getattr(item, "source_url", None)
|
||||
or getattr(item, "target", None)
|
||||
)
|
||||
if not path:
|
||||
known = getattr(item, "url", None) or (getattr(item, "extra", None) or {}).get("url")
|
||||
known = getattr(item, "url", None) or (getattr(item, "extra", None) or {}).get(
|
||||
"url"
|
||||
)
|
||||
if known and isinstance(known, list):
|
||||
path = known[0]
|
||||
title = getattr(item, "title", None) or getattr(item, "file_title", None)
|
||||
@@ -628,13 +658,13 @@ def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[
|
||||
file_hash = getattr(item, "hash", None)
|
||||
elif isinstance(item, str):
|
||||
path = item
|
||||
|
||||
|
||||
# Debug: show incoming values
|
||||
try:
|
||||
debug(f"_get_playable_path: store={store}, path={path}, hash={file_hash}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# Treat common placeholders as missing.
|
||||
if isinstance(path, str) and path.strip().lower() in {"", "n/a", "na", "none"}:
|
||||
path = None
|
||||
@@ -644,7 +674,7 @@ def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[
|
||||
|
||||
if isinstance(file_hash, str):
|
||||
file_hash = file_hash.strip().lower()
|
||||
|
||||
|
||||
# Resolve hash+store into a playable target (file path or URL).
|
||||
# This is unrelated to MPV's IPC pipe and keeps "pipe" terminology reserved for:
|
||||
# - MPV IPC pipe (transport)
|
||||
@@ -663,7 +693,11 @@ def _get_playable_path(item: Any, file_storage: Optional[Any], config: Optional[
|
||||
backend_class = type(backend).__name__
|
||||
|
||||
# Folder stores: resolve to an on-disk file path.
|
||||
if hasattr(backend, "get_file") and callable(getattr(backend, "get_file")) and backend_class == "Folder":
|
||||
if (
|
||||
hasattr(backend, "get_file")
|
||||
and callable(getattr(backend, "get_file"))
|
||||
and backend_class == "Folder"
|
||||
):
|
||||
try:
|
||||
resolved = backend.get_file(file_hash)
|
||||
if isinstance(resolved, Path):
|
||||
@@ -705,11 +739,11 @@ def _queue_items(
|
||||
start_opts: Optional[Dict[str, Any]] = None,
|
||||
) -> bool:
|
||||
"""Queue items to MPV, starting it if necessary.
|
||||
|
||||
|
||||
Args:
|
||||
items: List of items to queue
|
||||
clear_first: If True, the first item will replace the current playlist
|
||||
|
||||
|
||||
Returns:
|
||||
True if MPV was started, False if items were queued via IPC.
|
||||
"""
|
||||
@@ -729,11 +763,12 @@ def _queue_items(
|
||||
hydrus_url = get_hydrus_url(config) if config is not None else None
|
||||
except Exception:
|
||||
hydrus_url = None
|
||||
|
||||
|
||||
# Initialize Store registry for path resolution
|
||||
file_storage = None
|
||||
try:
|
||||
from Store import Store
|
||||
|
||||
file_storage = Store(config or {})
|
||||
except Exception as e:
|
||||
debug(f"Warning: Could not initialize Store registry: {e}", file=sys.stderr)
|
||||
@@ -757,7 +792,9 @@ def _queue_items(
|
||||
# Remove duplicates from playlist starting from the end to keep indices valid
|
||||
for idx in reversed(dup_indexes):
|
||||
try:
|
||||
_send_ipc_command({"command": ["playlist-remove", idx], "request_id": 106}, silent=True)
|
||||
_send_ipc_command(
|
||||
{"command": ["playlist-remove", idx], "request_id": 106}, silent=True
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -774,7 +811,7 @@ def _queue_items(
|
||||
if not result:
|
||||
debug(f"_queue_items: item idx={i} produced no playable path")
|
||||
continue
|
||||
|
||||
|
||||
target, title = result
|
||||
|
||||
# If the target is an AllDebrid protected file URL, unlock it to a direct link for MPV.
|
||||
@@ -812,15 +849,19 @@ def _queue_items(
|
||||
if base_url:
|
||||
effective_hydrus_url = str(base_url).rstrip("/")
|
||||
if key:
|
||||
effective_hydrus_header = f"Hydrus-Client-API-Access-Key: {str(key).strip()}"
|
||||
effective_hydrus_header = (
|
||||
f"Hydrus-Client-API-Access-Key: {str(key).strip()}"
|
||||
)
|
||||
effective_ytdl_opts = _build_ytdl_options(config, effective_hydrus_header)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
if target:
|
||||
# If we just have a hydrus hash, build a direct file URL for MPV
|
||||
if re.fullmatch(r"[0-9a-f]{64}", str(target).strip().lower()) and effective_hydrus_url:
|
||||
target = f"{effective_hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"
|
||||
target = (
|
||||
f"{effective_hydrus_url.rstrip('/')}/get_files/file?hash={str(target).strip()}"
|
||||
)
|
||||
|
||||
norm_key = _normalize_playlist_path(target) or str(target).strip().lower()
|
||||
if norm_key in existing_targets or norm_key in new_targets:
|
||||
@@ -833,13 +874,17 @@ def _queue_items(
|
||||
# show the raw URL as the playlist title.
|
||||
if title:
|
||||
# Sanitize title for M3U (remove newlines)
|
||||
safe_title = title.replace('\n', ' ').replace('\r', '')
|
||||
safe_title = title.replace("\n", " ").replace("\r", "")
|
||||
|
||||
# Carry the store name for hash URLs so MPV.lyric can resolve the backend.
|
||||
# This is especially important for local file-server URLs like /get_files/file?hash=...
|
||||
target_for_m3u = target
|
||||
try:
|
||||
if item_store_name and isinstance(target_for_m3u, str) and target_for_m3u.startswith("http"):
|
||||
if (
|
||||
item_store_name
|
||||
and isinstance(target_for_m3u, str)
|
||||
and target_for_m3u.startswith("http")
|
||||
):
|
||||
if "get_files/file" in target_for_m3u and "store=" not in target_for_m3u:
|
||||
sep = "&" if "?" in target_for_m3u else "?"
|
||||
target_for_m3u = f"{target_for_m3u}{sep}store={item_store_name}"
|
||||
@@ -858,10 +903,16 @@ def _queue_items(
|
||||
# If this is a Hydrus path, set header property and yt-dlp headers before loading.
|
||||
# Use the real target (not the memory:// wrapper) for detection.
|
||||
if effective_hydrus_header and _is_hydrus_path(str(target), effective_hydrus_url):
|
||||
header_cmd = {"command": ["set_property", "http-header-fields", effective_hydrus_header], "request_id": 199}
|
||||
header_cmd = {
|
||||
"command": ["set_property", "http-header-fields", effective_hydrus_header],
|
||||
"request_id": 199,
|
||||
}
|
||||
_send_ipc_command(header_cmd, silent=True)
|
||||
if effective_ytdl_opts:
|
||||
ytdl_cmd = {"command": ["set_property", "ytdl-raw-options", effective_ytdl_opts], "request_id": 197}
|
||||
ytdl_cmd = {
|
||||
"command": ["set_property", "ytdl-raw-options", effective_ytdl_opts],
|
||||
"request_id": 197,
|
||||
}
|
||||
_send_ipc_command(ytdl_cmd, silent=True)
|
||||
|
||||
cmd = {"command": ["loadfile", target_to_send, mode], "request_id": 200}
|
||||
@@ -872,11 +923,13 @@ def _queue_items(
|
||||
except Exception as e:
|
||||
debug(f"Exception sending loadfile to MPV: {e}", file=sys.stderr)
|
||||
resp = None
|
||||
|
||||
|
||||
if resp is None:
|
||||
# MPV not running (or died)
|
||||
# Start MPV with remaining items
|
||||
debug(f"MPV not running/died while queuing, starting MPV with remaining items: {items[i:]}")
|
||||
debug(
|
||||
f"MPV not running/died while queuing, starting MPV with remaining items: {items[i:]}"
|
||||
)
|
||||
_start_mpv(items[i:], config=config, start_opts=start_opts)
|
||||
return True
|
||||
elif resp.get("error") == "success":
|
||||
@@ -884,13 +937,14 @@ def _queue_items(
|
||||
# would change the MPV window title even if the item isn't currently playing.
|
||||
debug(f"Queued: {title or target}")
|
||||
else:
|
||||
error_msg = str(resp.get('error'))
|
||||
error_msg = str(resp.get("error"))
|
||||
debug(f"Failed to queue item: {error_msg}", file=sys.stderr)
|
||||
return False
|
||||
|
||||
|
||||
def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
"""Manage and play items in the MPV playlist via IPC."""
|
||||
|
||||
|
||||
parsed = parse_cmdlet_args(args, CMDLET)
|
||||
|
||||
log_requested = bool(parsed.get("log"))
|
||||
@@ -912,7 +966,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
log_dir = _repo_log_dir()
|
||||
mpv_log_path = str((log_dir / "medeia-mpv.log").resolve())
|
||||
except Exception:
|
||||
mpv_log_path = str((Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".") / "medeia-mpv.log").resolve())
|
||||
mpv_log_path = str(
|
||||
(
|
||||
Path(os.environ.get("TEMP") or os.environ.get("TMP") or ".")
|
||||
/ "medeia-mpv.log"
|
||||
).resolve()
|
||||
)
|
||||
# Ensure file exists early so we can tail it even if mpv writes later.
|
||||
try:
|
||||
Path(mpv_log_path).parent.mkdir(parents=True, exist_ok=True)
|
||||
@@ -1019,11 +1078,11 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Emit the current item to pipeline
|
||||
result_obj = {
|
||||
'path': filename,
|
||||
'title': title,
|
||||
'cmdlet_name': '.pipe',
|
||||
'source': 'pipe',
|
||||
'__pipe_index': items.index(current_item),
|
||||
"path": filename,
|
||||
"title": title,
|
||||
"cmdlet_name": ".pipe",
|
||||
"source": "pipe",
|
||||
"__pipe_index": items.index(current_item),
|
||||
}
|
||||
|
||||
ctx.emit(result_obj)
|
||||
@@ -1040,6 +1099,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if mpv_started:
|
||||
# MPV was just started, wait a moment for it to be ready, then play first item
|
||||
import time
|
||||
|
||||
time.sleep(0.5)
|
||||
index_arg = "1" # 1-based index for first item
|
||||
play_mode = True
|
||||
@@ -1061,6 +1121,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if mpv_started:
|
||||
# MPV was just started; give it a moment, then play first item.
|
||||
import time
|
||||
|
||||
time.sleep(0.5)
|
||||
index_arg = "1"
|
||||
else:
|
||||
@@ -1156,7 +1217,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
# Queue items (replacing current playlist)
|
||||
if items:
|
||||
_queue_items(items, clear_first=True, config=config, start_opts=start_opts)
|
||||
_queue_items(
|
||||
items, clear_first=True, config=config, start_opts=start_opts
|
||||
)
|
||||
else:
|
||||
# Empty playlist, just clear
|
||||
_send_ipc_command({"command": ["playlist-clear"]}, silent=True)
|
||||
@@ -1180,22 +1243,22 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
table = ResultTable("Saved Playlists")
|
||||
for i, pl in enumerate(playlists):
|
||||
item_count = len(pl.get('items', []))
|
||||
item_count = len(pl.get("items", []))
|
||||
row = table.add_row()
|
||||
# row.add_column("ID", str(pl['id'])) # Hidden as per user request
|
||||
row.add_column("Name", pl['name'])
|
||||
row.add_column("Name", pl["name"])
|
||||
row.add_column("Items", str(item_count))
|
||||
row.add_column("Updated", pl['updated_at'])
|
||||
row.add_column("Updated", pl["updated_at"])
|
||||
|
||||
# Set the playlist items as the result object for this row
|
||||
# When user selects @N, they get the list of items
|
||||
# We also set the source command to .pipe -load <ID> so it loads it
|
||||
table.set_row_selection_args(i, ["-load", str(pl['id'])])
|
||||
table.set_row_selection_args(i, ["-load", str(pl["id"])])
|
||||
|
||||
table.set_source_command(".pipe")
|
||||
|
||||
# Register results
|
||||
ctx.set_last_result_table_overlay(table, [p['items'] for p in playlists])
|
||||
ctx.set_last_result_table_overlay(table, [p["items"] for p in playlists])
|
||||
ctx.set_current_stage_table(table)
|
||||
|
||||
# Do not print directly here.
|
||||
@@ -1243,7 +1306,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
playlist_before = _get_playlist(silent=True)
|
||||
idle_before = None
|
||||
try:
|
||||
idle_resp = _send_ipc_command({"command": ["get_property", "idle-active"], "request_id": 111}, silent=True)
|
||||
idle_resp = _send_ipc_command(
|
||||
{"command": ["get_property", "idle-active"], "request_id": 111}, silent=True
|
||||
)
|
||||
if idle_resp and idle_resp.get("error") == "success":
|
||||
idle_before = bool(idle_resp.get("data"))
|
||||
except Exception:
|
||||
@@ -1262,7 +1327,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Debug: inspect incoming result and attributes
|
||||
try:
|
||||
debug(f"pipe._run: received result type={type(result)} repr={repr(result)[:200]}")
|
||||
debug(f"pipe._run: attrs path={getattr(result, 'path', None)} url={getattr(result, 'url', None)} store={getattr(result, 'store', None)} hash={getattr(result, 'hash', None)}")
|
||||
debug(
|
||||
f"pipe._run: attrs path={getattr(result, 'path', None)} url={getattr(result, 'url', None)} store={getattr(result, 'store', None)} hash={getattr(result, 'hash', None)}"
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -1294,8 +1361,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
|
||||
if should_autoplay and after_len > 0:
|
||||
idx_to_play = min(max(0, before_len), after_len - 1)
|
||||
play_resp = _send_ipc_command({"command": ["playlist-play-index", idx_to_play], "request_id": 112}, silent=True)
|
||||
_send_ipc_command({"command": ["set_property", "pause", False], "request_id": 113}, silent=True)
|
||||
play_resp = _send_ipc_command(
|
||||
{"command": ["playlist-play-index", idx_to_play], "request_id": 112},
|
||||
silent=True,
|
||||
)
|
||||
_send_ipc_command(
|
||||
{"command": ["set_property", "pause", False], "request_id": 113},
|
||||
silent=True,
|
||||
)
|
||||
if play_resp and play_resp.get("error") == "success":
|
||||
debug("Auto-playing piped item")
|
||||
|
||||
@@ -1315,6 +1388,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if mpv_started:
|
||||
# MPV was just started, retry getting playlist after a brief delay
|
||||
import time
|
||||
|
||||
time.sleep(0.3)
|
||||
items = _get_playlist(silent=True)
|
||||
|
||||
@@ -1324,10 +1398,20 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
return 0
|
||||
else:
|
||||
# Do not auto-launch MPV when no action/inputs were provided; avoid surprise startups
|
||||
no_inputs = not any([
|
||||
result, url_arg, index_arg, clear_mode, play_mode,
|
||||
pause_mode, save_mode, load_mode, current_mode, list_mode
|
||||
])
|
||||
no_inputs = not any(
|
||||
[
|
||||
result,
|
||||
url_arg,
|
||||
index_arg,
|
||||
clear_mode,
|
||||
play_mode,
|
||||
pause_mode,
|
||||
save_mode,
|
||||
load_mode,
|
||||
current_mode,
|
||||
list_mode,
|
||||
]
|
||||
)
|
||||
|
||||
if no_inputs:
|
||||
# User invoked `.pipe` with no args: treat this as an intent to open MPV.
|
||||
@@ -1337,6 +1421,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Re-check playlist after startup; if IPC still isn't ready, just exit cleanly.
|
||||
try:
|
||||
import time
|
||||
|
||||
time.sleep(0.3)
|
||||
except Exception:
|
||||
pass
|
||||
@@ -1386,18 +1471,26 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
list_mode = True
|
||||
index_arg = None
|
||||
else:
|
||||
debug(f"Failed to remove item: {resp.get('error') if resp else 'No response'}")
|
||||
debug(
|
||||
f"Failed to remove item: {resp.get('error') if resp else 'No response'}"
|
||||
)
|
||||
return 1
|
||||
else:
|
||||
# Play item
|
||||
if hydrus_header and _is_hydrus_path(filename, hydrus_url):
|
||||
header_cmd = {"command": ["set_property", "http-header-fields", hydrus_header], "request_id": 198}
|
||||
header_cmd = {
|
||||
"command": ["set_property", "http-header-fields", hydrus_header],
|
||||
"request_id": 198,
|
||||
}
|
||||
_send_ipc_command(header_cmd, silent=True)
|
||||
cmd = {"command": ["playlist-play-index", idx], "request_id": 102}
|
||||
resp = _send_ipc_command(cmd)
|
||||
if resp and resp.get("error") == "success":
|
||||
# Ensure playback starts (unpause)
|
||||
unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
|
||||
unpause_cmd = {
|
||||
"command": ["set_property", "pause", False],
|
||||
"request_id": 103,
|
||||
}
|
||||
_send_ipc_command(unpause_cmd)
|
||||
|
||||
debug(f"Playing: {title}")
|
||||
@@ -1410,7 +1503,9 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
list_mode = True
|
||||
index_arg = None
|
||||
else:
|
||||
debug(f"Failed to play item: {resp.get('error') if resp else 'No response'}")
|
||||
debug(
|
||||
f"Failed to play item: {resp.get('error') if resp else 'No response'}"
|
||||
)
|
||||
return 1
|
||||
except ValueError:
|
||||
debug(f"Invalid index: {index_arg}")
|
||||
@@ -1425,6 +1520,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
if file_storage is None:
|
||||
try:
|
||||
from Store import Store
|
||||
|
||||
file_storage = Store(config)
|
||||
except Exception as e:
|
||||
debug(f"Warning: Could not initialize Store registry: {e}", file=sys.stderr)
|
||||
@@ -1468,7 +1564,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
# Try to extract hash from filename (e.g., C:\path\1e8c46...a1b2.mp4)
|
||||
path_obj = Path(real_path)
|
||||
stem = path_obj.stem # filename without extension
|
||||
if len(stem) == 64 and all(c in '0123456789abcdef' for c in stem.lower()):
|
||||
if len(stem) == 64 and all(c in "0123456789abcdef" for c in stem.lower()):
|
||||
file_hash = stem.lower()
|
||||
# Find which folder store has this file
|
||||
if file_storage:
|
||||
@@ -1493,7 +1589,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
hash=file_hash or "unknown",
|
||||
store=store_name or "unknown",
|
||||
title=title,
|
||||
path=real_path
|
||||
path=real_path,
|
||||
)
|
||||
pipe_objects.append(pipe_obj)
|
||||
|
||||
@@ -1540,6 +1636,7 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
break
|
||||
try:
|
||||
import time
|
||||
|
||||
time.sleep(0.25)
|
||||
except Exception:
|
||||
break
|
||||
@@ -1550,8 +1647,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
print(ln)
|
||||
else:
|
||||
print("MPV log (tail): <empty>")
|
||||
print("Note: On some Windows builds, mpv cannot start writing to --log-file after launch.")
|
||||
print("If you need full [main2] logs, restart mpv so it starts with --log-file.")
|
||||
print(
|
||||
"Note: On some Windows builds, mpv cannot start writing to --log-file after launch."
|
||||
)
|
||||
print(
|
||||
"If you need full [main2] logs, restart mpv so it starts with --log-file."
|
||||
)
|
||||
|
||||
# Also print the helper log tail (this captures Python helper output that won't
|
||||
# necessarily show up in MPV's own log-file).
|
||||
@@ -1597,7 +1698,12 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_opts: Optional[Dict[str, Any]] = None) -> None:
|
||||
|
||||
def _start_mpv(
|
||||
items: List[Any],
|
||||
config: Optional[Dict[str, Any]] = None,
|
||||
start_opts: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
"""Start MPV with a list of items."""
|
||||
import time as _time_module
|
||||
|
||||
@@ -1624,7 +1730,7 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_
|
||||
|
||||
try:
|
||||
extra_args: List[str] = [
|
||||
'--ytdl-format=bestvideo[height<=?1080]+bestaudio/best[height<=?1080]',
|
||||
"--ytdl-format=bestvideo[height<=?1080]+bestaudio/best[height<=?1080]",
|
||||
]
|
||||
|
||||
# Optional: borderless window (useful for uosc-like overlay UI without fullscreen).
|
||||
@@ -1645,7 +1751,7 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_
|
||||
detached=True,
|
||||
)
|
||||
debug("Started MPV process")
|
||||
|
||||
|
||||
# Wait for IPC pipe to be ready
|
||||
if not mpv.wait_for_ipc(retries=20, delay_seconds=0.2):
|
||||
debug("Timed out waiting for MPV IPC connection", file=sys.stderr)
|
||||
@@ -1659,15 +1765,16 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_
|
||||
# Queue items via IPC
|
||||
if items:
|
||||
_queue_items(items, config=config, start_opts=start_opts)
|
||||
|
||||
|
||||
# Auto-play the first item
|
||||
import time
|
||||
|
||||
time.sleep(0.3) # Give MPV a moment to process the queued items
|
||||
|
||||
|
||||
# Play the first item (index 0) and unpause
|
||||
play_cmd = {"command": ["playlist-play-index", 0], "request_id": 102}
|
||||
play_resp = _send_ipc_command(play_cmd, silent=True)
|
||||
|
||||
|
||||
if play_resp and play_resp.get("error") == "success":
|
||||
# Ensure playback starts (unpause)
|
||||
unpause_cmd = {"command": ["set_property", "pause", False], "request_id": 103}
|
||||
@@ -1675,7 +1782,7 @@ def _start_mpv(items: List[Any], config: Optional[Dict[str, Any]] = None, start_
|
||||
debug("Auto-playing first item")
|
||||
|
||||
# Overlay already started above; it will follow track changes automatically.
|
||||
|
||||
|
||||
except Exception as e:
|
||||
debug(f"Error starting MPV: {e}", file=sys.stderr)
|
||||
|
||||
@@ -1688,36 +1795,19 @@ CMDLET = Cmdlet(
|
||||
arg=[
|
||||
CmdletArg(
|
||||
name="index",
|
||||
type="string", # Changed to string to allow URL detection
|
||||
type="string", # Changed to string to allow URL detection
|
||||
description="Index of item to play/clear, or URL to queue",
|
||||
required=False
|
||||
),
|
||||
CmdletArg(
|
||||
name="url",
|
||||
type="string",
|
||||
description="URL to queue",
|
||||
required=False
|
||||
required=False,
|
||||
),
|
||||
CmdletArg(name="url", type="string", description="URL to queue", required=False),
|
||||
CmdletArg(
|
||||
name="clear",
|
||||
type="flag",
|
||||
description="Remove the selected item, or clear entire playlist if no index provided"
|
||||
),
|
||||
CmdletArg(
|
||||
name="list",
|
||||
type="flag",
|
||||
description="List items (default)"
|
||||
),
|
||||
CmdletArg(
|
||||
name="play",
|
||||
type="flag",
|
||||
description="Resume playback"
|
||||
),
|
||||
CmdletArg(
|
||||
name="pause",
|
||||
type="flag",
|
||||
description="Pause playback"
|
||||
description="Remove the selected item, or clear entire playlist if no index provided",
|
||||
),
|
||||
CmdletArg(name="list", type="flag", description="List items (default)"),
|
||||
CmdletArg(name="play", type="flag", description="Resume playback"),
|
||||
CmdletArg(name="pause", type="flag", description="Pause playback"),
|
||||
CmdletArg(
|
||||
name="save",
|
||||
type="flag",
|
||||
@@ -1733,19 +1823,18 @@ CMDLET = Cmdlet(
|
||||
CmdletArg(
|
||||
name="current",
|
||||
type="flag",
|
||||
description="Emit the currently playing item to pipeline for further processing"
|
||||
description="Emit the currently playing item to pipeline for further processing",
|
||||
),
|
||||
CmdletArg(
|
||||
name="log",
|
||||
type="flag",
|
||||
description="Enable pipeable debug output and write an mpv log file"
|
||||
description="Enable pipeable debug output and write an mpv log file",
|
||||
),
|
||||
CmdletArg(
|
||||
name="borderless",
|
||||
type="flag",
|
||||
description="Start mpv with no window border (uosc-like overlay feel without fullscreen)"
|
||||
description="Start mpv with no window border (uosc-like overlay feel without fullscreen)",
|
||||
),
|
||||
],
|
||||
exec=_run
|
||||
exec=_run,
|
||||
)
|
||||
|
||||
|
||||
@@ -14,307 +14,329 @@ _TELEGRAM_PENDING_ITEMS_KEY = "telegram_pending_items"


def _has_flag(args: Sequence[str], flag: str) -> bool:
    try:
        want = str(flag or "").strip().lower()
        if not want:
            return False
        return any(str(a).strip().lower() == want for a in (args or []))
    except Exception:
        return False
    try:
        want = str(flag or "").strip().lower()
        if not want:
            return False
        return any(str(a).strip().lower() == want for a in (args or []))
    except Exception:
        return False


def _normalize_to_list(value: Any) -> List[Any]:
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [value]
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [value]


def _extract_chat_id(chat_obj: Any) -> Optional[int]:
    try:
        if isinstance(chat_obj, dict):
            maybe_id = chat_obj.get("id")
            if maybe_id is not None:
                return int(maybe_id)
            extra = chat_obj.get("extra")
            if isinstance(extra, dict):
                v = extra.get("id")
                if v is not None:
                    return int(v)
                v = extra.get("chat_id")
                if v is not None:
                    return int(v)
        # PipeObject stores unknown fields in .extra
        if hasattr(chat_obj, "extra"):
            extra = getattr(chat_obj, "extra")
            if isinstance(extra, dict):
                v = extra.get("id")
                if v is not None:
                    return int(v)
                v = extra.get("chat_id")
                if v is not None:
                    return int(v)
        if hasattr(chat_obj, "id"):
            maybe_id = getattr(chat_obj, "id")
            if maybe_id is not None:
                return int(maybe_id)
    except Exception:
        return None
    return None
    try:
        if isinstance(chat_obj, dict):
            maybe_id = chat_obj.get("id")
            if maybe_id is not None:
                return int(maybe_id)
            extra = chat_obj.get("extra")
            if isinstance(extra, dict):
                v = extra.get("id")
                if v is not None:
                    return int(v)
                v = extra.get("chat_id")
                if v is not None:
                    return int(v)
        # PipeObject stores unknown fields in .extra
        if hasattr(chat_obj, "extra"):
            extra = getattr(chat_obj, "extra")
            if isinstance(extra, dict):
                v = extra.get("id")
                if v is not None:
                    return int(v)
                v = extra.get("chat_id")
                if v is not None:
                    return int(v)
        if hasattr(chat_obj, "id"):
            maybe_id = getattr(chat_obj, "id")
            if maybe_id is not None:
                return int(maybe_id)
    except Exception:
        return None
    return None


def _extract_chat_username(chat_obj: Any) -> str:
    try:
        if isinstance(chat_obj, dict):
            u = chat_obj.get("username")
            return str(u or "").strip()
        if hasattr(chat_obj, "extra"):
            extra = getattr(chat_obj, "extra")
            if isinstance(extra, dict):
                u = extra.get("username")
                if isinstance(u, str) and u.strip():
                    return u.strip()
        if hasattr(chat_obj, "username"):
            return str(getattr(chat_obj, "username") or "").strip()
    except Exception:
        return ""
    return ""
    try:
        if isinstance(chat_obj, dict):
            u = chat_obj.get("username")
            return str(u or "").strip()
        if hasattr(chat_obj, "extra"):
            extra = getattr(chat_obj, "extra")
            if isinstance(extra, dict):
                u = extra.get("username")
                if isinstance(u, str) and u.strip():
                    return u.strip()
        if hasattr(chat_obj, "username"):
            return str(getattr(chat_obj, "username") or "").strip()
    except Exception:
        return ""
    return ""


def _extract_title(item: Any) -> str:
    try:
        if isinstance(item, dict):
            return str(item.get("title") or "").strip()
        if hasattr(item, "title"):
            return str(getattr(item, "title") or "").strip()
        # PipeObject stores some fields in .extra
        if hasattr(item, "extra"):
            extra = getattr(item, "extra")
            if isinstance(extra, dict):
                v = extra.get("title")
                if isinstance(v, str) and v.strip():
                    return v.strip()
    except Exception:
        return ""
    return ""
    try:
        if isinstance(item, dict):
            return str(item.get("title") or "").strip()
        if hasattr(item, "title"):
            return str(getattr(item, "title") or "").strip()
        # PipeObject stores some fields in .extra
        if hasattr(item, "extra"):
            extra = getattr(item, "extra")
            if isinstance(extra, dict):
                v = extra.get("title")
                if isinstance(v, str) and v.strip():
                    return v.strip()
    except Exception:
        return ""
    return ""


def _extract_file_path(item: Any) -> Optional[str]:
    def _maybe(value: Any) -> Optional[str]:
        if value is None:
            return None
        text = str(value).strip()
        if not text:
            return None
        if text.startswith("http://") or text.startswith("https://"):
            return None
        try:
            p = Path(text).expanduser()
            if p.exists():
                return str(p)
        except Exception:
            return None
        return None
    def _maybe(value: Any) -> Optional[str]:
        if value is None:
            return None
        text = str(value).strip()
        if not text:
            return None
        if text.startswith("http://") or text.startswith("https://"):
            return None
        try:
            p = Path(text).expanduser()
            if p.exists():
                return str(p)
        except Exception:
            return None
        return None

    try:
        if hasattr(item, "path"):
            found = _maybe(getattr(item, "path"))
            if found:
                return found
        if hasattr(item, "file_path"):
            found = _maybe(getattr(item, "file_path"))
            if found:
                return found
        if isinstance(item, dict):
            for key in ("path", "file_path", "target"):
                found = _maybe(item.get(key))
                if found:
                    return found
    except Exception:
        return None
    return None
    try:
        if hasattr(item, "path"):
            found = _maybe(getattr(item, "path"))
            if found:
                return found
        if hasattr(item, "file_path"):
            found = _maybe(getattr(item, "file_path"))
            if found:
                return found
        if isinstance(item, dict):
            for key in ("path", "file_path", "target"):
                found = _maybe(item.get(key))
                if found:
                    return found
    except Exception:
        return None
    return None


def _run(_result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
    from Provider.telegram import Telegram
    from Provider.telegram import Telegram

    try:
        provider = Telegram(config)
    except Exception as exc:
        log(f"Telegram not available: {exc}", file=sys.stderr)
        return 1
    try:
        provider = Telegram(config)
    except Exception as exc:
        log(f"Telegram not available: {exc}", file=sys.stderr)
        return 1

    if _has_flag(args, "-login"):
        ok = False
        try:
            ok = provider.ensure_session(prompt=True)
        except Exception:
            ok = False
        if not ok:
            err = getattr(provider, "_last_login_error", None)
            if isinstance(err, str) and err.strip():
                log(f"Telegram login failed: {err}", file=sys.stderr)
            else:
                log("Telegram login failed (no session created).", file=sys.stderr)
            return 1
        log("Telegram login OK (authorized session ready).", file=sys.stderr)
        return 0
    if _has_flag(args, "-login"):
        ok = False
        try:
            ok = provider.ensure_session(prompt=True)
        except Exception:
            ok = False
        if not ok:
            err = getattr(provider, "_last_login_error", None)
            if isinstance(err, str) and err.strip():
                log(f"Telegram login failed: {err}", file=sys.stderr)
            else:
                log("Telegram login failed (no session created).", file=sys.stderr)
            return 1
        log("Telegram login OK (authorized session ready).", file=sys.stderr)
        return 0

    # Internal stage: send previously selected pipeline items to selected chats.
    if _has_flag(args, "-send"):
        # Ensure we don't keep showing the picker table on the send stage.
        try:
            if hasattr(ctx, "set_last_result_table_overlay"):
                ctx.set_last_result_table_overlay(None, None, None)
        except Exception:
            pass
        try:
            if hasattr(ctx, "set_current_stage_table"):
                ctx.set_current_stage_table(None)
        except Exception:
            pass
    # Internal stage: send previously selected pipeline items to selected chats.
    if _has_flag(args, "-send"):
        # Ensure we don't keep showing the picker table on the send stage.
        try:
            if hasattr(ctx, "set_last_result_table_overlay"):
                ctx.set_last_result_table_overlay(None, None, None)
        except Exception:
            pass
        try:
            if hasattr(ctx, "set_current_stage_table"):
                ctx.set_current_stage_table(None)
        except Exception:
            pass

        selected_chats = _normalize_to_list(_result)
        chat_ids: List[int] = []
        chat_usernames: List[str] = []
        for c in selected_chats:
            cid = _extract_chat_id(c)
            if cid is not None:
                chat_ids.append(cid)
            else:
                u = _extract_chat_username(c)
                if u:
                    chat_usernames.append(u)
        selected_chats = _normalize_to_list(_result)
        chat_ids: List[int] = []
        chat_usernames: List[str] = []
        for c in selected_chats:
            cid = _extract_chat_id(c)
            if cid is not None:
                chat_ids.append(cid)
            else:
                u = _extract_chat_username(c)
                if u:
                    chat_usernames.append(u)

        # De-dupe chat identifiers (preserve order).
        try:
            chat_ids = list(dict.fromkeys([int(x) for x in chat_ids]))
        except Exception:
            pass
        try:
            chat_usernames = list(dict.fromkeys([str(u).strip() for u in chat_usernames if str(u).strip()]))
        except Exception:
            pass
        # De-dupe chat identifiers (preserve order).
        try:
            chat_ids = list(dict.fromkeys([int(x) for x in chat_ids]))
        except Exception:
            pass
        try:
            chat_usernames = list(
                dict.fromkeys([str(u).strip() for u in chat_usernames if str(u).strip()])
            )
        except Exception:
            pass

        if not chat_ids and not chat_usernames:
            log("No Telegram chat selected (use @N on the Telegram table)", file=sys.stderr)
            return 1
        if not chat_ids and not chat_usernames:
            log("No Telegram chat selected (use @N on the Telegram table)", file=sys.stderr)
            return 1

        pending_items = ctx.load_value(_TELEGRAM_PENDING_ITEMS_KEY, default=[])
        items = _normalize_to_list(pending_items)
        if not items:
            log("No pending items to send (use: @N | .telegram)", file=sys.stderr)
            return 1
        pending_items = ctx.load_value(_TELEGRAM_PENDING_ITEMS_KEY, default=[])
        items = _normalize_to_list(pending_items)
        if not items:
            log("No pending items to send (use: @N | .telegram)", file=sys.stderr)
            return 1

        file_jobs: List[Dict[str, str]] = []
        any_failed = False
        for item in items:
            p = _extract_file_path(item)
            if not p:
                any_failed = True
                log("Telegram send requires local file path(s) on the piped item(s)", file=sys.stderr)
                continue
            title = _extract_title(item)
            file_jobs.append({"path": p, "title": title})
        file_jobs: List[Dict[str, str]] = []
        any_failed = False
        for item in items:
            p = _extract_file_path(item)
            if not p:
                any_failed = True
                log(
                    "Telegram send requires local file path(s) on the piped item(s)",
                    file=sys.stderr,
                )
                continue
            title = _extract_title(item)
            file_jobs.append({"path": p, "title": title})

        # De-dupe file paths (preserve order).
        try:
            seen: set[str] = set()
            unique_jobs: List[Dict[str, str]] = []
            for j in file_jobs:
                k = str(j.get("path") or "").strip().lower()
                if not k or k in seen:
                    continue
                seen.add(k)
                unique_jobs.append(j)
            file_jobs = unique_jobs
        except Exception:
            pass
        # De-dupe file paths (preserve order).
        try:
            seen: set[str] = set()
            unique_jobs: List[Dict[str, str]] = []
            for j in file_jobs:
                k = str(j.get("path") or "").strip().lower()
                if not k or k in seen:
                    continue
                seen.add(k)
                unique_jobs.append(j)
            file_jobs = unique_jobs
        except Exception:
            pass

        if not file_jobs:
            return 1
        if not file_jobs:
            return 1

        try:
            provider.send_files_to_chats(chat_ids=chat_ids, usernames=chat_usernames, files=file_jobs)
        except Exception as exc:
            log(f"Telegram send failed: {exc}", file=sys.stderr)
            any_failed = True
        try:
            provider.send_files_to_chats(
                chat_ids=chat_ids, usernames=chat_usernames, files=file_jobs
            )
        except Exception as exc:
            log(f"Telegram send failed: {exc}", file=sys.stderr)
            any_failed = True

        ctx.store_value(_TELEGRAM_PENDING_ITEMS_KEY, [])
        return 1 if any_failed else 0
        ctx.store_value(_TELEGRAM_PENDING_ITEMS_KEY, [])
        return 1 if any_failed else 0

    selected_items = _normalize_to_list(_result)
    if selected_items:
        ctx.store_value(_TELEGRAM_PENDING_ITEMS_KEY, selected_items)
    else:
        # Avoid stale sends if the user just wants to browse chats.
        try:
            ctx.store_value(_TELEGRAM_PENDING_ITEMS_KEY, [])
        except Exception:
            pass
        try:
            if hasattr(ctx, "clear_pending_pipeline_tail"):
                ctx.clear_pending_pipeline_tail()
        except Exception:
            pass
    selected_items = _normalize_to_list(_result)
    if selected_items:
        ctx.store_value(_TELEGRAM_PENDING_ITEMS_KEY, selected_items)
    else:
        # Avoid stale sends if the user just wants to browse chats.
        try:
            ctx.store_value(_TELEGRAM_PENDING_ITEMS_KEY, [])
        except Exception:
            pass
        try:
            if hasattr(ctx, "clear_pending_pipeline_tail"):
                ctx.clear_pending_pipeline_tail()
        except Exception:
            pass

    # Default: list available chats/channels (requires an existing session or bot_token).
    try:
        rows = provider.list_chats(limit=200)
    except Exception as exc:
        log(f"Failed to list Telegram chats: {exc}", file=sys.stderr)
        return 1
    # Default: list available chats/channels (requires an existing session or bot_token).
    try:
        rows = provider.list_chats(limit=200)
    except Exception as exc:
        log(f"Failed to list Telegram chats: {exc}", file=sys.stderr)
        return 1

    # Only show dialogs you can typically post to.
    try:
        rows = [r for r in (rows or []) if str(r.get("type") or "").strip().lower() in {"group", "user"}]
    except Exception:
        pass
    # Only show dialogs you can typically post to.
    try:
        rows = [
            r for r in (rows or []) if str(r.get("type") or "").strip().lower() in {"group", "user"}
        ]
    except Exception:
        pass

    if not rows:
        log("No Telegram groups/users available (or not logged in). Run: .telegram -login", file=sys.stderr)
        return 0
    if not rows:
        log(
            "No Telegram groups/users available (or not logged in). Run: .telegram -login",
            file=sys.stderr,
        )
        return 0

    table = ResultTable("Telegram Chats")
    table.set_table("telegram")
    table.set_source_command(".telegram", [])
    table = ResultTable("Telegram Chats")
    table.set_table("telegram")
    table.set_source_command(".telegram", [])

    chat_items: List[Dict[str, Any]] = []
    for item in rows:
        row = table.add_row()
        title = str(item.get("title") or "").strip()
        username = str(item.get("username") or "").strip()
        chat_id = item.get("id")
        kind = str(item.get("type") or "").strip()
        row.add_column("Type", kind)
        row.add_column("Title", title)
        row.add_column("Username", username)
        row.add_column("Id", str(chat_id) if chat_id is not None else "")
        chat_items.append(
            {
                **item,
                "store": "telegram",
                "title": title or username or str(chat_id) or "Telegram",
            }
        )
    chat_items: List[Dict[str, Any]] = []
    for item in rows:
        row = table.add_row()
        title = str(item.get("title") or "").strip()
        username = str(item.get("username") or "").strip()
        chat_id = item.get("id")
        kind = str(item.get("type") or "").strip()
        row.add_column("Type", kind)
        row.add_column("Title", title)
        row.add_column("Username", username)
        row.add_column("Id", str(chat_id) if chat_id is not None else "")
        chat_items.append(
            {
                **item,
                "store": "telegram",
                "title": title or username or str(chat_id) or "Telegram",
            }
        )

    # Overlay table: ensures @N selection targets this Telegram picker, not a previous table.
    ctx.set_last_result_table_overlay(table, chat_items)
    ctx.set_current_stage_table(table)
    if selected_items:
        ctx.set_pending_pipeline_tail([[".telegram", "-send"]], ".telegram")
    return 0
    # Overlay table: ensures @N selection targets this Telegram picker, not a previous table.
    ctx.set_last_result_table_overlay(table, chat_items)
    ctx.set_current_stage_table(table)
    if selected_items:
        ctx.set_pending_pipeline_tail([[".telegram", "-send"]], ".telegram")
    return 0


CMDLET = Cmdlet(
    name=".telegram",
    alias=["telegram"],
    summary="Telegram login and chat listing",
    usage="@N | .telegram (pick a chat, then send piped files)",
    arg=[
        CmdletArg(name="login", type="bool", description="Create/refresh a Telegram session (prompts)", required=False),
        CmdletArg(name="send", type="bool", description="(internal) Send to selected chat(s)", required=False),
    ],
    exec=_run,
    name=".telegram",
    alias=["telegram"],
    summary="Telegram login and chat listing",
    usage="@N | .telegram (pick a chat, then send piped files)",
    arg=[
        CmdletArg(
            name="login",
            type="bool",
            description="Create/refresh a Telegram session (prompts)",
            required=False,
        ),
        CmdletArg(
            name="send",
            type="bool",
            description="(internal) Send to selected chat(s)",
            required=False,
        ),
    ],
    exec=_run,
)
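
The -send stage of the .telegram cmdlet above de-duplicates chat ids, usernames, and file paths while preserving the user's selection order. A standalone illustration of the dict.fromkeys idiom it relies on (a dict keeps the first occurrence of each key and preserves insertion order on Python 3.7+); the values here are made up for the example:

chat_ids = [42, 7, 42, 13, 7]
assert list(dict.fromkeys(chat_ids)) == [42, 7, 13]

chat_usernames = [" alice ", "bob", "alice", ""]
cleaned = list(dict.fromkeys(u.strip() for u in chat_usernames if u.strip()))
assert cleaned == ["alice", "bob"]
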

@@ -1,4 +1,5 @@
"""Worker cmdlet: Display workers table in ResultTable format."""

from __future__ import annotations

import sys
@@ -21,11 +22,26 @@ CMDLET = Cmdlet(
    summary="Display workers table in result table format.",
    usage=".worker [status] [-limit N] [@N]",
    arg=[
        CmdletArg("status", description="Filter by status: running, completed, error (default: all)", requires_db=True),
        CmdletArg("limit", type="integer", description="Limit results (default: 100)", requires_db=True),
        CmdletArg("@N", description="Select worker by index (1-based) and display full logs", requires_db=True),
        CmdletArg(
            "status",
            description="Filter by status: running, completed, error (default: all)",
            requires_db=True,
        ),
        CmdletArg(
            "limit", type="integer", description="Limit results (default: 100)", requires_db=True
        ),
        CmdletArg(
            "@N",
            description="Select worker by index (1-based) and display full logs",
            requires_db=True,
        ),
        CmdletArg("-id", description="Show full logs for a specific worker", requires_db=True),
        CmdletArg("-clear", type="flag", description="Remove completed workers from the database", requires_db=True),
        CmdletArg(
            "-clear",
            type="flag",
            description="Remove completed workers from the database",
            requires_db=True,
        ),
    ],
    detail=[
        "- Shows all background worker tasks and their output",
@@ -188,7 +204,11 @@ def _render_worker_selection(db, selected_items: Any) -> int:
            continue
        events: List[Dict[str, Any]] = []
        try:
            events = db.get_worker_events(worker.get("worker_id")) if hasattr(db, "get_worker_events") else []
            events = (
                db.get_worker_events(worker.get("worker_id"))
                if hasattr(db, "get_worker_events")
                else []
            )
        except Exception:
            events = []
        _emit_worker_detail(worker, events)
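
The wrapped conditional in the hunk above guards an optional database capability: get_worker_events is only called when the db object actually provides it, with an empty list as the fallback. A generic, self-contained sketch of that guard pattern (both classes are hypothetical, for illustration only):

class LegacyDb:
    """Backend without worker-event support."""


class NewDb:
    """Backend that can report worker events."""

    def get_worker_events(self, worker_id):
        return [{"worker_id": worker_id, "event": "started"}]


for db in (LegacyDb(), NewDb()):
    events = db.get_worker_events("w1") if hasattr(db, "get_worker_events") else []
    print(type(db).__name__, events)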