2026-01-11 04:54:27 -08:00
parent bf8ef6d128
commit 5f8f49c530
6 changed files with 239 additions and 69 deletions

CLI.py
View File

@@ -1935,7 +1935,7 @@ class CmdletExecutor:
if ret_code != 0:
stage_status = "failed"
stage_error = f"exit code {ret_code}"
print(f"[exit code: {ret_code}]\n")
# No print here - we want to keep output clean and avoid redundant "exit code" notices.
except Exception as exc:
stage_status = "failed"
stage_error = f"{type(exc).__name__}: {exc}"

View File

@@ -47,13 +47,23 @@ async def _suppress_aioslsk_asyncio_task_noise() -> Any:
try:
exc = context.get("exception")
msg = str(context.get("message") or "")
# Only suppress un-retrieved task exceptions from aioslsk connection failures.
if msg == "Task exception was never retrieved" and exc is not None:
cls = getattr(exc, "__class__", None)
name = getattr(cls, "__name__", "")
mod = getattr(cls, "__module__", "")
if name == "ConnectionFailedError" and str(mod).startswith("aioslsk"):
return
if exc is not None:
# Suppress internal asyncio AssertionError on Windows teardown (Proactor loop)
if isinstance(exc, AssertionError):
m_lower = msg.lower()
if "proactor" in m_lower or "_start_serving" in m_lower or "self._sockets is not None" in str(exc):
return
# Only suppress un-retrieved task exceptions from aioslsk connection failures.
if msg == "Task exception was never retrieved":
cls = getattr(exc, "__class__", None)
name = getattr(cls, "__name__", "")
mod = getattr(cls, "__module__", "")
# Suppress ConnectionFailedError from aioslsk
if name == "ConnectionFailedError" and str(mod).startswith("aioslsk"):
return
except Exception:
# If our filter logic fails, fall through to default handling.
pass
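For reference, a handler like this is attached with loop.set_exception_handler and receives the loop plus a context dict; a minimal standalone sketch of the same filtering idea, with the installation point assumed rather than taken from this module:

import asyncio
from typing import Any, Dict

def _quiet_handler(loop: asyncio.AbstractEventLoop, context: Dict[str, Any]) -> None:
    """Drop known-noisy errors; defer everything else to asyncio's default reporting."""
    exc = context.get("exception")
    msg = str(context.get("message") or "")
    if isinstance(exc, AssertionError) and "proactor" in msg.lower():
        return  # Windows Proactor teardown noise
    if msg == "Task exception was never retrieved" and type(exc).__name__ == "ConnectionFailedError":
        return  # un-retrieved aioslsk connection failure
    loop.default_exception_handler(context)

async def main() -> None:
    # The handler must be set on the loop that will actually run the tasks.
    asyncio.get_running_loop().set_exception_handler(_quiet_handler)
    ...  # start the aioslsk client, run transfers, shut down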
@@ -206,10 +216,34 @@ def _suppress_aioslsk_noise() -> Any:
class Soulseek(Provider):
TABLE_AUTO_STAGES = {
"soulseek": ["download-file"],
"soulseek": ["download-file", "-provider", "soulseek"],
}
"""Search provider for Soulseek P2P network."""
def selector(
self,
selected_items: List[Any],
*,
ctx: Any,
stage_is_last: bool = True,
**_kwargs: Any,
) -> bool:
"""Handle Soulseek selection.
Currently defaults to download-file via TABLE_AUTO_STAGES, but this
hook allows for future 'Browse User' or 'Browse Folder' drill-down.
"""
if not stage_is_last:
return False
# If we wanted to handle drill-down (like HIFI.py) we would:
# 1. Fetch more data (e.g. user shares)
# 2. Create a new ResultTable
# 3. ctx.set_current_stage_table(new_table)
# 4. return True
return False
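A hedged sketch of what the drill-down branch described in the comment could look like, modeled on the HIFI.py pattern the docstring mentions; browse_user_shares and the ResultTable/add_row calls are assumptions, only ctx.set_current_stage_table comes from the comment above:

def selector(self, selected_items, *, ctx, stage_is_last=True, **_kwargs):
    if not stage_is_last:
        return False
    item = selected_items[0] if selected_items else None
    username = (getattr(item, "full_metadata", None) or {}).get("username") if item else None
    if not username:
        return False
    # 1. Fetch more data for the selected user (hypothetical helper).
    shares = asyncio.run(self.browse_user_shares(username))
    # 2. Build a new ResultTable from the fetched rows (API assumed).
    new_table = ResultTable(title=f"Shares of {username}")
    for share in shares:
        new_table.add_row(share)
    # 3. Replace the current stage table so the pipeline renders the drill-down.
    ctx.set_current_stage_table(new_table)
    # 4. True tells the executor the selection was handled here.
    return True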
@classmethod
def config(cls) -> List[Dict[str, Any]]:
return [
@@ -277,19 +311,34 @@ class Soulseek(Provider):
username = full_metadata.get("username")
filename = full_metadata.get("filename") or result.path
if not username or not filename:
# If we were invoked via generic download-file on a SearchResult
# that has minimal data (e.g. from table selection), try to rescue it.
if isinstance(result, SearchResult) and result.full_metadata:
username = result.full_metadata.get("username")
filename = result.full_metadata.get("filename")
if not username or not filename:
log(
f"[soulseek] Missing metadata for download: {result.title}",
file=sys.stderr
)
return None
# Fall back to a temp directory when the caller passed '.' or the generic
# 'downloads' placeholder instead of a real output directory.
target_dir = Path(output_dir)
if str(target_dir) == "." or str(target_dir) == "downloads":
import tempfile
target_dir = Path(tempfile.gettempdir()) / "Medios" / "Soulseek"
target_dir.mkdir(parents=True, exist_ok=True)
# This cmdlet stack is synchronous; use asyncio.run for clarity.
return asyncio.run(
download_soulseek_file(
username=username,
filename=filename,
output_dir=output_dir,
output_dir=target_dir,
timeout=self.MAX_WAIT_TRANSFER,
)
)
@@ -328,7 +377,7 @@ class Soulseek(Provider):
from aioslsk.client import SoulSeekClient
from aioslsk.settings import CredentialsSettings, Settings
os.makedirs(self.DOWNLOAD_DIR, exist_ok=True)
# Removed legacy os.makedirs(self.DOWNLOAD_DIR) - specific commands handle output dirs.
settings = Settings(
credentials=CredentialsSettings(
@@ -376,8 +425,9 @@ class Soulseek(Provider):
await client.stop()
except Exception:
pass
# Give Proactor/Windows loop a moment to drain internal buffers after stop.
try:
await asyncio.sleep(0)
await asyncio.sleep(0.2)
except Exception:
pass
@@ -386,33 +436,19 @@ class Soulseek(Provider):
for result in getattr(search_request, "results", []):
username = getattr(result, "username", "?")
def _add(file_data: Any) -> None:
flat.append({
"file": file_data,
"username": username,
"filename": getattr(file_data, "filename", "?"),
"size": getattr(file_data, "filesize", 0)
})
for file_data in getattr(result, "shared_items", []):
flat.append(
{
"file": file_data,
"username": username,
"filename": getattr(file_data,
"filename",
"?"),
"size": getattr(file_data,
"filesize",
0),
}
)
_add(file_data)
for file_data in getattr(result, "locked_results", []):
flat.append(
{
"file": file_data,
"username": username,
"filename": getattr(file_data,
"filename",
"?"),
"size": getattr(file_data,
"filesize",
0),
}
)
_add(file_data)
return flat
@@ -440,6 +476,11 @@ class Soulseek(Provider):
) -> List[SearchResult]:
filters = filters or {}
# Ensure temp download dir structure exists, but don't create legacy ./downloads here.
import tempfile
base_tmp = Path(tempfile.gettempdir()) / "Medios" / "Soulseek"
base_tmp.mkdir(parents=True, exist_ok=True)
try:
flat_results = asyncio.run(
self.perform_search(query,
@@ -561,6 +602,7 @@ class Soulseek(Provider):
media_kind="audio",
size_bytes=item["size"],
columns=columns,
selection_action=["download-file", "-provider", "soulseek"],
full_metadata={
"username": item["username"],
"filename": item["filename"],
@@ -568,6 +610,7 @@ class Soulseek(Provider):
"album": item["album"],
"track_num": item["track_num"],
"ext": item["ext"],
"provider": "soulseek"
},
)
)
@@ -785,8 +828,9 @@ async def download_soulseek_file(
except Exception:
pass
# Let cancellation/cleanup callbacks run while our exception handler is still installed.
# Increased to 0.2s for Windows Proactor loop stability.
try:
await asyncio.sleep(0)
await asyncio.sleep(0.2)
except Exception:
pass

View File

@@ -24,6 +24,8 @@ class SearchResult:
size_bytes: Optional[int] = None
tag: set[str] = field(default_factory=set) # Searchable tag values
columns: List[Tuple[str, str]] = field(default_factory=list) # Display columns
selection_action: Optional[Dict[str, Any]] = None  # Invoked when this result is selected from a table (soulseek passes a cmdlet argv list)
selection_args: Optional[List[str]] = None
full_metadata: Dict[str, Any] = field(default_factory=dict) # Extra metadata
def to_dict(self) -> Dict[str, Any]:
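Taken together with the soulseek hunk above, the new field is populated roughly like this (a sketch: title and path are inferred from the result.title / result.path reads earlier in the diff, and every literal value is invented for illustration):

result = SearchResult(
    title="Artist - Track.flac",
    path="@user\\Music\\Artist - Track.flac",
    media_kind="audio",
    size_bytes=34_567_890,
    columns=[("Title", "Artist - Track.flac"), ("Size", "33.0 MB")],
    # Run when this row is picked from the result table; soulseek passes argv-style tokens.
    selection_action=["download-file", "-provider", "soulseek"],
    full_metadata={
        "username": "user",
        "filename": "@user\\Music\\Artist - Track.flac",
        "provider": "soulseek",
    },
)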

View File

@@ -15,7 +15,7 @@ from types import ModuleType
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Type
from urllib.parse import urlparse
from SYS.logger import log
from SYS.logger import log, debug
from ProviderCore.base import FileProvider, Provider, SearchProvider, SearchResult
from Provider.soulseek import download_soulseek_file
@@ -265,17 +265,17 @@ def _provider_url_patterns(provider_class: Type[Provider]) -> Sequence[str]:
def get_provider(name: str, config: Optional[Dict[str, Any]] = None) -> Optional[Provider]:
info = REGISTRY.get(name)
if info is None:
log(f"[provider] Unknown provider: {name}", file=sys.stderr)
debug(f"[provider] Unknown provider: {name}")
return None
try:
provider = info.provider_class(config)
if not provider.validate():
log(f"[provider] Provider '{name}' is not available", file=sys.stderr)
debug(f"[provider] Provider '{name}' is not available")
return None
return provider
except Exception as exc:
log(f"[provider] Error initializing '{name}': {exc}", file=sys.stderr)
debug(f"[provider] Error initializing '{name}': {exc}")
return None
@@ -296,7 +296,7 @@ def get_search_provider(name: str,
if provider is None:
return None
if not _supports_search(provider):
log(f"[provider] Provider '{name}' does not support search", file=sys.stderr)
debug(f"[provider] Provider '{name}' does not support search")
return None
return provider # type: ignore[return-value]
@@ -320,7 +320,7 @@ def get_file_provider(name: str,
if provider is None:
return None
if not _supports_upload(provider):
log(f"[provider] Provider '{name}' does not support upload", file=sys.stderr)
debug(f"[provider] Provider '{name}' does not support upload")
return None
return provider # type: ignore[return-value]

View File

@@ -77,23 +77,108 @@ def capture_rich_output(*, stdout: TextIO, stderr: TextIO) -> Iterator[None]:
def show_provider_config_panel(
provider_name: str,
keys: Sequence[str] | None = None,
*,
config_hint: str = "config.conf"
provider_names: str | List[str],
) -> None:
"""Show a Rich panel explaining how to configure a provider."""
pass
"""Show a Rich panel explaining how to configure providers."""
from rich.table import Table
from rich.text import Text
from rich.console import Group
if isinstance(provider_names, str):
providers = [p.strip() for p in provider_names.split(",")]
else:
providers = provider_names
table = Table.grid(padding=(0, 1))
table.add_column(style="bold red")
for provider in providers:
table.add_row(f"{provider}")
group = Group(
Text("The following providers are not configured and cannot be used:\n"),
table,
Text.from_markup("\nTo configure them, run the command with [bold cyan].config[/bold cyan] or use the [bold green]TUI[/bold green] config menu.")
)
panel = Panel(
group,
title="[bold red]Configuration Required[/bold red]",
border_style="red",
padding=(1, 2)
)
stdout_console().print()
stdout_console().print(panel)
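Because the new signature normalizes its input, both call forms below are equivalent (a usage sketch; "openlibrary" is a placeholder provider name):

show_provider_config_panel("soulseek, openlibrary")        # comma-separated string
show_provider_config_panel(["soulseek", "openlibrary"])    # explicit list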
def show_store_config_panel(
store_type: str,
keys: Sequence[str] | None = None,
*,
config_hint: str = "config.conf"
store_names: str | List[str],
) -> None:
"""Show a Rich panel explaining how to configure a storage backend."""
pass
"""Show a Rich panel explaining how to configure storage backends."""
from rich.table import Table
from rich.text import Text
from rich.console import Group
if isinstance(store_names, str):
stores = [s.strip() for s in store_names.split(",")]
else:
stores = store_names
table = Table.grid(padding=(0, 1))
table.add_column(style="bold yellow")
for store in stores:
table.add_row(f"{store}")
group = Group(
Text("The following stores are not configured or available:\n"),
table,
Text.from_markup("\nInitialize them using [bold cyan].config[/bold cyan] or ensure they are properly set up.")
)
panel = Panel(
group,
title="[bold yellow]Store Not Configured[/bold yellow]",
border_style="yellow",
padding=(1, 2)
)
stdout_console().print()
stdout_console().print(panel)
def show_available_providers_panel(provider_names: List[str]) -> None:
"""Show a Rich panel listing available/configured providers."""
from rich.columns import Columns
from rich.console import Group
from rich.text import Text
if not provider_names:
return
# Use Columns to display them efficiently in the panel
cols = Columns(
[f"[bold green] \u2713 [/bold green]{p}" for p in sorted(provider_names)],
equal=True,
column_first=True,
expand=True
)
group = Group(
Text("The following providers are configured and ready to use:\n"),
cols
)
panel = Panel(
group,
title="[bold green]Configured Providers[/bold green]",
border_style="green",
padding=(1, 2)
)
stdout_console().print()
stdout_console().print(panel)
IMAGE_EXTENSIONS = {".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp", ".tiff"}

View File

@@ -13,6 +13,11 @@ import sys
from SYS.logger import log, debug
from ProviderCore.registry import get_search_provider, list_search_providers
from SYS.config import get_local_storage_path
from SYS.rich_display import (
show_provider_config_panel,
show_store_config_panel,
show_available_providers_panel,
)
from . import _shared as sh
@@ -165,29 +170,54 @@ class search_file(Cmdlet):
"""Execute external provider search."""
if not provider_name or not query:
from SYS import pipeline as ctx_mod
progress = None
if hasattr(ctx_mod, "get_pipeline_state"):
progress = ctx_mod.get_pipeline_state().live_progress
if progress:
try:
progress.stop()
except Exception:
pass
log("Error: search-file -provider requires both provider and query", file=sys.stderr)
log(f"Usage: {self.usage}", file=sys.stderr)
log("Available providers:", file=sys.stderr)
providers = list_search_providers(config)
for name, available in sorted(providers.items()):
status = "\u2713" if available else "\u2717"
log(f" {status} {name}", file=sys.stderr)
providers_map = list_search_providers(config)
available = [n for n, a in providers_map.items() if a]
unconfigured = [n for n, a in providers_map.items() if not a]
if unconfigured:
show_provider_config_panel(unconfigured)
if available:
show_available_providers_panel(available)
return 1
# Align with provider default when user did not set -limit.
if not limit_set:
limit = 50
debug(f"[search-file] provider={provider_name}, query={query}, limit={limit}, open_id={open_id}")
from SYS import pipeline as ctx_mod
progress = None
if hasattr(ctx_mod, "get_pipeline_state"):
progress = ctx_mod.get_pipeline_state().live_progress
provider = get_search_provider(provider_name, config)
if not provider:
log(f"Error: Provider '{provider_name}' is not available", file=sys.stderr)
log("Available providers:", file=sys.stderr)
providers = list_search_providers(config)
for name, available in sorted(providers.items()):
if available:
log(f" - {name}", file=sys.stderr)
if progress:
try:
progress.stop()
except Exception:
pass
show_provider_config_panel([provider_name])
providers_map = list_search_providers(config)
available = [n for n, a in providers_map.items() if a]
if available:
show_available_providers_panel(available)
return 1
worker_id = str(uuid.uuid4())
@@ -595,7 +625,16 @@ class search_file(Cmdlet):
continue
if not library_root:
log("No library root configured. Use the .config command to set up storage.", file=sys.stderr)
from SYS import pipeline as ctx_mod
progress = None
if hasattr(ctx_mod, "get_pipeline_state"):
progress = ctx_mod.get_pipeline_state().live_progress
if progress:
try:
progress.stop()
except Exception:
pass
show_store_config_panel(["Folder Store"])
return 1
# Use context manager to ensure database is always closed
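The stop-the-live-progress-then-print-a-panel sequence now appears three times in this cmdlet; a hedged sketch of a helper that could centralize it (get_pipeline_state and live_progress come from the hunks above, the helper name is hypothetical):

def _stop_live_progress() -> None:
    """Stop any active Rich live progress so panels do not interleave with it."""
    from SYS import pipeline as ctx_mod
    if not hasattr(ctx_mod, "get_pipeline_state"):
        return
    progress = ctx_mod.get_pipeline_state().live_progress
    if progress:
        try:
            progress.stop()
        except Exception:
            pass

# Each error path then reduces to:
#     _stop_live_progress()
#     show_provider_config_panel(unconfigured)
#     return 1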