dfdsf
@@ -7,26 +7,22 @@ import shutil
 
 import models
 import pipeline as ctx
-from helper import hydrus as hydrus_wrapper
-from helper.logger import log, debug
-from helper.store import FileStorage
+from API import HydrusNetwork as hydrus_wrapper
+from SYS.logger import log, debug
+from Store import Store
 from ._shared import (
     Cmdlet, CmdletArg, parse_cmdlet_args, SharedArgs,
     extract_tags_from_result, extract_title_from_result, extract_url_from_result,
-    merge_sequences, extract_relationships, extract_duration, get_origin, coerce_to_pipe_object
+    merge_sequences, extract_relationships, extract_duration, coerce_to_pipe_object
 )
 from ._shared import collapse_namespace_tags
-from helper.folder_store import read_sidecar, find_sidecar, write_sidecar, FolderDB
-from helper.utils import sha256_file, unique_path
+from API.folder import read_sidecar, find_sidecar, write_sidecar, API_folder_store
+from SYS.utils import sha256_file, unique_path
 from metadata import write_metadata
 
 # Use official Hydrus supported filetypes from hydrus_wrapper
 SUPPORTED_MEDIA_EXTENSIONS = hydrus_wrapper.ALL_SUPPORTED_EXTENSIONS
 
-# Initialize file storage system
-storage = FileStorage()
-
-
 class Add_File(Cmdlet):
     """Add file into the DB"""
 
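For reference, the Store registry imported above is constructed per call site in the hunks below. A minimal sketch of the usage pattern as it appears in this diff (the backend name is illustrative; only list_backends, indexing by name, and get_file are taken from the hunks):

    store = Store(config)                          # registry built from the loaded config
    backends = store.list_backends()               # names of registered storage backends
    if "hydrus" in backends:                       # "hydrus" is only an illustrative name
        backend = store["hydrus"]                  # indexing returns the backend object
        path_or_url = backend.get_file(file_hash)  # Path for local stores, URL string for remote ones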
@@ -96,8 +92,11 @@ class Add_File(Cmdlet):
         media_path_or_url, file_hash = self._resolve_source(result, path_arg, pipe_obj, config)
+        debug(f"[add-file] RESOLVED source: path={media_path_or_url}, hash={file_hash[:12] if file_hash else 'N/A'}...")
         if not media_path_or_url:
-            debug(f"[add-file] ERROR: Could not resolve source file/URL")
-            return 1
+            debug(f"[add-file] ERROR: Could not resolve source file/URL")
+            return 1
 
         # Update pipe_obj with resolved path
         pipe_obj.path = str(media_path_or_url) if isinstance(media_path_or_url, (str, Path)) else str(media_path_or_url)
 
+        # Check if it's a URL before validating as file
+        if isinstance(media_path_or_url, str) and media_path_or_url.lower().startswith(("http://", "https://", "magnet:", "torrent:")):
@@ -116,15 +115,15 @@ class Add_File(Cmdlet):
         debug(f"[add-file] DECISION POINT: provider={provider_name}, location={location}")
         debug(f"  media_path={media_path}, exists={media_path.exists()}")
 
-        # Execute transfer based on destination (using class-based FileStorage system)
+        # Execute transfer based on destination (using Store registry)
         if provider_name:
            debug(f"[add-file] ROUTE: file provider upload")
            return self._handle_provider_upload(media_path, provider_name, pipe_obj, config, delete_after)
         elif location:
-            # Check if location is a registered backend name using FileStorage
+            # Check if location is a registered backend name
             try:
-                storage = FileStorage(config)
-                backends = storage.list_backends()
+                store = Store(config)
+                backends = store.list_backends()
 
                 if location in backends:
                     debug(f"[add-file] ROUTE: storage backend '{location}'")
@@ -165,15 +164,19 @@ class Add_File(Cmdlet):
             debug(f"[add-file] Using hash+store from result: hash={result_hash[:12]}..., store={result_store}")
             # Use get_file to retrieve from the specific store
             try:
-                from helper.store import FileStorage
-                storage = FileStorage(config)
-                if result_store in storage.list_backends():
-                    backend = storage[result_store]
+                store = Store(config)
+                if result_store in store.list_backends():
+                    backend = store[result_store]
                     media_path = backend.get_file(result_hash)
-                    if media_path and media_path.exists():
+                    if isinstance(media_path, Path) and media_path.exists():
                         pipe_obj.path = str(media_path)
                         debug(f"[add-file] Retrieved file from {result_store}: {media_path}")
                         return media_path, result_hash
+
+                    if isinstance(media_path, str) and media_path.lower().startswith(("http://", "https://")):
+                        pipe_obj.path = media_path
+                        debug(f"[add-file] Retrieved URL from {result_store}: {media_path}")
+                        return media_path, result_hash
             except Exception as exc:
                 debug(f"[add-file] Failed to retrieve via hash+store: {exc}")
 
@@ -385,20 +388,6 @@ class Add_File(Cmdlet):
         url = list(extract_url_from_result(result) or [])
         return url
 
-    @staticmethod
-    def _get_origin(result: Any, pipe_obj: models.PipeObject) -> Optional[str]:
-        try:
-            if isinstance(pipe_obj.extra, dict):
-                origin = get_origin(pipe_obj.extra)
-                if origin:
-                    return origin
-        except Exception:
-            pass
-
-        if isinstance(result, dict):
-            return get_origin(result)
-        return None
-
     @staticmethod
     def _get_relationships(result: Any, pipe_obj: models.PipeObject) -> Optional[Dict[str, Any]]:
         try:
@@ -427,16 +416,16 @@ class Add_File(Cmdlet):
     def _update_pipe_object_destination(
         pipe_obj: models.PipeObject,
         *,
-        hash: str,
+        hash_value: str,
         store: str,
-        file_path: str,
+        path: Optional[str],
         tags: List[str],
         title: Optional[str],
         extra_updates: Optional[Dict[str, Any]] = None,
     ) -> None:
-        pipe_obj.hash = hash
+        pipe_obj.hash = hash_value
         pipe_obj.store = store
-        pipe_obj.path = file_path
+        pipe_obj.path = path
         pipe_obj.tags = tags
         if title:
             pipe_obj.title = title
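A call-site sketch for the keyword-only signature above, with illustrative values (the actual call sites appear in later hunks); note that path is Optional and may be None when a backend exposes no usable filesystem path:

    Add_File._update_pipe_object_destination(
        pipe_obj,
        hash_value=file_hash or "unknown",
        store=backend_name,
        path=stored_path,   # may be None if the backend only returns a hash
        tags=tags,
        title=title,
    )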
@@ -485,9 +474,9 @@ class Add_File(Cmdlet):
         if preferred_title:
             preferred_title = preferred_title.replace("_", " ").strip()
 
-        result_origin = Add_File._get_origin(result, pipe_obj)
+        store = getattr(pipe_obj, "store", None)
         _, sidecar_hash, sidecar_tags, sidecar_url = Add_File._load_sidecar_bundle(
-            media_path, result_origin, config
+            media_path, store, config
         )
 
         def normalize_title_tag(tag: str) -> str:
@@ -589,7 +578,6 @@ class Add_File(Cmdlet):
 
         # Update PipeObject and emit
         extra_updates = {
             "storage_source": "local",
             "url": url,
             "export_path": str(destination_root),
         }
@@ -600,9 +588,9 @@ class Add_File(Cmdlet):
 
         Add_File._update_pipe_object_destination(
             pipe_obj,
-            hash=f_hash or "unknown",
+            hash_value=f_hash or "unknown",
             store="local",
-            file_path=str(target_path),
+            path=str(target_path),
             tags=tags,
             title=chosen_title,
             extra_updates=extra_updates,
@@ -615,6 +603,78 @@ class Add_File(Cmdlet):
 
         return 0
 
+    @staticmethod
+    def _download_soulseek_file(
+        result: Any,
+        config: Dict[str, Any]
+    ) -> Optional[Path]:
+        """
+        Download a file from Soulseek peer.
+
+        Extracts username and filename from soulseek result metadata and initiates download.
+        """
+        try:
+            import asyncio
+            from Provider.registry import download_soulseek_file
+            from pathlib import Path
+
+            # Extract metadata from result
+            full_metadata = {}
+            if isinstance(result, dict):
+                full_metadata = result.get("full_metadata", {})
+            elif hasattr(result, "extra") and isinstance(result.extra, dict) and "full_metadata" in result.extra:
+                full_metadata = result.extra.get("full_metadata", {})
+            elif hasattr(result, "full_metadata"):
+                # Direct attribute access (fallback)
+                val = getattr(result, "full_metadata", {})
+                if isinstance(val, dict):
+                    full_metadata = val
+
+            username = full_metadata.get("username")
+            filename = full_metadata.get("filename")
+
+            if not username or not filename:
+                debug(f"[add-file] ERROR: Could not extract soulseek metadata from result (type={type(result).__name__})")
+                if hasattr(result, "extra"):
+                    debug(f"[add-file] Result extra keys: {list(result.extra.keys())}")
+                return None
+
+            if not username or not filename:
+                debug(f"[add-file] ERROR: Missing soulseek metadata (username={username}, filename={filename})")
+                return None
+
+            debug(f"[add-file] Starting soulseek download: {username} -> {filename}")
+
+            # Determine output directory (prefer downloads folder in config)
+            output_dir = Path(config.get("output_dir", "./downloads")) if isinstance(config.get("output_dir"), str) else Path("./downloads")
+            output_dir.mkdir(parents=True, exist_ok=True)
+
+            # Run async download in event loop
+            try:
+                loop = asyncio.get_event_loop()
+                if loop.is_closed():
+                    loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(loop)
+            except RuntimeError:
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+
+            downloaded_path = loop.run_until_complete(
+                download_soulseek_file(
+                    username=username,
+                    filename=filename,
+                    output_dir=output_dir,
+                    timeout=1200  # 20 minutes
+                )
+            )
+
+            return downloaded_path
+
+        except Exception as e:
+            log(f"[add-file] Soulseek download error: {type(e).__name__}: {e}", file=sys.stderr)
+            debug(f"[add-file] Soulseek download traceback: {e}")
+            return None
+
     @staticmethod
     def _handle_provider_upload(
         media_path: Path,
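The event-loop boilerplate in _download_soulseek_file reuses or recreates a loop when none is running. On Python 3.7+ the same call can be expressed more simply with asyncio.run, provided no loop is already active in the calling thread (a sketch, not part of this commit; the download_soulseek_file arguments are taken from the hunk above):

    downloaded_path = asyncio.run(
        download_soulseek_file(
            username=username,
            filename=filename,
            output_dir=output_dir,
            timeout=1200,  # 20 minutes, matching the value used above
        )
    )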
@@ -624,7 +684,7 @@ class Add_File(Cmdlet):
         delete_after: bool,
     ) -> int:
         """Handle uploading to a file provider (e.g. 0x0)."""
-        from helper.provider import get_file_provider
+        from Provider.registry import get_file_provider
 
         log(f"Uploading via {provider_name}: {media_path.name}", file=sys.stderr)
 
@@ -666,9 +726,9 @@ class Add_File(Cmdlet):
         file_path = pipe_obj.path or (str(media_path) if media_path else None) or ""
         Add_File._update_pipe_object_destination(
             pipe_obj,
-            hash=f_hash or "unknown",
+            hash_value=f_hash or "unknown",
             store=provider_name or "provider",
-            file_path=file_path,
+            path=file_path,
             tags=pipe_obj.tags,
             title=pipe_obj.title or (media_path.name if media_path else None),
             extra_updates=extra_updates,
@@ -687,14 +747,11 @@ class Add_File(Cmdlet):
         delete_after: bool,
     ) -> int:
         """Handle uploading to a registered storage backend (e.g., 'test' folder store, 'hydrus', etc.)."""
-        from config import load_config
-
         log(f"Adding file to storage backend '{backend_name}': {media_path.name}", file=sys.stderr)
 
         try:
-            cfg = load_config()
-            storage = FileStorage(cfg)
-            backend = storage[backend_name]
+            store = Store(config)
+            backend = store[backend_name]
 
             # Prepare metadata from pipe_obj and sidecars
             tags, url, title, f_hash = Add_File._prepare_metadata(None, media_path, pipe_obj, config)
@@ -708,24 +765,26 @@ class Add_File(Cmdlet):
                 url=url
             )
             log(f"✓ File added to '{backend_name}': {file_identifier}", file=sys.stderr)
 
             # Update pipe object with result
-            # For backends that return paths, file_path = identifier
-            # For backends that return hashes, file_path = "backend:hash"
-            file_path_str = str(file_identifier)
-            if len(file_identifier) == 64 and all(c in '0123456789abcdef' for c in file_identifier.lower()):
-                # It's a hash - use backend:hash format
-                file_path_str = f"{backend_name}:{file_identifier}"
+            stored_path: Optional[str] = None
+            try:
+                maybe_path = backend.get_file(file_identifier)
+                if isinstance(maybe_path, Path):
+                    stored_path = str(maybe_path)
+                elif isinstance(maybe_path, str) and maybe_path:
+                    # Some backends may return a browser URL
+                    stored_path = maybe_path
+            except Exception:
+                stored_path = None
 
             Add_File._update_pipe_object_destination(
                 pipe_obj,
-                hash=file_identifier if len(file_identifier) == 64 else f_hash or "unknown",
+                hash_value=file_identifier if len(file_identifier) == 64 else f_hash or "unknown",
                 store=backend_name,
-                file_path=file_path_str,
+                path=stored_path,
                 tags=tags,
                 title=title or pipe_obj.title or media_path.name,
                 extra_updates={
                     "storage_source": backend_name,
                     "url": url,
                 },
             )
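The len(file_identifier) == 64 test above is a heuristic for "this identifier is a SHA-256 hex digest rather than a filesystem path". A hypothetical helper that makes the full check explicit (name and placement are assumptions, not part of this commit):

    def _looks_like_sha256(value: str) -> bool:
        # 64 hex characters, case-insensitive (e.g. the digest produced by sha256_file)
        return len(value) == 64 and all(c in "0123456789abcdef" for c in value.lower())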
@@ -745,16 +804,16 @@ class Add_File(Cmdlet):
     @staticmethod
     def _load_sidecar_bundle(
         media_path: Path,
-        origin: Optional[str],
+        store: Optional[str],
         config: Dict[str, Any],
     ) -> Tuple[Optional[Path], Optional[str], List[str], List[str]]:
         """Load sidecar metadata."""
-        if origin and origin.lower() == "local":
+        if store and store.lower() == "local":
             try:
                 from config import get_local_storage_path
                 db_root = get_local_storage_path(config)
                 if db_root:
-                    with FolderDB(Path(db_root)) as db:
+                    with API_folder_store(Path(db_root)) as db:
                         file_hash = db.get_file_hash(media_path)
                         if file_hash:
                             tags = db.get_tags(file_hash) or []
@@ -837,7 +896,7 @@ class Add_File(Cmdlet):
             except OSError:
                 payload['size'] = None
 
-            with FolderDB(library_root) as db:
+            with API_folder_store(library_root) as db:
                 try:
                     db.save_file_info(dest_path, payload, tags)
                 except Exception as exc:
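For reference, the API_folder_store usage in the two hunks above follows one pattern: open the folder database as a context manager, then read or write per-file metadata. A sketch under that assumption (only get_file_hash, get_tags, and save_file_info are taken from this diff):

    tags = []
    with API_folder_store(Path(db_root)) as db:        # opens the folder DB, closes it on exit
        file_hash = db.get_file_hash(media_path)       # reverse lookup: path -> stored hash
        if file_hash:
            tags = db.get_tags(file_hash) or []        # tags recorded for that hash
        db.save_file_info(dest_path, payload, tags)    # persist metadata and tags for a file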