Files
Medios-Macina/scripts/remote_storage_server.py

768 lines
26 KiB
Python
Raw Normal View History

2025-12-03 15:18:57 -08:00
"""Remote Storage Server - REST API for file management on mobile devices.
This server runs on a mobile device (Android with Termux, iOS with iSH, etc.)
and exposes the local library database as a REST API. Your PC connects to this
server and uses it as a remote storage backend through the RemoteStorageBackend.
## INSTALLATION
### On Android (Termux):
1. Install Termux from Play Store: https://play.google.com/store/apps/details?id=com.termux
2. In Termux:
$ apt update && apt install python
$ pip install flask flask-cors
3. Copy this file to your device
4. Run it (with optional API key):
2026-01-14 01:33:25 -08:00
$ python remote_storage_server.py --storage-path /path/to/storage --port 999
2025-12-03 15:18:57 -08:00
$ python remote_storage_server.py --storage-path /path/to/storage --api-key mysecretkey
5. Server prints connection info automatically (IP, port, API key)
2025-12-29 17:05:03 -08:00
2025-12-03 15:18:57 -08:00
### On PC:
1. Install requests: pip install requests
2025-12-13 00:18:30 -08:00
2. Add to config.conf:
[store=remote]
name="phone"
2026-01-14 01:33:25 -08:00
url="http://192.168.1.100:999"
2025-12-13 00:18:30 -08:00
api_key="mysecretkey"
timeout=30
2025-12-03 15:18:57 -08:00
Note: API key is optional. Works on WiFi or cellular data.
## USAGE
2025-12-12 21:55:38 -08:00
After setup, all cmdlet work with the phone:
2025-12-03 15:18:57 -08:00
$ search-file zohar -store phone
$ @1-3 | add-relationship -king @4 -store phone
$ @1 | get-relationship -store phone
The server exposes REST endpoints that RemoteStorageBackend uses internally.
"""
from __future__ import annotations
import os
import sys
import argparse
import logging
2026-01-14 04:27:54 -08:00
import threading
import time
2025-12-03 15:18:57 -08:00
from pathlib import Path
from typing import Optional, Dict, Any
from datetime import datetime
from functools import wraps
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
# ============================================================================
# CONFIGURATION
# ============================================================================
# Log to stderr with timestamps; route handlers log through this config.
logging.basicConfig(
    level=logging.INFO,
    format="[%(asctime)s] %(levelname)s: %(message)s"
)

logger = logging.getLogger(__name__)

# Both globals are populated by main() from CLI arguments before the Flask
# app starts serving; route handlers read them at request time.
STORAGE_PATH: Optional[Path] = None
API_KEY: Optional[str] = None  # API key for authentication (None = no auth required)
2026-01-14 15:56:04 -08:00
# Cache for database connection to prevent "database is locked" on high frequency requests
_DB_CACHE: Dict[str, Any] = {}
# Flask's built-in server handles requests on multiple threads; the lock
# prevents two concurrent requests from racing the check-then-insert below
# and each opening (and one leaking) a database connection.
_DB_LOCK = threading.Lock()


def get_db(path: Path):
    """Return a cached, already-entered LocalLibrarySearchOptimizer for *path*.

    The optimizer is created and __enter__()-ed exactly once per storage path
    and kept open for the lifetime of the process, so all requests share one
    connection instead of re-opening (and re-locking) the SQLite file.

    Args:
        path: Storage directory the database lives under.

    Returns:
        The shared LocalLibrarySearchOptimizer instance for *path*.
    """
    from API.folder import LocalLibrarySearchOptimizer

    key = str(path)
    with _DB_LOCK:
        db = _DB_CACHE.get(key)
        if db is None:
            # Construct and enter under the lock so only one connection is
            # ever opened per path, even under concurrent first requests.
            db = LocalLibrarySearchOptimizer(path)
            db.__enter__()
            _DB_CACHE[key] = db
    return db
2025-12-03 15:18:57 -08:00
# Try importing Flask - will be used in main() only
try:
from flask import Flask, request, jsonify
from flask_cors import CORS
2025-12-29 17:05:03 -08:00
2025-12-03 15:18:57 -08:00
HAS_FLASK = True
except ImportError:
HAS_FLASK = False
# ============================================================================
# UTILITY FUNCTIONS
# ============================================================================
2025-12-29 17:05:03 -08:00
2026-01-14 04:27:54 -08:00
def monitor_parent(parent_pid: int):
    """Monitor the parent process and shut down if it dies.

    Runs forever (intended to be the target of a daemon thread, see main()):
    polls every 5 seconds and calls os._exit(0) as soon as the parent process
    is no longer alive, so the server does not outlive the process that
    spawned it.

    Args:
        parent_pid: PID of the process to watch; values <= 1 disable the
            monitor (PID 1 / no meaningful parent).
    """
    if parent_pid <= 1:
        return
    logger.info(f"Monitoring parent process {parent_pid}")
    # On Windows, we might need a different approach if os.kill(pid, 0) is unreliable
    is_windows = sys.platform == "win32"
    while True:
        try:
            if is_windows:
                # OpenProcess with PROCESS_QUERY_LIMITED_INFORMATION (0x1000)
                # This is safer than os.kill on Windows for existence checks
                import ctypes
                PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
                handle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, False, parent_pid)
                if handle:
                    exit_code = ctypes.c_ulong()
                    ctypes.windll.kernel32.GetExitCodeProcess(handle, ctypes.byref(exit_code))
                    ctypes.windll.kernel32.CloseHandle(handle)
                    # STILL_ACTIVE is 259
                    if exit_code.value != 259:
                        logger.info(f"Parent process {parent_pid} finished with code {exit_code.value}. Shutting down...")
                        # _exit skips atexit/finally handlers: hard stop on purpose.
                        os._exit(0)
                else:
                    # On Windows, sometimes we lose access to the handle if the parent is transitioning
                    # or if it was started from a shell that already closed.
                    # We'll ignore handle failures for now unless we want to be very strict.
                    pass
            else:
                # POSIX: signal 0 performs an existence/permission check only;
                # raises OSError when the PID no longer exists.
                os.kill(parent_pid, 0)
        except Exception as e:
            # Parent is dead or inaccessible
            logger.info(f"Parent process {parent_pid} no longer accessible: {e}. Shutting down server...")
            os._exit(0)
        time.sleep(5)  # Increase check interval to be less aggressive
2025-12-03 15:18:57 -08:00
def get_local_ip() -> Optional[str]:
    """Get the local IP address that would be used for external connections.

    Returns:
        The outward-facing IPv4 address as a string, or None if it cannot be
        determined (no network / no route).
    """
    import socket

    try:
        # Connecting a UDP socket sends no packets; it only asks the OS which
        # source address would be used to reach the target. The context
        # manager guarantees the socket is closed even if connect() raises
        # (the original leaked the fd on that path).
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect(("8.8.8.8", 80))  # Google DNS; any routable address works
            return s.getsockname()[0]
    except Exception:
        # Caller (main) falls back to 127.0.0.1.
        return None
2025-12-29 17:05:03 -08:00
2025-12-03 15:18:57 -08:00
# ============================================================================
# FLASK APP FACTORY
# ============================================================================
2025-12-29 17:05:03 -08:00
2025-12-03 15:18:57 -08:00
def create_app():
    """Create and configure the Flask app with all REST routes.

    Routes cover health checking, file search/metadata/download/index/upload,
    and tag / relationship / URL management. All routes except /health are
    guarded by require_auth() (API key, if configured) and require_storage()
    (STORAGE_PATH must be set by main() first).

    Raises:
        ImportError: If Flask / flask-cors are not installed.

    Returns:
        The configured Flask application instance.
    """
    if not HAS_FLASK:
        raise ImportError(
            "Flask not installed. Install with: pip install flask flask-cors"
        )

    from flask import Flask, request, jsonify, send_file
    from flask_cors import CORS

    app = Flask(__name__)
    CORS(app)  # allow cross-origin requests (PC client on another host)

    # ========================================================================
    # HELPER DECORATORS
    # ========================================================================

    def require_auth():
        """Decorator factory: reject the request with 401 unless the global
        API_KEY is unset or the caller supplied a matching key."""

        def decorator(f):

            @wraps(f)
            def decorated_function(*args, **kwargs):
                if API_KEY:
                    # Get API key from header or query parameter
                    provided_key = request.headers.get("X-API-Key") or request.args.get("api_key")
                    if not provided_key or provided_key != API_KEY:
                        return jsonify({"error": "Unauthorized. Invalid or missing API key."}), 401
                return f(*args, **kwargs)

            return decorated_function

        return decorator

    def require_storage():
        """Decorator factory: fail with 500 if STORAGE_PATH was never set."""

        def decorator(f):

            @wraps(f)
            def decorated_function(*args, **kwargs):
                if not STORAGE_PATH:
                    return jsonify({"error": "Storage path not configured"}), 500
                return f(*args, **kwargs)

            return decorated_function

        return decorator

    # ========================================================================
    # HEALTH CHECK
    # ========================================================================

    @app.route("/health", methods=["GET"])
    def health():
        """Check server health and storage availability.

        Deliberately NOT wrapped in require_auth(): unauthenticated clients
        still get a minimal "locked" response so they can discover the server.
        """
        # Check auth manually to allow discovery even if locked
        authed = True
        if API_KEY:
            provided_key = request.headers.get("X-API-Key") or request.args.get("api_key")
            if not provided_key or provided_key != API_KEY:
                authed = False

        status = {
            "status": "ok",
            "service": "remote_storage",
            "name": os.environ.get("MM_SERVER_NAME", "Remote Storage"),
            "storage_configured": STORAGE_PATH is not None,
            "timestamp": datetime.now().isoformat(),
            "locked": not authed and API_KEY is not None
        }

        # If not authed but API_KEY is required, return minimal info for discovery
        if not authed and API_KEY:
            return jsonify(status), 200
        if STORAGE_PATH:
            status["storage_path"] = str(STORAGE_PATH)
            status["storage_exists"] = STORAGE_PATH.exists()
            try:
                # Opening the cached DB doubles as an accessibility probe.
                search_db = get_db(STORAGE_PATH)
                status["database_accessible"] = True
            except Exception as e:
                status["database_accessible"] = False
                status["database_error"] = str(e)

        return jsonify(status), 200

    # ========================================================================
    # FILE OPERATIONS
    # ========================================================================

    @app.route("/files/search", methods=["GET"])
    @require_auth()
    @require_storage()
    def search_files():
        """Search for files by name or tag.

        Query params: q (search text; empty or '*' lists everything),
        limit (max results per search, default 100).
        """
        query = request.args.get("q", "")
        limit = request.args.get("limit", 100, type=int)
        # Allow empty query or '*' for "list everything"
        db_query = query if query and query != "*" else ""

        try:
            search_db = get_db(STORAGE_PATH)
            # Name hits and tag hits are merged and deduplicated by hash.
            results = search_db.search_by_name(db_query, limit)
            tag_results = search_db.search_by_tag(db_query, limit)
            all_results_dict = {
                r["hash"]: r
                for r in (results + tag_results)
            }
            # Fetch tags for each result to support title extraction on client
            if search_db.db:
                for res in all_results_dict.values():
                    file_hash = res.get("hash")
                    if file_hash:
                        tags = search_db.db.get_tags(file_hash)
                        res["tag"] = tags
            return (
                jsonify(
                    {
                        "query": query,
                        "count": len(all_results_dict),
                        "files": list(all_results_dict.values()),
                    }
                ),
                200,
            )
        except Exception as e:
            logger.error(f"Search error: {e}", exc_info=True)
            return jsonify({"error": f"Search failed: {str(e)}"}), 500

    @app.route("/files/<file_hash>", methods=["GET"])
    @require_auth()
    @require_storage()
    def get_file_metadata(file_hash: str):
        """Get metadata for a specific file by hash. 404 if the hash is
        unknown or the file vanished from disk."""
        try:
            search_db = get_db(STORAGE_PATH)
            db = search_db.db
            if not db:
                return jsonify({"error": "Database unavailable"}), 500
            file_path = db.search_hash(file_hash)
            if not file_path or not file_path.exists():
                return jsonify({"error": "File not found"}), 404
            metadata = db.get_metadata(file_hash)
            tags = db.get_tags(file_hash)  # Use hash string
            return (
                jsonify(
                    {
                        "hash": file_hash,
                        "path": str(file_path),
                        "size": file_path.stat().st_size,
                        "metadata": metadata,
                        "tag": tags,
                    }
                ),
                200,
            )
        except Exception as e:
            logger.error(f"Get metadata error: {e}", exc_info=True)
            return jsonify({"error": f"Failed to get metadata: {str(e)}"}), 500

    @app.route("/files/raw/<file_hash>", methods=["GET"])
    @require_auth()
    @require_storage()
    def download_file(file_hash: str):
        """Download a raw file by hash (streamed via Flask's send_file)."""
        try:
            search_db = get_db(STORAGE_PATH)
            db = search_db.db
            if not db:
                return jsonify({"error": "Database unavailable"}), 500
            file_path = db.search_hash(file_hash)
            if not file_path or not file_path.exists():
                return jsonify({"error": "File not found"}), 404
            return send_file(file_path)
        except Exception as e:
            logger.error(f"Download error: {e}", exc_info=True)
            return jsonify({"error": f"Download failed: {str(e)}"}), 500

    @app.route("/files/index", methods=["POST"])
    @require_auth()
    @require_storage()
    def index_file():
        """Index a file that already exists on the device's filesystem.

        JSON body: path (required), tag (list), url (list). Returns 201 with
        the file's sha256 hash on success.
        """
        from SYS.utils import sha256_file

        data = request.get_json() or {}
        file_path_str = data.get("path")
        tags = data.get("tag", [])
        url = data.get("url", [])
        if not file_path_str:
            return jsonify({"error": "File path required"}), 400

        try:
            file_path = Path(file_path_str)

            if not file_path.exists():
                return jsonify({"error": "File does not exist"}), 404

            search_db = get_db(STORAGE_PATH)
            db = search_db.db
            if not db:
                return jsonify({"error": "Database unavailable"}), 500
            db.get_or_create_file_entry(file_path)

            if tags:
                db.add_tags(file_path, tags)

            if url:
                db.add_url(file_path, url)

            file_hash = sha256_file(file_path)

            return (
                jsonify(
                    {
                        "hash": file_hash,
                        "path": str(file_path),
                        "tags_added": len(tags),
                        "url_added": len(url),
                    }
                ),
                201,
            )
        except Exception as e:
            logger.error(f"Index error: {e}", exc_info=True)
            return jsonify({"error": f"Indexing failed: {str(e)}"}), 500

    @app.route("/files/upload", methods=["POST"])
    @require_auth()
    @require_storage()
    def upload_file():
        """Upload a file into storage (multipart/form-data).

        Accepts form fields:
        - file: uploaded file (required)
        - tag: repeated tag parameters or comma-separated string
        - url: repeated url parameters or comma-separated string

        Saved under STORAGE_PATH/incoming with a uniquified, sanitized name.
        """
        from API.folder import API_folder_store
        from SYS.utils import sha256_file, sanitize_filename, ensure_directory, unique_path
        if 'file' not in request.files:
            return jsonify({"error": "file required"}), 400
        file_storage = request.files.get('file')
        if file_storage is None:
            return jsonify({"error": "file required"}), 400
        filename = sanitize_filename(file_storage.filename or "upload")
        incoming_dir = STORAGE_PATH / "incoming"
        target_path = incoming_dir / filename
        target_path = unique_path(target_path)
        try:
            # Initialize the DB first (run safety checks) before creating any files.
            with API_folder_store(STORAGE_PATH) as db:
                # Ensure the incoming directory exists only after DB safety checks pass.
                ensure_directory(incoming_dir)
                # Save uploaded file to storage
                file_storage.save(str(target_path))
                # Extract optional metadata
                tags = []
                if 'tag' in request.form:
                    # Support repeated form fields or comma-separated list
                    tags = request.form.getlist('tag') or []
                    if not tags and request.form.get('tag'):
                        tags = [t.strip() for t in str(request.form.get('tag') or "").split(",") if t.strip()]
                urls = []
                if 'url' in request.form:
                    urls = request.form.getlist('url') or []
                    if not urls and request.form.get('url'):
                        urls = [u.strip() for u in str(request.form.get('url') or "").split(",") if u.strip()]
                db.get_or_create_file_entry(target_path)
                if tags:
                    db.add_tags(target_path, tags)
                if urls:
                    db.add_url(target_path, urls)
                file_hash = sha256_file(target_path)
                return (
                    jsonify({
                        "hash": file_hash,
                        "path": str(target_path),
                        "tags_added": len(tags),
                        "url_added": len(urls),
                    }),
                    201,
                )
        except Exception as e:
            logger.error(f"Upload error: {e}", exc_info=True)
            return jsonify({"error": f"Upload failed: {str(e)}"}), 500

    # ========================================================================
    # TAG OPERATIONS
    # ========================================================================

    @app.route("/tags/<file_hash>", methods=["GET"])
    @require_auth()
    @require_storage()
    def get_tags(file_hash: str):
        """Get tags for a file (by hash)."""
        from API.folder import API_folder_store

        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404

                tags = db.get_tags(file_path)
                return jsonify({"hash": file_hash, "tag": tags}), 200
        except Exception as e:
            logger.error(f"Get tags error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route("/tags/<file_hash>", methods=["POST"])
    @require_auth()
    @require_storage()
    def add_tags(file_hash: str):
        """Add tags to a file.

        JSON body: tag (list, required), mode ("add" appends, "replace"
        clears existing tags first).
        """
        from API.folder import API_folder_store

        data = request.get_json() or {}
        tags = data.get("tag", [])
        mode = data.get("mode", "add")
        if not tags:
            return jsonify({"error": "Tag required"}), 400

        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                if mode == "replace":
                    # Remove everything currently set before adding new tags.
                    db.remove_tags(file_path, db.get_tags(file_path))

                db.add_tags(file_path, tags)
                return jsonify({"hash": file_hash, "tag_added": len(tags), "mode": mode}), 200
        except Exception as e:
            logger.error(f"Add tags error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route("/tags/<file_hash>", methods=["DELETE"])
    @require_auth()
    @require_storage()
    def remove_tags(file_hash: str):
        """Remove tags from a file.

        Query param: tag (comma-separated). When omitted, ALL tags on the
        file are removed.
        """
        from API.folder import API_folder_store

        tags_str = request.args.get("tag", "")
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404

                if tags_str:
                    tags_to_remove = [t.strip() for t in tags_str.split(",")]
                else:
                    tags_to_remove = db.get_tags(file_path)

                db.remove_tags(file_path, tags_to_remove)
                return jsonify({"hash": file_hash, "tags_removed": len(tags_to_remove)}), 200
        except Exception as e:
            logger.error(f"Remove tags error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    # ========================================================================
    # RELATIONSHIP OPERATIONS
    # ========================================================================

    @app.route("/relationships/<file_hash>", methods=["GET"])
    @require_auth()
    @require_storage()
    def get_relationships(file_hash: str):
        """Get relationships for a file (read from its metadata record)."""
        from API.folder import API_folder_store

        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404

                metadata = db.get_metadata(file_path)
                relationships = metadata.get("relationships", {}) if metadata else {}
                return jsonify({"hash": file_hash, "relationships": relationships}), 200
        except Exception as e:
            logger.error(f"Get relationships error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route("/relationships", methods=["POST"])
    @require_auth()
    @require_storage()
    def set_relationship():
        """Set a relationship between two files.

        JSON body: from_hash (required), to_hash (required), type
        (default "alt").
        """
        from API.folder import API_folder_store

        data = request.get_json() or {}
        from_hash = data.get("from_hash")
        to_hash = data.get("to_hash")
        rel_type = data.get("type", "alt")
        if not from_hash or not to_hash:
            return jsonify({"error": "from_hash and to_hash required"}), 400

        try:
            with API_folder_store(STORAGE_PATH) as db:
                from_path = db.search_hash(from_hash)
                to_path = db.search_hash(to_hash)

                if not from_path or not to_path:
                    return jsonify({"error": "File not found"}), 404

                db.set_relationship(from_path, to_path, rel_type)
                return jsonify({"from_hash": from_hash, "to_hash": to_hash, "type": rel_type}), 200
        except Exception as e:
            logger.error(f"Set relationship error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    # ========================================================================
    # URL OPERATIONS
    # ========================================================================

    @app.route("/url/<file_hash>", methods=["GET"])
    @require_auth()
    @require_storage()
    def get_url(file_hash: str):
        """Get known url for a file (read from its metadata record)."""
        from API.folder import API_folder_store

        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404

                metadata = db.get_metadata(file_path)
                url = metadata.get("url", []) if metadata else []
                return jsonify({"hash": file_hash, "url": url}), 200
        except Exception as e:
            logger.error(f"Get url error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route("/url/<file_hash>", methods=["POST"])
    @require_auth()
    @require_storage()
    def add_url(file_hash: str):
        """Add url to a file. JSON body: url (list, required)."""
        from API.folder import API_folder_store

        data = request.get_json() or {}
        url = data.get("url", [])
        if not url:
            return jsonify({"error": "url required"}), 400

        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404

                db.add_url(file_path, url)
                return jsonify({"hash": file_hash, "url_added": len(url)}), 200
        except Exception as e:
            logger.error(f"Add url error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    return app
2025-12-29 17:05:03 -08:00
2025-12-03 15:18:57 -08:00
# ============================================================================
# MAIN
# ============================================================================
2025-12-29 17:05:03 -08:00
2025-12-03 15:18:57 -08:00
def main():
    """CLI entry point: parse arguments, verify storage and database, print
    connection/config info, then run the Flask development server (blocking).

    Exits with status 1 when Flask is missing, the storage path does not
    exist, or the database fails to initialize.
    """
    if not HAS_FLASK:
        print("ERROR: Flask and flask-cors required")
        print("Install with: pip install flask flask-cors")
        sys.exit(1)

    parser = argparse.ArgumentParser(
        description="Remote Storage Server for Medios-Macina",
        epilog=
        "Example: python remote_storage_server.py --storage-path /storage/media --port 999 --api-key mysecretkey",
    )
    parser.add_argument(
        "--storage-path",
        type=str,
        required=True,
        help="Path to storage directory"
    )
    parser.add_argument(
        "--host",
        type=str,
        default="0.0.0.0",
        help="Server host (default: 0.0.0.0)"
    )
    # NOTE(review): default port 999 is in the privileged range (<1024) on
    # Linux and may require elevated permissions — confirm this is intended.
    parser.add_argument(
        "--port",
        type=int,
        default=999,
        help="Server port (default: 999)"
    )
    parser.add_argument(
        "--api-key",
        type=str,
        default=None,
        help="API key for authentication (optional)"
    )
    parser.add_argument("--debug", action="store_true", help="Enable debug mode")
    parser.add_argument(
        "--monitor",
        action="store_true",
        help="Shut down if parent process dies"
    )

    args = parser.parse_args()

    # Start monitor thread if requested; daemon=True so it never blocks exit.
    if args.monitor:
        ppid = os.getppid()
        if ppid > 1:
            monitor_thread = threading.Thread(
                target=monitor_parent,
                args=(ppid, ),
                daemon=True
            )
            monitor_thread.start()

    # Publish CLI settings to the module globals the route handlers read.
    global STORAGE_PATH, API_KEY
    STORAGE_PATH = Path(args.storage_path).resolve()
    API_KEY = args.api_key

    if not STORAGE_PATH.exists():
        print(f"ERROR: Storage path does not exist: {STORAGE_PATH}")
        sys.exit(1)

    # Get local IP address
    local_ip = get_local_ip()
    if not local_ip:
        local_ip = "127.0.0.1"

    print(f"\n{'='*70}")
    print("Remote Storage Server - Medios-Macina")
    print(f"{'='*70}")
    print(f"Storage Path: {STORAGE_PATH}")
    print(f"Local IP: {local_ip}")
    print(f"Server URL: http://{local_ip}:{args.port}")
    print(f"Health URL: http://{local_ip}:{args.port}/health")
    # Only the last 4 characters of the key are shown (masked with '***').
    print(
        f"API Key: {'Enabled - ' + ('***' + args.api_key[-4:]) if args.api_key else 'Disabled (no auth)'}"
    )
    print(f"Debug Mode: {args.debug}")
    # Ready-to-paste client configuration block.
    print("\n📋 Config for config.conf:")
    print("[store=remote]")
    print('name="phone"')
    print(f'url="http://{local_ip}:{args.port}"')
    if args.api_key:
        print(f'api_key="{args.api_key}"')
    print("timeout=30")
    print("\nOR use ZeroTier Networking (Server Side):")
    print("[networking=zerotier]")
    print(f'serve="{STORAGE_PATH.name}"')
    print(f'port="{args.port}"')
    if args.api_key:
        print(f'api_key="{args.api_key}"')
    print(f"\n{'='*70}\n")

    # Fail fast: open (and immediately close) the DB once before serving so
    # configuration problems surface at startup instead of on first request.
    try:
        from API.folder import API_folder_store

        with API_folder_store(STORAGE_PATH) as db:
            logger.info("Database initialized successfully")
    except Exception as e:
        logger.error(f"Failed to initialize database: {e}")
        sys.exit(1)

    app = create_app()
    # use_reloader=False: the reloader would re-run main() (and the monitor
    # thread) in a child process.
    app.run(host=args.host, port=args.port, debug=args.debug, use_reloader=False)
2025-12-29 17:05:03 -08:00
if __name__ == "__main__":
2025-12-03 15:18:57 -08:00
main()