This commit is contained in:
nose
2025-12-11 19:04:02 -08:00
parent 6863c6c7ea
commit 16d8a763cd
103 changed files with 4759 additions and 9156 deletions

View File

@@ -1,10 +0,0 @@
"""Smoke test: verify that the 'cmdlets' package imports cleanly."""
import importlib
import sys
import traceback


def check_import(module_name: str = 'cmdlets') -> int:
    """Try to import *module_name* and report the outcome.

    Args:
        module_name: Dotted module path to import (defaults to 'cmdlets',
            matching the original hard-coded target).

    Returns:
        0 on success, 1 on failure. On failure the full traceback is
        printed so the import error is visible to the caller.
    """
    try:
        importlib.import_module(module_name)
        print(f'{module_name} imported OK')
        return 0
    except Exception:
        # Any import-time error (ImportError, SyntaxError, ...) is a failure.
        traceback.print_exc()
        return 1


if __name__ == '__main__':
    # Guarded entry point: importing this file no longer runs the check
    # (the original called sys.exit(1) at module level on failure).
    sys.exit(check_import())

View File

@@ -1,8 +0,0 @@
"""Smoke test: verify that cmdlets.download_media imports cleanly."""
import importlib
import sys
import traceback


def check_import(module_name: str = 'cmdlets.download_media') -> int:
    """Try to import *module_name* and report the outcome.

    Args:
        module_name: Dotted module path to import (defaults to
            'cmdlets.download_media', the original hard-coded target).

    Returns:
        0 on success, 1 on failure (traceback printed on failure).
    """
    try:
        importlib.import_module(module_name)
        # The original message used only the final dotted component.
        print(f"{module_name.rpartition('.')[2]} imported OK")
        return 0
    except Exception:
        traceback.print_exc()
        return 1


if __name__ == '__main__':
    # Guarded entry point; original exited with 1 at module level on failure.
    sys.exit(check_import())

View File

@@ -1,5 +0,0 @@
"""Dump a numbered, repr()'d slice of cmdlets/_shared.py for inspection."""
from pathlib import Path


def format_lines(text: str, start: int = 1708, end: int = 1720) -> list:
    """Return 'NNNN: repr(line)' strings for 1-based lines start..end of *text*.

    Args:
        text: Full file contents to slice.
        start: First 1-based line number to include (inclusive).
        end: Last 1-based line number to include (inclusive).

    Returns:
        A list of formatted strings; empty if the range is out of bounds.
        repr() is used so tabs/trailing whitespace are visible.
    """
    rows = []
    for i, line in enumerate(text.splitlines(), start=1):
        if start <= i <= end:
            rows.append(f"{i:4}: {repr(line)}")
    return rows


if __name__ == '__main__':
    # Same behavior as the original script: print lines 1708-1720.
    p = Path('cmdlets/_shared.py')
    for row in format_lines(p.read_text()):
        print(row)

View File

@@ -1,24 +0,0 @@
"""Normalize leading tabs to spaces in cmdlets/_shared.py."""
from pathlib import Path
import re


def normalize_leading_tabs(path: Path, tab_width: int = 4) -> bool:
    """Rewrite *path* so tabs in leading whitespace become spaces.

    Only indentation is touched: a tab appearing after the first
    non-whitespace character on a line is preserved. Line endings are
    preserved via splitlines(keepends=True).

    Args:
        path: File to normalize in place.
        tab_width: Number of spaces substituted per tab (default 4).

    Returns:
        True if the file contained leading tabs and was rewritten,
        False if no change was needed (file left untouched).
    """
    src = path.read_text(encoding='utf-8')
    changed = False
    new_lines = []
    for line in src.splitlines(True):
        m = re.match(r'^(?P<ws>[ \t]*)', line)
        ws = m.group('ws') if m else ''
        if '\t' in ws:
            # Replace tabs only within the leading-whitespace run.
            new_lines.append(ws.replace('\t', ' ' * tab_width) + line[len(ws):])
            changed = True
        else:
            new_lines.append(line)
    if changed:
        path.write_text(''.join(new_lines), encoding='utf-8')
    return changed


if __name__ == '__main__':
    # Guarded entry point: the original ran (and wrote files) on import.
    target = Path('cmdlets/_shared.py')
    if normalize_leading_tabs(target):
        print('Normalized leading tabs to spaces in', target)
    else:
        print('No leading tabs found; no changes made')

View File

@@ -1,160 +0,0 @@
#!/usr/bin/env python3
"""
Careful refactoring of download_data.py to class-based pattern.
Handles nested functions and inner definitions correctly.
"""
import re
from pathlib import Path


def refactor_download_data():
    """Rewrite cmdlets/download_data.py from module functions to a class.

    Reads the pristine source from cmdlets/download_data_backup.py, then:
      1. drops the old ``_run`` wrapper and the ``CMDLET = Cmdlet(...)``
         literal (replacing the latter with ``CMDLET = Download_Data()``),
      2. inserts a ``Download_Data(Cmdlet)`` class header (with ``__init__``
         and ``run``) immediately before the first top-level helper,
      3. re-indents every top-level ``def _helper`` as a ``@staticmethod``,
      4. rewrites ``_helper(`` call sites to ``self._helper(`` — skipping
         any line containing ``def `` so signatures stay untouched.

    The result is written back to cmdlets/download_data.py.
    """
    backup_file = Path('cmdlets/download_data_backup.py')
    output_file = Path('cmdlets/download_data.py')
    print(f"Reading: {backup_file}")
    content = backup_file.read_text(encoding='utf-8')
    lines = content.split('\n')
    output = []
    i = 0
    in_cmdlet_def = False          # NOTE(review): assigned but never read here
    skip_old_run_wrapper = False   # NOTE(review): assigned but never read here
    class_added = False            # ensures the class header is emitted once
    while i < len(lines):
        line = lines[i]
        # Skip old _run wrapper function
        if line.strip().startswith('def _run(result: Any'):
            # Consume lines until the next column-0 statement (next def/etc.).
            while i < len(lines):
                i += 1
                if lines[i] and not lines[i][0].isspace():
                    break
            continue
        # Skip old CMDLET definition
        if line.strip().startswith('CMDLET = Cmdlet('):
            # Consume up to and including the closing ')' line, then emit the
            # replacement instantiation in its place.
            while i < len(lines):
                i += 1
                if lines[i].strip() == ')':
                    i += 1
                    break
            output.append('')
            output.append('# Create and register the cmdlet')
            output.append('CMDLET = Download_Data()')
            output.append('')
            continue
        # Insert class definition before first top-level helper
        if not class_added and line.strip().startswith('def _download_torrent_worker('):
            # Add class header with __init__ and run().
            # NOTE(review): the leading spaces inside these literals define the
            # generated file's indentation; they were reconstructed from the
            # conventional 4-space nesting — confirm against the backup file.
            output.extend([
                '',
                '',
                'class Download_Data(Cmdlet):',
                '    """Class-based download-data cmdlet with self-registration."""',
                '',
                '    def __init__(self) -> None:',
                '        """Initialize download-data cmdlet."""',
                '        super().__init__(',
                '            name="download-data",',
                '            summary="Download data from url with playlist/clip support using yt-dlp",',
                '            usage="download-data <url> [options] or search-file | download-data [options]",',
                '            alias=["download", "dl"],',
                '            arg=[',
                '                CmdletArg(name="url", type="string", required=False, description="URL to download (HTTP/HTTPS or file with URL list)", variadic=True),',
                '                CmdletArg(name="-url", type="string", description="URL to download (alias for positional argument)", variadic=True),',
                '                CmdletArg(name="list-formats", type="flag", description="List available formats without downloading"),',
                '                CmdletArg(name="audio", type="flag", alias="a", description="Download audio only (extract from video)"),',
                '                CmdletArg(name="video", type="flag", alias="v", description="Download video (default if not specified)"),',
                '                CmdletArg(name="format", type="string", alias="fmt", description="Explicit yt-dlp format selector (e.g., bestvideo+bestaudio)"),',
                '                CmdletArg(name="clip", type="string", description="Extract time range: MM:SS-MM:SS (e.g., 34:03-35:08) or seconds"),',
                '                CmdletArg(name="section", type="string", description="Download sections (yt-dlp only): TIME_RANGE[,TIME_RANGE...] (e.g., 1:30-1:35,0:05-0:15)"),',
                '                CmdletArg(name="cookies", type="string", description="Path to cookies.txt file for authentication"),',
                '                CmdletArg(name="torrent", type="flag", description="Download torrent/magnet via AllDebrid (requires API key in config)"),',
                '                CmdletArg(name="wait", type="float", description="Wait time (seconds) for magnet processing timeout"),',
                '                CmdletArg(name="background", type="flag", alias="bg", description="Start download in background and return to prompt immediately"),',
                '                CmdletArg(name="item", type="string", alias="items", description="Item selection for playlists/formats: use -item N to select format N, or -item to show table for @N selection in next command"),',
                '                SharedArgs.STORAGE,',
                '            ],',
                '            detail=["Download media from url with advanced features.", "", "See help for full usage examples."],',
                '            exec=self.run,',
                '        )',
                '        self.register()',
                '',
                '    def run(self, result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:',
                '        """Main execution method."""',
                '        stage_ctx = pipeline_context.get_stage_context()',
                '        in_pipeline = stage_ctx is not None and getattr(stage_ctx, "total_stages", 1) > 1',
                '        if in_pipeline and isinstance(config, dict):',
                '            config["_quiet_background_output"] = True',
                '        return self._run_impl(result, args, config, emit_results=True)',
                '',
                '    # ' + '='*70,
                '    # HELPER METHODS',
                '    # ' + '='*70,
                '',
            ])
            class_added = True
        # Convert top-level helper functions to static methods
        if class_added and line and not line[0].isspace() and line.strip().startswith('def _'):
            output.append('    @staticmethod')
            output.append(f'    {line}')
            i += 1
            # Copy function body with indentation (shift one level into class)
            while i < len(lines):
                next_line = lines[i]
                # Stop at next top-level definition
                if next_line and not next_line[0].isspace() and (next_line.strip().startswith(('def ', 'class ', 'CMDLET'))):
                    break
                # Add indentation (blank lines are kept unindented)
                if next_line.strip():
                    output.append(f'    {next_line}')
                else:
                    output.append(next_line)
                i += 1
            continue
        output.append(line)
        i += 1
    result_text = '\n'.join(output)
    # NOW: Update function calls carefully
    # Only update calls in _run_impl, not in nested function definitions
    # Pattern: match _func( but NOT when it's after "def " on the same line
    helper_funcs = [
        '_download_torrent_worker', '_guess_libgen_title', '_is_libgen_entry',
        '_download_libgen_entry', '_libgen_background_worker',
        '_start_libgen_background_worker', '_run_pipeline_tail',
        '_download_http_background_worker', '_start_http_background_download',
        '_parse_torrent_file', '_download_torrent_file', '_is_torrent_file_or_url',
        '_process_torrent_input', '_show_playlist_table', '_parse_time_range',
        '_parse_section_ranges', '_parse_playlist_selection_indices',
        '_select_playlist_entries', '_sanitize_title_for_filename',
        '_find_playlist_files_from_entries', '_snapshot_playlist_paths',
        '_is_openlibrary_downloadable', '_as_dict', '_is_youtube_url',
    ]
    # Split into lines for careful replacement
    result_lines = result_text.split('\n')
    for idx, line in enumerate(result_lines):
        # Skip lines that are function definitions
        if 'def ' in line:
            continue
        # Replace helper function calls with self.
        for func in helper_funcs:
            # Pattern: _func( with word boundary before
            pattern = rf'\b({re.escape(func)})\('
            if re.search(pattern, line):
                result_lines[idx] = re.sub(pattern, r'self.\1(', line)
    result_text = '\n'.join(result_lines)
    output_file.write_text(result_text, encoding='utf-8')
    print(f"✓ Written: {output_file}")
    print(f"✓ Class-based refactor complete")


if __name__ == '__main__':
    refactor_download_data()

View File

@@ -1,131 +0,0 @@
#!/usr/bin/env python3
"""
Automated refactoring script for download_data.py
Converts module-level functions to class-based cmdlet pattern.
"""
import re
from pathlib import Path


def main():
    """First-pass refactor of cmdlets/download_data.py into a class skeleton.

    Reads cmdlets/download_data_backup.py and emits a version where:
      * the old ``_run`` wrapper is removed,
      * the ``CMDLET = Cmdlet(...)`` literal becomes ``CMDLET = Download_Data()``,
      * a placeholder ``Download_Data(Cmdlet)`` class header is inserted,
      * top-level ``_helper`` functions are re-indented as ``@staticmethod``s,
      * ``_run_impl`` gains a ``self`` parameter.

    Call-site rewriting (``_func()`` -> ``self._func()``) is deliberately left
    as a later manual step — see the printed "Next steps".
    """
    backup_file = Path('cmdlets/download_data_backup.py')
    output_file = Path('cmdlets/download_data.py')
    print(f"Reading: {backup_file}")
    content = backup_file.read_text(encoding='utf-8')
    lines = content.split('\n')
    output = []
    i = 0
    in_cmdlet_def = False
    skip_old_run_wrapper = False
    class_section_added = False
    # Track where to insert class definition
    last_import_line = 0  # NOTE(review): tracked but never used afterwards
    while i < len(lines):
        line = lines[i]
        # Track imports
        if line.strip().startswith(('import ', 'from ')):
            last_import_line = len(output)
        # Skip old _run wrapper function
        if 'def _run(result: Any' in line:
            skip_old_run_wrapper = True
            i += 1
            continue
        if skip_old_run_wrapper:
            # Wrapper ends at the next column-0 line, which is then processed
            # normally on this same iteration.
            if line and not line[0].isspace():
                skip_old_run_wrapper = False
            else:
                i += 1
                continue
        # Skip old CMDLET definition
        if line.strip().startswith('CMDLET = Cmdlet('):
            in_cmdlet_def = True
            i += 1
            continue
        if in_cmdlet_def:
            if line.strip() == ')':
                in_cmdlet_def = False
                # Add class instantiation instead
                output.append('')
                output.append('# Create and register the cmdlet')
                output.append('CMDLET = Download_Data()')
                output.append('')
            i += 1
            continue
        # Insert class definition before first helper function
        # NOTE(review): indentation inside the appended literals was
        # reconstructed from the conventional 4-space nesting.
        if not class_section_added and line.strip().startswith('def _download_torrent_worker('):
            output.append('')
            output.append('')
            output.append('class Download_Data(Cmdlet):')
            output.append('    """Class-based download-data cmdlet with self-registration."""')
            output.append('')
            output.append('    # Full __init__ implementation to be added')
            output.append('    # Full run() method to be added')
            output.append('')
            output.append('    # ' + '='*70)
            output.append('    # HELPER METHODS')
            output.append('    # ' + '='*70)
            output.append('')
            class_section_added = True
        # Convert top-level helper functions to static methods
        if class_section_added and line.strip().startswith('def _') and not line.strip().startswith('def __'):
            # Check if this is a top-level function (no indentation)
            if not line.startswith((' ', '\t')):
                output.append('    @staticmethod')
                output.append(f'    {line}')
                i += 1
                # Copy function body with indentation
                while i < len(lines):
                    next_line = lines[i]
                    # Stop at next top-level definition
                    if next_line and not next_line[0].isspace() and (next_line.strip().startswith('def ') or next_line.strip().startswith('class ') or next_line.strip().startswith('CMDLET')):
                        break
                    # Add indentation (blank lines kept as-is)
                    if next_line.strip():
                        output.append(f'    {next_line}')
                    else:
                        output.append(next_line)
                    i += 1
                continue
        # Convert _run_impl to method (but keep as-is for now, will be updated later)
        if class_section_added and line.strip().startswith('def _run_impl('):
            output.append('    def _run_impl(self, result: Any, args: Sequence[str], config: Dict[str, Any], emit_results: bool = True) -> int:')
            i += 1
            # Copy function body with indentation
            while i < len(lines):
                next_line = lines[i]
                if next_line and not next_line[0].isspace() and next_line.strip():
                    break
                if next_line.strip():
                    output.append(f'    {next_line}')
                else:
                    output.append(next_line)
                i += 1
            continue
        output.append(line)
        i += 1
    # Write output
    result_text = '\n'.join(output)
    output_file.write_text(result_text, encoding='utf-8')
    print(f"✓ Written: {output_file}")
    print(f"✓ Converted {content.count('def _')} helper functions to static methods")
    print("\nNext steps:")
    print("1. Add full __init__ method with cmdlet args")
    print("2. Add run() method that calls _run_impl")
    print("3. Update function calls in _run_impl from _func() to self._func()")


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,523 @@
"""Remote Storage Server - REST API for file management on mobile devices.
This server runs on a mobile device (Android with Termux, iOS with iSH, etc.)
and exposes the local library database as a REST API. Your PC connects to this
server and uses it as a remote storage backend through the RemoteStorageBackend.
## INSTALLATION
### On Android (Termux):
1. Install Termux from Play Store: https://play.google.com/store/apps/details?id=com.termux
2. In Termux:
$ apt update && apt install python
$ pip install flask flask-cors
3. Copy this file to your device
4. Run it (with optional API key):
$ python remote_storage_server.py --storage-path /path/to/storage --port 5000
$ python remote_storage_server.py --storage-path /path/to/storage --api-key mysecretkey
5. Server prints connection info automatically (IP, port, API key)
### On PC:
1. Install requests: pip install requests
2. Add to config.json:
{
"remote_storages": [
{
"name": "phone",
"url": "http://192.168.1.100:5000",
"api_key": "mysecretkey",
"timeout": 30
}
]
}
Note: API key is optional. Works on WiFi or cellular data.
## USAGE
After setup, all cmdlets work with the phone:
$ search-file zohar -store phone
$ @1-3 | add-relationship -king @4 -store phone
$ @1 | get-relationship -store phone
The server exposes REST endpoints that RemoteStorageBackend uses internally.
"""
from __future__ import annotations
import os
import sys
import json
import argparse
import logging
from pathlib import Path
from typing import Optional, Dict, Any
from datetime import datetime
from functools import wraps
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from SYS.logger import log
# ============================================================================
# CONFIGURATION
# ============================================================================
logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s: %(message)s'
)
logger = logging.getLogger(__name__)

# Runtime configuration; both are populated by main() from CLI arguments
# and read by the route closures in create_app().
STORAGE_PATH: Optional[Path] = None
API_KEY: Optional[str] = None  # API key for authentication (None = no auth required)

# Try importing Flask - will be used in main() only.
# The probe lets the module import cleanly without Flask installed;
# create_app()/main() check HAS_FLASK and fail with a clear message.
try:
    from flask import Flask, request, jsonify
    from flask_cors import CORS
    HAS_FLASK = True
except ImportError:
    HAS_FLASK = False
# ============================================================================
# UTILITY FUNCTIONS
# ============================================================================
def get_local_ip() -> Optional[str]:
    """Get the local IP address that would be used for external connections.

    "Connects" a UDP socket to a public address — for SOCK_DGRAM no packets
    are actually sent — then reads which local interface address the OS
    selected for that route.

    Returns:
        The local IPv4 address as a string, or None if it cannot be
        determined (e.g. no network route available).
    """
    import socket
    try:
        # Context manager guarantees the socket is closed even when
        # connect() raises (the original leaked the descriptor there).
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect(("8.8.8.8", 80))  # Google DNS; any routable addr works
            return s.getsockname()[0]
    except Exception:
        return None
# ============================================================================
# FLASK APP FACTORY
# ============================================================================
def create_app():
    """Create and configure Flask app with all routes.

    Route handlers are closures so they can read the module-level
    STORAGE_PATH / API_KEY globals set by main(). The project-local
    API.folder module is imported inside each handler, so the server
    can start (and /health can report errors) even if the database
    layer has issues.

    Raises:
        ImportError: if Flask / flask-cors are not installed.

    Returns:
        The configured Flask application.
    """
    if not HAS_FLASK:
        raise ImportError("Flask not installed. Install with: pip install flask flask-cors")
    from flask import Flask, request, jsonify
    from flask_cors import CORS
    app = Flask(__name__)
    CORS(app)  # allow cross-origin requests (PC client on another host)

    # ========================================================================
    # HELPER DECORATORS
    # ========================================================================
    def require_auth():
        """Decorator to check API key authentication if configured."""
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                if API_KEY:
                    # Get API key from header or query parameter
                    provided_key = request.headers.get('X-API-Key') or request.args.get('api_key')
                    if not provided_key or provided_key != API_KEY:
                        return jsonify({"error": "Unauthorized. Invalid or missing API key."}), 401
                return f(*args, **kwargs)
            return decorated_function
        return decorator

    def require_storage():
        """Decorator to ensure storage path is configured."""
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                if not STORAGE_PATH:
                    return jsonify({"error": "Storage path not configured"}), 500
                return f(*args, **kwargs)
            return decorated_function
        return decorator

    # ========================================================================
    # HEALTH CHECK
    # ========================================================================
    @app.route('/health', methods=['GET'])
    @require_auth()
    def health():
        """Check server health and storage availability."""
        status = {
            "status": "ok",
            "storage_configured": STORAGE_PATH is not None,
            "timestamp": datetime.now().isoformat()
        }
        if STORAGE_PATH:
            status["storage_path"] = str(STORAGE_PATH)
            status["storage_exists"] = STORAGE_PATH.exists()
            # Probe the database so clients can distinguish "path exists"
            # from "database actually opens".
            try:
                from API.folder import API_folder_store
                with API_folder_store(STORAGE_PATH) as db:
                    status["database_accessible"] = True
            except Exception as e:
                status["database_accessible"] = False
                status["database_error"] = str(e)
        return jsonify(status), 200

    # ========================================================================
    # FILE OPERATIONS
    # ========================================================================
    @app.route('/files/search', methods=['GET'])
    @require_auth()
    @require_storage()
    def search_files():
        """Search for files by name or tag."""
        from API.folder import LocalLibrarySearchOptimizer
        query = request.args.get('q', '')
        limit = request.args.get('limit', 100, type=int)
        if not query:
            return jsonify({"error": "Search query required"}), 400
        try:
            with LocalLibrarySearchOptimizer(STORAGE_PATH) as db:
                results = db.search_by_name(query, limit)
                tag_results = db.search_by_tag(query, limit)
                # De-duplicate by hash; tag hits overwrite name hits.
                all_results = {r['hash']: r for r in (results + tag_results)}
                return jsonify({
                    "query": query,
                    "count": len(all_results),
                    "files": list(all_results.values())
                }), 200
        except Exception as e:
            logger.error(f"Search error: {e}", exc_info=True)
            return jsonify({"error": f"Search failed: {str(e)}"}), 500

    @app.route('/files/<file_hash>', methods=['GET'])
    @require_auth()
    @require_storage()
    def get_file_metadata(file_hash: str):
        """Get metadata for a specific file by hash."""
        from API.folder import API_folder_store
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path or not file_path.exists():
                    return jsonify({"error": "File not found"}), 404
                metadata = db.get_metadata(file_path)
                tags = db.get_tags(file_path)
                return jsonify({
                    "hash": file_hash,
                    "path": str(file_path),
                    "size": file_path.stat().st_size,
                    "metadata": metadata,
                    "tags": tags
                }), 200
        except Exception as e:
            logger.error(f"Get metadata error: {e}", exc_info=True)
            return jsonify({"error": f"Failed to get metadata: {str(e)}"}), 500

    @app.route('/files/index', methods=['POST'])
    @require_auth()
    @require_storage()
    def index_file():
        """Index a new file in the storage."""
        from API.folder import API_folder_store
        from SYS.utils import sha256_file
        data = request.get_json() or {}
        file_path_str = data.get('path')
        tags = data.get('tags', [])
        url = data.get('url', [])
        if not file_path_str:
            return jsonify({"error": "File path required"}), 400
        try:
            file_path = Path(file_path_str)
            if not file_path.exists():
                return jsonify({"error": "File does not exist"}), 404
            with API_folder_store(STORAGE_PATH) as db:
                db.get_or_create_file_entry(file_path)
                if tags:
                    db.add_tags(file_path, tags)
                if url:
                    db.add_url(file_path, url)
            file_hash = sha256_file(file_path)
            return jsonify({
                "hash": file_hash,
                "path": str(file_path),
                "tags_added": len(tags),
                "url_added": len(url)
            }), 201
        except Exception as e:
            logger.error(f"Index error: {e}", exc_info=True)
            return jsonify({"error": f"Indexing failed: {str(e)}"}), 500

    # ========================================================================
    # TAG OPERATIONS
    # ========================================================================
    @app.route('/tags/<file_hash>', methods=['GET'])
    @require_auth()
    @require_storage()
    def get_tags(file_hash: str):
        """Get tags for a file."""
        from API.folder import API_folder_store
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                tags = db.get_tags(file_path)
                return jsonify({"hash": file_hash, "tags": tags}), 200
        except Exception as e:
            logger.error(f"Get tags error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route('/tags/<file_hash>', methods=['POST'])
    @require_auth()
    @require_storage()
    def add_tags(file_hash: str):
        """Add tags to a file. mode='replace' clears existing tags first."""
        from API.folder import API_folder_store
        data = request.get_json() or {}
        tags = data.get('tags', [])
        mode = data.get('mode', 'add')
        if not tags:
            return jsonify({"error": "Tags required"}), 400
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                if mode == 'replace':
                    # Drop all current tags before adding the new set.
                    db.remove_tags(file_path, db.get_tags(file_path))
                db.add_tags(file_path, tags)
                return jsonify({"hash": file_hash, "tags_added": len(tags), "mode": mode}), 200
        except Exception as e:
            logger.error(f"Add tags error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route('/tags/<file_hash>', methods=['DELETE'])
    @require_auth()
    @require_storage()
    def remove_tags(file_hash: str):
        """Remove tags from a file (all tags if ?tags= is omitted)."""
        from API.folder import API_folder_store
        tags_str = request.args.get('tags', '')
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                if tags_str:
                    # Comma-separated list in the query string.
                    tags_to_remove = [t.strip() for t in tags_str.split(',')]
                else:
                    # No list given: remove every tag on the file.
                    tags_to_remove = db.get_tags(file_path)
                db.remove_tags(file_path, tags_to_remove)
                return jsonify({"hash": file_hash, "tags_removed": len(tags_to_remove)}), 200
        except Exception as e:
            logger.error(f"Remove tags error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    # ========================================================================
    # RELATIONSHIP OPERATIONS
    # ========================================================================
    @app.route('/relationships/<file_hash>', methods=['GET'])
    @require_auth()
    @require_storage()
    def get_relationships(file_hash: str):
        """Get relationships for a file (read from its metadata blob)."""
        from API.folder import API_folder_store
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                metadata = db.get_metadata(file_path)
                relationships = metadata.get('relationships', {}) if metadata else {}
                return jsonify({"hash": file_hash, "relationships": relationships}), 200
        except Exception as e:
            logger.error(f"Get relationships error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route('/relationships', methods=['POST'])
    @require_auth()
    @require_storage()
    def set_relationship():
        """Set a relationship between two files (default type 'alt')."""
        from API.folder import API_folder_store
        data = request.get_json() or {}
        from_hash = data.get('from_hash')
        to_hash = data.get('to_hash')
        rel_type = data.get('type', 'alt')
        if not from_hash or not to_hash:
            return jsonify({"error": "from_hash and to_hash required"}), 400
        try:
            with API_folder_store(STORAGE_PATH) as db:
                from_path = db.search_hash(from_hash)
                to_path = db.search_hash(to_hash)
                if not from_path or not to_path:
                    return jsonify({"error": "File not found"}), 404
                db.set_relationship(from_path, to_path, rel_type)
                return jsonify({"from_hash": from_hash, "to_hash": to_hash, "type": rel_type}), 200
        except Exception as e:
            logger.error(f"Set relationship error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    # ========================================================================
    # URL OPERATIONS
    # ========================================================================
    @app.route('/url/<file_hash>', methods=['GET'])
    @require_auth()
    @require_storage()
    def get_url(file_hash: str):
        """Get known url for a file."""
        from API.folder import API_folder_store
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                metadata = db.get_metadata(file_path)
                url = metadata.get('url', []) if metadata else []
                return jsonify({"hash": file_hash, "url": url}), 200
        except Exception as e:
            logger.error(f"Get url error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    @app.route('/url/<file_hash>', methods=['POST'])
    @require_auth()
    @require_storage()
    def add_url(file_hash: str):
        """Add url to a file."""
        from API.folder import API_folder_store
        data = request.get_json() or {}
        url = data.get('url', [])
        if not url:
            return jsonify({"error": "url required"}), 400
        try:
            with API_folder_store(STORAGE_PATH) as db:
                file_path = db.search_hash(file_hash)
                if not file_path:
                    return jsonify({"error": "File not found"}), 404
                db.add_url(file_path, url)
                return jsonify({"hash": file_hash, "url_added": len(url)}), 200
        except Exception as e:
            logger.error(f"Add url error: {e}", exc_info=True)
            return jsonify({"error": f"Failed: {str(e)}"}), 500

    return app
# ============================================================================
# MAIN
# ============================================================================
def main():
    """Parse CLI args, validate storage, print connection info, run server.

    Exits with status 1 if Flask is missing, the storage path does not
    exist, or the database fails to initialize.
    """
    if not HAS_FLASK:
        print("ERROR: Flask and flask-cors required")
        print("Install with: pip install flask flask-cors")
        sys.exit(1)
    parser = argparse.ArgumentParser(
        description='Remote Storage Server for Medios-Macina',
        epilog='Example: python remote_storage_server.py --storage-path /storage/media --port 5000 --api-key mysecretkey'
    )
    parser.add_argument('--storage-path', type=str, required=True, help='Path to storage directory')
    parser.add_argument('--host', type=str, default='0.0.0.0', help='Server host (default: 0.0.0.0)')
    parser.add_argument('--port', type=int, default=5000, help='Server port (default: 5000)')
    parser.add_argument('--api-key', type=str, default=None, help='API key for authentication (optional)')
    parser.add_argument('--debug', action='store_true', help='Enable debug mode')
    args = parser.parse_args()
    # Publish configuration for the route closures in create_app().
    global STORAGE_PATH, API_KEY
    STORAGE_PATH = Path(args.storage_path).resolve()
    API_KEY = args.api_key
    if not STORAGE_PATH.exists():
        print(f"ERROR: Storage path does not exist: {STORAGE_PATH}")
        sys.exit(1)
    # Get local IP address (fallback to loopback if detection fails)
    local_ip = get_local_ip()
    if not local_ip:
        local_ip = "127.0.0.1"
    print(f"\n{'='*70}")
    print(f"Remote Storage Server - Medios-Macina")
    print(f"{'='*70}")
    print(f"Storage Path: {STORAGE_PATH}")
    print(f"Local IP: {local_ip}")
    print(f"Server URL: http://{local_ip}:{args.port}")
    print(f"Health URL: http://{local_ip}:{args.port}/health")
    # Only the last 4 chars of the key are shown, to avoid leaking it in logs.
    print(f"API Key: {'Enabled - ' + ('***' + args.api_key[-4:]) if args.api_key else 'Disabled (no auth)'}")
    print(f"Debug Mode: {args.debug}")
    print(f"\n📋 Config for config.json:")
    config_entry = {
        "name": "phone",
        "url": f"http://{local_ip}:{args.port}",
        "timeout": 30
    }
    if args.api_key:
        config_entry["api_key"] = args.api_key
    print(json.dumps(config_entry, indent=2))
    print(f"\n{'='*70}\n")
    # Fail fast if the database cannot be opened before binding the port.
    try:
        from API.folder import API_folder_store
        with API_folder_store(STORAGE_PATH) as db:
            logger.info("Database initialized successfully")
    except Exception as e:
        logger.error(f"Failed to initialize database: {e}")
        sys.exit(1)
    app = create_app()
    # use_reloader=False: the reloader would re-run main() in a child process.
    app.run(host=args.host, port=args.port, debug=args.debug, use_reloader=False)


if __name__ == '__main__':
    main()