This commit is contained in:
2026-01-14 18:15:00 -08:00
parent d474916874
commit f40d0b61a2
4 changed files with 122 additions and 59 deletions

View File

@@ -353,7 +353,7 @@
"filedot\\.(xyz|to|top)/([0-9a-zA-Z]{12})"
],
"regexp": "filedot\\.(xyz|to|top)/([0-9a-zA-Z]{12})",
"status": false
"status": true
},
"filefactory": {
"name": "filefactory",
@@ -786,7 +786,7 @@
"(upl\\.wf/d/[0-9a-zA-Z]+)"
],
"regexp": "((world\\-files\\.com/[0-9a-zA-Z]{12}))|((upl\\.wf/d/[0-9a-zA-Z]+))",
"status": false,
"status": true,
"hardRedirect": [
"world\\-files\\.com/([0-9a-zA-Z]{12})"
]

View File

@@ -277,15 +277,55 @@ class ZeroTier(Store):
debug(f"Hydrus get_file failed: {exc}")
return None
# remote storage: try metadata endpoint
res = self._request_remote("GET", f"/files/{file_hash}")
if isinstance(res, dict):
# remote server returns a 'path' to the file (server-local path)
p = res.get("path") or res.get("file") or None
if isinstance(p, str) and p.startswith("http"):
return p
return p
return None
# remote storage: return download URL
base = self._ensure_client()
if not base or not isinstance(base, str):
return None
url = f"{base.rstrip('/')}/files/raw/{file_hash}"
if self._api_key:
sep = "&" if "?" in url else "?"
url += f"{sep}api_key={self._api_key}"
return url
def download_to_temp(self, file_hash: str, temp_root: Optional[Path] = None) -> Optional[Path]:
    """Download a file from the remote peer to a local temporary file.

    Args:
        file_hash: Hash identifying the file on the remote peer.
        temp_root: Optional directory to place the temp file in; created
            (with parents) if missing. Falls back to the system temp
            directory when omitted.

    Returns:
        Path to the downloaded ``.tmp`` file on success, or ``None`` when
        the backend is Hydrus, no HTTP download URL can be resolved, or
        the transfer fails. On failure the partial temp file is removed.
    """
    import os
    import httpx
    import tempfile

    # Hydrus files are accessed locally; there is nothing to download.
    if self._service == "hydrus":
        return None

    url = self.get_file(file_hash)
    if not url or not isinstance(url, str) or not url.startswith("http"):
        return None

    tmp_path: Optional[str] = None
    try:
        # Use provided temp_root or the system temp directory.
        if temp_root:
            temp_root.mkdir(parents=True, exist_ok=True)
            fd, tmp_path = tempfile.mkstemp(dir=str(temp_root), suffix=".tmp")
        else:
            fd, tmp_path = tempfile.mkstemp(suffix=".tmp")

        headers = {}
        if self._api_key:
            headers["X-API-Key"] = self._api_key

        # Stream to disk in chunks. The with-blocks guarantee the file
        # descriptor and the HTTP connection are released on every exit
        # path (the original leaked both when the transfer raised).
        with os.fdopen(fd, "wb") as out:
            with httpx.stream("GET", url, headers=headers, timeout=self._timeout) as r:
                r.raise_for_status()
                for chunk in r.iter_bytes():
                    out.write(chunk)
        return Path(tmp_path)
    except Exception as exc:
        debug(f"ZeroTier download_to_temp failed for {file_hash}: {exc}")
        # Don't leave a partially-written temp file behind on failure.
        if tmp_path is not None:
            try:
                os.unlink(tmp_path)
            except OSError:
                pass
        return None
def add_file(self, file_path: Path, **kwargs: Any) -> Optional[str]:
"""Upload a local file to the remote ZeroTier peer (supports 'remote' and 'hydrus' services).

View File

@@ -1017,34 +1017,34 @@ class Add_File(Cmdlet):
except Exception:
pass
# PRIORITY 1b: Try hash+store from result dict (fetch from backend)
if isinstance(result, dict):
r_hash = result.get("hash")
r_store = result.get("store")
if r_hash and r_store:
try:
store = store_instance
if not store:
store = Store(config)
# PRIORITY 1b: Try hash+store from result (fetch from backend)
r_hash = get_field(result, "hash") or get_field(result, "file_hash")
r_store = get_field(result, "store")
if r_hash and r_store:
try:
store = store_instance
if not store:
store = Store(config)
if r_store in store.list_backends():
backend = store[r_store]
# Try direct access (Path)
mp = backend.get_file(r_hash)
if isinstance(mp, Path) and mp.exists():
pipe_obj.path = str(mp)
return mp, str(r_hash), None
if r_store in store.list_backends():
backend = store[r_store]
# Try direct access (Path)
mp = backend.get_file(r_hash)
if isinstance(mp, Path) and mp.exists():
pipe_obj.path = str(mp)
return mp, str(r_hash), None
# Try download to temp
if isinstance(mp, str) and mp.strip():
dl_path, tmp_dir = Add_File._maybe_download_backend_file(
backend, str(r_hash), pipe_obj
)
if dl_path and dl_path.exists():
pipe_obj.path = str(dl_path)
return dl_path, str(r_hash), tmp_dir
except Exception:
pass
# Try download to temp
if isinstance(mp, str) and mp.strip():
dl_path, tmp_dir = Add_File._maybe_download_backend_file(
backend, str(r_hash), pipe_obj
)
if dl_path and dl_path.exists():
pipe_obj.path = str(dl_path)
return dl_path, str(r_hash), tmp_dir
except Exception:
pass
# PRIORITY 2: Generic Coercion (Path arg > PipeObject > Result)
candidate: Optional[Path] = None
@@ -1130,7 +1130,6 @@ class Add_File(Cmdlet):
return files_info
@staticmethod
@staticmethod
def _validate_source(media_path: Optional[Path], allow_all_extensions: bool = False) -> bool:
"""Validate that the source file exists and is supported.

View File

@@ -161,7 +161,7 @@ def create_app():
"Flask not installed. Install with: pip install flask flask-cors"
)
from flask import Flask, request, jsonify
from flask import Flask, request, jsonify, send_file
from flask_cors import CORS
app = Flask(__name__)
@@ -325,12 +325,32 @@ def create_app():
logger.error(f"Get metadata error: {e}", exc_info=True)
return jsonify({"error": f"Failed to get metadata: {str(e)}"}), 500
@app.route("/files/raw/<file_hash>", methods=["GET"])
@require_auth()
@require_storage()
def download_file(file_hash: str):
    """Serve the raw bytes of a stored file, looked up by its hash."""
    try:
        backend = get_db(STORAGE_PATH).db
        if not backend:
            return jsonify({"error": "Database unavailable"}), 500
        resolved = backend.search_hash(file_hash)
        if resolved and resolved.exists():
            return send_file(resolved)
        return jsonify({"error": "File not found"}), 404
    except Exception as e:
        logger.error(f"Download error: {e}", exc_info=True)
        return jsonify({"error": f"Download failed: {str(e)}"}), 500
@app.route("/files/index", methods=["POST"])
@require_auth()
@require_storage()
def index_file():
"""Index a new file in the storage."""
from API.folder import API_folder_store
from SYS.utils import sha256_file
data = request.get_json() or {}
@@ -347,28 +367,32 @@ def create_app():
if not file_path.exists():
return jsonify({"error": "File does not exist"}), 404
with API_folder_store(STORAGE_PATH) as db:
db.get_or_create_file_entry(file_path)
search_db = get_db(STORAGE_PATH)
db = search_db.db
if not db:
return jsonify({"error": "Database unavailable"}), 500
db.get_or_create_file_entry(file_path)
if tags:
db.add_tags(file_path, tags)
if tags:
db.add_tags(file_path, tags)
if url:
db.add_url(file_path, url)
if url:
db.add_url(file_path, url)
file_hash = sha256_file(file_path)
file_hash = sha256_file(file_path)
return (
jsonify(
{
"hash": file_hash,
"path": str(file_path),
"tags_added": len(tags),
"url_added": len(url),
}
),
201,
)
return (
jsonify(
{
"hash": file_hash,
"path": str(file_path),
"tags_added": len(tags),
"url_added": len(url),
}
),
201,
)
except Exception as e:
logger.error(f"Index error: {e}", exc_info=True)
return jsonify({"error": f"Indexing failed: {str(e)}"}), 500