Create upload.py
src/api/upload.py +114 -0
src/api/upload.py
ADDED
@@ -0,0 +1,114 @@
import asyncio
import io
import time
import uuid
from typing import List

from fastapi import APIRouter, File, Form, HTTPException, Request, UploadFile, Depends

from src.core.config import IDX_FACES, IDX_OBJECTS, MAX_FILES_PER_UPLOAD
from src.core.security import get_verified_keys
from src.services.db_client import cld_upload, pinecone_pool
from src.core.logging import log
from src.common.utils import get_ip, standardize_category_name, to_list

router = APIRouter()

def chunker(seq, size):
    # Yield successive slices of seq, each at most `size` items long.
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))

@router.post("/api/upload")
async def upload_images(
    request: Request,
    files: List[UploadFile] = File(...),
    folder_name: str = Form(...),
    detect_faces: bool = Form(True),
    user_id: str = Form(""),
    keys: dict = Depends(get_verified_keys),
):
    ip = get_ip(request)
    start = time.perf_counter()

    if len(files) > MAX_FILES_PER_UPLOAD:
        raise HTTPException(400, f"Too many files. Max {MAX_FILES_PER_UPLOAD} per request.")

    folder = standardize_category_name(folder_name)
    pc = pinecone_pool.get(keys["pinecone_key"])
    idx_obj = pc.Index(IDX_OBJECTS)
    idx_face = pc.Index(IDX_FACES)

    ai_manager = request.app.state.ai
    sem = request.app.state.ai_semaphore  # bounds concurrent AI inference

    all_face_upserts: list[dict] = []
    all_object_upserts: list[dict] = []
    uploaded_urls: list[str] = []

    async def _process_file(file: UploadFile) -> tuple[str, str, list]:
        file_bytes = await file.read()
        file_id = uuid.uuid4().hex

        async def _run_ai():
            async with sem:
                return await ai_manager.process_image_bytes_async(file_bytes, detect_faces=detect_faces)

        # Upload to Cloudinary (blocking, so off-loaded to a thread) while the
        # AI pipeline embeds the same bytes concurrently.
        cld_task = asyncio.to_thread(cld_upload, io.BytesIO(file_bytes), folder, keys["cloudinary_creds"])
        ai_task = _run_ai()

        cld_res, vectors = await asyncio.gather(cld_task, ai_task)
        return file_id, cld_res["secure_url"], vectors

    results = await asyncio.gather(*[_process_file(f) for f in files])

    # Route each vector into the face or object upsert batch.
    for file_id, image_url, vectors in results:
        uploaded_urls.append(image_url)
        for i, v in enumerate(vectors):
            vector_id = f"{file_id}_{i}"
            if v["type"] == "face":
                all_face_upserts.append({
                    "id": vector_id,
                    "values": to_list(v["vector"]),
                    "metadata": {
                        "url": image_url,
                        "folder": folder,
                        "face_crop": v.get("face_crop", ""),
                        "det_score": float(v.get("det_score", 1.0)),
                        "face_width_px": int(v.get("face_width_px", 0)),
                    },
                })
            else:
                all_object_upserts.append({
                    "id": vector_id,
                    "values": to_list(v["vector"]),
                    "metadata": {"url": image_url, "folder": folder},
                })

    db_tasks = []

    def batched_upsert(index, vectors):
        # Index.upsert is synchronous; send vectors in chunks of 200.
        for batch in chunker(vectors, 200):
            index.upsert(vectors=batch)

    if all_face_upserts:
        db_tasks.append(asyncio.to_thread(batched_upsert, idx_face, all_face_upserts))
    if all_object_upserts:
        db_tasks.append(asyncio.to_thread(batched_upsert, idx_obj, all_object_upserts))

    if db_tasks:
        try:
            await asyncio.gather(*db_tasks)
        except Exception as e:
            raise HTTPException(500, f"Database insertion failed: {e}")

    duration_ms = round((time.perf_counter() - start) * 1000)
    log("INFO", "upload.complete", user_id=user_id or "anonymous", ip=ip,
        files=len(files), folder=folder, duration_ms=duration_ms)

    return {
        "message": "Done!",
        "urls": uploaded_urls,
        "summary": {
            "files": len(files),
            "face_vectors": len(all_face_upserts),
            "object_vectors": len(all_object_upserts),
        },
    }
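For reference, a minimal client-side sketch of calling this endpoint. The base URL, the port, the filenames, and the absence of auth headers are all assumptions for illustration; whatever credentials get_verified_keys expects (not shown in this diff) would need to be supplied as well.

# Hypothetical usage sketch -- base URL and auth are assumptions, not part of this commit.
import requests

files = [
    ("files", ("beach.jpg", open("beach.jpg", "rb"), "image/jpeg")),
    ("files", ("group.jpg", open("group.jpg", "rb"), "image/jpeg")),
]
data = {"folder_name": "summer-trip", "detect_faces": "true", "user_id": "demo"}

resp = requests.post("http://localhost:7860/api/upload", files=files, data=data)
resp.raise_for_status()
print(resp.json()["summary"])  # e.g. {"files": 2, "face_vectors": ..., "object_vectors": ...}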