# BAT / api.py — planogram updated (commit cb90fd0, author: aiyubali)
from fastapi import FastAPI
from pydantic import BaseModel
from typing import Union, List
import uvicorn
import logging
from datetime import datetime
import pytz
import torch
from main import ImageProcessor
# --- Application-wide singletons: model wrapper, logger, FastAPI app ---
image_processor = ImageProcessor()  # initialize the image processor once at import time

# Logging setup. basicConfig installs a root handler writing to drinksLog.log;
# the dedicated FileHandler below targets the SAME file. Previously the
# "drinks" logger also propagated to root, so every record was written twice
# (through two write-mode handles on one file). Propagation is disabled so
# each record is emitted exactly once, via file_handler.
logging.basicConfig(filename="drinksLog.log", filemode='w')
logger = logging.getLogger("drinks")
logger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler("drinksLog.log")
logger.addHandler(file_handler)
logger.propagate = False  # fix: prevent double-writing through the root handler

app = FastAPI()
class RequestBody(BaseModel):
    """One detection request item: a category tag plus an image reference."""
    # Processing category; the handlers below accept "posm" and "planogram".
    cat: str
    # Image location — passed straight to ImageProcessor.process_image
    # (presumably a URL; confirm against ImageProcessor in main.py).
    img: str
class RequestData(BaseModel):
    """Request envelope for /bats: `body` is a single RequestBody or a list of them."""
    body: Union[RequestBody, List[RequestBody]]
@app.get("/status")
async def status():
    """Liveness probe: confirm the AI server is up."""
    payload = {"status": "AI Server is running"}
    return payload
# Dispatch a single request item to the shared image processor by category.
async def process_image(item: "RequestBody"):
    """Process one request item.

    Args:
        item: object carrying the category (``cat``) and image URL (``img``).

    Returns:
        The image-processor result for the supported categories
        ("posm" and "planogram"), or an ``{"error": ...}`` dict otherwise.
    """
    category = item.cat
    img_url = item.img
    # The original had identical code duplicated in separate "posm" and
    # "planogram" branches; they are folded into one membership test.
    # (The annotation is a lazy string so this def evaluates without
    # requiring RequestBody to be defined first.)
    if category in ("posm", "planogram"):
        return await image_processor.process_image(img_url)
    return {"error": f"Unsupported category {category}"}
@app.post("/bats")
async def detect_items(request_data: RequestData):
    """Run detection for one or many request items.

    Args:
        request_data: envelope whose ``body`` is a single RequestBody or a
            list of them.

    Returns:
        A list of per-item results (always a list, matching the original
        behavior even for a single item), or an ``{"error": ...}`` dict if
        anything raised during processing.
    """
    try:
        # Normalize the payload so single items and lists share one code path
        # (the original duplicated the processing call in two branches).
        body = request_data.body
        items = body if isinstance(body, list) else [body]
        results = [await process_image(item) for item in items]
        return results
    except Exception:
        # Top-level boundary handler: logger.exception records the full
        # traceback (the original logged only str(e), losing the stack).
        logger.exception("Error during detection")
        return {"error": "An error occurred during detection"}
# Script entry point: serve the app on localhost only (port 4444).
if __name__ == "__main__":
    try:
        uvicorn.run(app, host="127.0.0.1", port=4444)
    finally:
        # Release cached GPU memory when the server stops, whether it exits
        # cleanly or via an exception/interrupt.
        torch.cuda.empty_cache()