|
import os

# TensorFlow environment flags must be set before TensorFlow is imported,
# otherwise they have no effect.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'   # silence TF INFO/WARNING logs
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'  # disable oneDNN custom ops

from io import BytesIO

import numpy as np
from fastapi import FastAPI, File, UploadFile
from PIL import Image
from tensorflow.keras.applications.mobilenet_v2 import (
    MobileNetV2,
    decode_predictions,
    preprocess_input,
)
from tensorflow.keras.preprocessing import image

app = FastAPI()
|
|
@app.get("/logs=container") |
|
async def container_logs(): |
|
return {"status": "No logs available"} |
|
|
|
|
|
|
|
|
|
@app.get("/") |
|
def greet_json(): |
|
return {"Hello": "World!"} |
|
|
|
|
|
# Load MobileNetV2 with ImageNet weights once at startup so every request
# reuses the same model instance.
model = MobileNetV2(weights="imagenet")


def prepare_image(img):
    # Resize to the 224x224 input MobileNetV2 expects, add a batch dimension,
    # and apply the model's preprocessing (scales pixel values to [-1, 1]).
    img = img.resize((224, 224))
    img_array = image.img_to_array(img)
    img_array = np.expand_dims(img_array, axis=0)
    return preprocess_input(img_array)
|
|
@app.post("/predict") |
|
async def predict(file: UploadFile = File(...)): |
|
contents = await file.read() |
|
img = Image.open(BytesIO(contents)).convert("RGB") |
|
processed_image = prepare_image(img) |
|
|
|
predictions = model.predict(processed_image) |
|
results = decode_predictions(predictions, top=3)[0] |
|
return [{"label": label, "probability": float(prob)} for (_, label, prob) in results] |
|
|
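# Example usage (a sketch; the module name "app" and port 7860 are assumptions):
#   uvicorn app:app --host 0.0.0.0 --port 7860
#   curl -X POST -F "file=@cat.jpg" http://localhost:7860/predict
# The /predict response is a JSON list like
#   [{"label": "tabby", "probability": 0.61}, ...]  (illustrative values).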