# NOTE(review): the original first lines read "Spaces: / Sleeping / Sleeping" —
# Hugging Face Spaces page-status text captured by a scrape, not source code.
# Replaced with this comment so the module parses.
from fastapi import FastAPI, File, UploadFile | |
from fastapi.middleware.cors import CORSMiddleware | |
import tensorflow as tf | |
import numpy as np | |
import google.generativeai as genai | |
import os | |
# Application instance for the waste-classification API.
app = FastAPI()

# Open the API to browser clients from any origin. NOTE(review): wildcard
# origins together with allow_credentials=True is permissive — confirm this
# is intended before exposing the service publicly.
_cors_config = {
    "allow_origins": ["*"],
    "allow_credentials": True,
    "allow_methods": ["*"],
    "allow_headers": ["*"],
}
app.add_middleware(CORSMiddleware, **_cors_config)
# Configure the Gemini client used for recycling-insight generation.
# SECURITY: the previous code shipped a literal API key as the getenv()
# fallback. A key committed to source is compromised and must be revoked;
# the only supported configuration is the GEMINI_API_KEY environment variable.
GEMINI_API_KEY = os.getenv('GEMINI_API_KEY')
if not GEMINI_API_KEY:
    # Fail fast at startup instead of failing on the first API call.
    raise RuntimeError("GEMINI_API_KEY environment variable is not set")
genai.configure(api_key=GEMINI_API_KEY)
gemini_model = genai.GenerativeModel('gemini-pro')
# Restore the trained classifier for inference only: compile=False skips
# rebuilding the optimizer/loss state, which is not needed to call predict().
model = tf.keras.models.load_model(
    'Image_classify.keras', custom_objects=None, compile=False
)

# Output labels — index order must match the model's final layer — and the
# spatial resolution every uploaded image is resized to before inference.
data_cat = ['disposable cups', 'paper', 'plastic bottle']
img_height, img_width = 224, 224
def generate_recycling_insight(detected_object):
    """Generate sustainability insights for detected objects"""
    prompt = f"""
    You are a sustainability-focused AI. Analyze the {detected_object} (which is a solid dry waste)
    and generate the top three innovative, eco-friendly recommendations for repurposing it. Ensure each recommendation is:
    - Give the Title of the recommendation
    - Practical and easy to implement
    - Environmentally beneficial
    - Clearly explained in one or two concise sentences
    """
    try:
        # .text access is kept inside the try: the SDK raises when the
        # response was blocked, and that case must also yield an error string.
        reply = gemini_model.generate_content(prompt)
        return reply.text.strip()
    except Exception as e:
        # Degrade to an error message rather than failing the whole request.
        return f"Error generating insight: {str(e)}"
@app.post("/predict")
async def predict(file: UploadFile = File(...)):
    """Classify an uploaded waste image and return recycling insights.

    Fix: the function was never registered as a route — without the
    @app.post decorator uvicorn served no endpoints at all.

    Returns a dict with:
      - "class", "confidence", "insights" on success;
      - "error" (plus "confidence" when the score was too low) on failure.
    """
    try:
        # Read the upload and preprocess to the model's expected input:
        # RGB, img_height x img_width, float32, with a leading batch axis.
        contents = await file.read()
        image = tf.image.decode_image(contents, channels=3)
        image = tf.image.resize(image, [img_height, img_width])
        image = tf.cast(image, tf.float32)
        image = tf.expand_dims(image, 0)

        # Run inference; softmax converts raw outputs into probabilities.
        predictions = model.predict(image, verbose=0)
        score = tf.nn.softmax(predictions[0])
        confidence = float(np.max(score) * 100)

        # Refuse to guess when the top probability is below 45%.
        if confidence < 45:
            return {
                "error": "Confidence too low to make a prediction",
                "confidence": confidence
            }

        predicted_class = data_cat[int(np.argmax(score))]
        sustainability_insight = generate_recycling_insight(predicted_class)
        return {
            "class": predicted_class,
            "confidence": confidence,
            "insights": sustainability_insight
        }
    except Exception as e:
        # Surface decode/preprocess/inference failures to the client
        # instead of crashing the request handler.
        return {"error": str(e)}
if __name__ == "__main__":
    # Run a local development server when executed directly.
    # Port 7860 is the port Hugging Face Spaces expects an app to bind.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)