from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
import pandas as pd
from generate_recommendations import RecommendationGenerator
import torch

app = FastAPI()

# CORS middleware configuration
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:3000",
        "http://127.0.0.1:3000",
        # Add your Windows IP if needed
        "http://192.168.1.x:3000"  # Replace x with your actual IP
    ],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Load model and data once when starting the server
try:
    model_path = '../checkpoints/best_model.pth'
    catalog_data = pd.read_csv('../../data/o2_data.csv')
    recommender = RecommendationGenerator(model_path, catalog_data)
except Exception as e:
    print(f"Error loading model: {str(e)}")
    raise

class UserInput(BaseModel):
    """Request payload for the recommendations endpoint."""

    user_id: str
    age: int
    gender: str
    genre: str
    music: str

@app.post("/recommendations/")
async def get_recommendations(user_input: UserInput):
    try:
        user_info = {
            'user_id': user_input.user_id,
            'age': user_input.age,
            'gender': user_input.gender,
            'genre': user_input.genre,
            'music': user_input.music
        }
        # Generate the top 10 recommendations for this user profile
        recommendations = recommender.generate_recommendations(user_info, n_recommendations=10)
        return {
            "status": "success",
            "recommendations": recommendations.to_dict(orient='records')
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
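
# Example request for this endpoint (illustrative only; the payload values and the
# port are assumptions, with 8000 being uvicorn's default):
#
#   curl -X POST http://localhost:8000/recommendations/ \
#     -H "Content-Type: application/json" \
#     -d '{"user_id": "u1", "age": 25, "gender": "F", "genre": "pop", "music": "dance"}'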

@app.get("/health")
async def health_check():
    return {"status": "healthy"}