from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from langchain_core.messages import convert_to_messages

from chatbot.workflow import graph
from config import settings
from schemas import InputModel
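
# For reference, InputModel (imported from schemas.py) is assumed to be a Pydantic
# model exposing the two fields used by the chat handler below; a minimal sketch:
#
#     class InputModel(BaseModel):
#         question: str          # the user's current question
#         chat_history: list = []  # prior messages, in a format accepted by
#                                  # convert_to_messages()
#
# Only the field names `question` and `chat_history` come from the handler code;
# the types and defaults shown here are assumptions.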
# Create FastAPI app instance
app = FastAPI(
    title=settings.API_TITLE,
    description=settings.API_DESCRIPTION,
    version=settings.API_VERSION,
)
# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
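
# The `settings` object (imported from config.py) is assumed to expose the four
# attributes referenced above; a minimal pydantic-settings style sketch:
#
#     class Settings(BaseSettings):
#         API_TITLE: str = "Makhfi Chatbot API"
#         API_DESCRIPTION: str = ""
#         API_VERSION: str = "1.0.0"
#         ALLOWED_ORIGINS: list[str] = ["*"]
#
#     settings = Settings()
#
# The attribute names come from the code above; the default values are
# placeholders, not taken from the original config.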
#---------- Health Check Endpoint ----------#
@app.get("/health")  # route path assumed; a decorator is needed to register this handler
async def health_check() -> dict[str, str]:
    """
    Health check endpoint to verify the API is running.
    """
    return {"status": "healthy", "message": "Makhfi Chatbot API is running"}
#----------- API Endpoint to interact with the Chatbot ----------#
@app.post("/chat")  # route path assumed; a decorator is needed to register this handler
async def chat_with_makhfi(user_input: InputModel):
    """
    Process a chat message and return the bot's response.

    Args:
        user_input: InputModel containing the question and chat history

    Returns:
        Dict containing the answer and source links

    Raises:
        HTTPException: If there's an error processing the request
    """
    try:
        # Prepare input for the graph
        inputs = {
            "question": user_input.question,
            "chat_history": convert_to_messages(user_input.chat_history),
        }

        # Process through the workflow graph
        response = await graph.ainvoke(inputs)
        output = response["output"].model_dump()

        # Clean up escaped quotes in the answer
        answer = output["answer"].replace('\\"', '"')
        links = output["sources"]

        return {
            "answer": answer,
            "sources": links,
        }
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Internal server error: {e}",
        ) from e
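

#---------- Local development entry point ----------#
# A minimal sketch for running the API locally with uvicorn; the host and port
# below are assumptions, not values taken from the project's config. Example
# request against the assumed "/chat" route:
#
#   curl -X POST http://localhost:8000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"question": "Hello", "chat_history": []}'
#
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)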