File size: 2,016 Bytes
a465905
 
 
4210f9f
 
a465905
4210f9f
2b35aa2
a465905
4210f9f
 
 
 
 
 
c38efb3
4210f9f
a465905
 
4210f9f
a465905
 
 
 
4210f9f
 
 
 
 
 
 
 
 
 
a465905
 
4210f9f
 
 
 
 
 
 
 
 
 
 
 
a465905
4210f9f
a465905
 
2b35aa2
a465905
4210f9f
 
a465905
 
4210f9f
 
a465905
 
 
 
 
4210f9f
 
a465905
 
4210f9f
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware

from schemas import InputModel
from config import settings

from chatbot.workflow import graph
from langchain_core.messages import convert_to_messages

# Create FastAPI app instance; title/description/version come from the
# project-level settings object (see config.py).
app = FastAPI(
    title=settings.API_TITLE,
    description=settings.API_DESCRIPTION,
    version=settings.API_VERSION
)

# Configure CORS so browser clients on the configured origins can call the API.
# NOTE(review): allow_credentials=True is incompatible with a wildcard "*"
# origin — confirm settings.ALLOWED_ORIGINS lists explicit origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

#---------- Health Check Endpoint ----------#
@app.get("/", tags=["Health Check"])
async def health_check() -> dict[str, str]:
    """Liveness probe: report that the API process is up and responding."""
    payload: dict[str, str] = {
        "status": "healthy",
        "message": "Makhfi Chatbot API is running",
    }
    return payload

#----------- API Endpoint to interact with the Chatbot ----------#
@app.post("/chat")
async def chat_with_makhfi(user_input: InputModel):
    """
    Process a chat message and return the bot's response.

    Args:
        user_input: InputModel containing the question and chat history

    Returns:
        Dict containing the answer and source links

    Raises:
        HTTPException: If there's an error processing the request
    """
    try:
        # Prepare input for the workflow graph; chat_history arrives as
        # serialized messages and is rehydrated into LangChain message objects.
        inputs = {
            "question": user_input.question,
            "chat_history": convert_to_messages(user_input.chat_history),
        }

        # Run the LangGraph workflow. 'output' is expected to be a Pydantic
        # model exposing 'answer' and 'sources' — TODO confirm against
        # chatbot/workflow.py.
        response = await graph.ainvoke(inputs)
        output = response['output'].model_dump()

        # Un-escape quote characters the model may have emitted in the answer.
        answer = output['answer'].replace('\\"', '"')
        links = output['sources']

        return {
            "answer": answer,
            "sources": links
            }

    except HTTPException:
        # Let explicit HTTP errors raised downstream propagate unchanged
        # rather than being re-wrapped as a generic 500.
        raise
    except Exception as e:
        # Boundary handler: convert any unexpected failure into a 500.
        # Chain with 'from e' so the original traceback is preserved in logs.
        # NOTE(review): embedding str(e) in 'detail' can leak internals to
        # clients — consider logging the exception and returning a generic
        # message instead.
        raise HTTPException(
            status_code=500, 
            detail=f"Internal server error: {e}"
            ) from e