File size: 1,270 Bytes
220a370 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 |
from langchain_community.document_loaders import PyPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_core.messages import AIMessage, HumanMessage
from fastapi import FastAPI
from pydantic import BaseModel
import os
from rag import Rag
from storePDF import get_documents_from_path
# --- RAG pipeline setup (runs once, at import time) ---
# NOTE(review): loading and splitting PDFs here is a module-level side effect,
# so importing this module is slow; consider moving it into a startup hook.
folder_path = "files"

# Load every document found under `folder_path` (see storePDF helper).
all_documents = get_documents_from_path(folder_path)

# Split documents into overlapping chunks suitable for retrieval.
text_splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=200,
    add_start_index=True,
)
texts = text_splitter.split_documents(all_documents)
# TODO(review): `texts` is never handed to the Rag instance below — confirm
# whether Rag ingests its own documents internally or this split is dead code.

# Build the retrieval-augmented-generation chain.
rag_llm = Rag()
rag_llm.createRagChain()

## Chat API
# In-memory conversation history shared by ALL requests/clients — fine for a
# single-user demo, not safe for concurrent multi-user use.
chat_history = []
class ChatInput(BaseModel):
    """Request body for POST /generatechat/: the user's question text."""

    question: str
# FastAPI application instance; the route decorators below register onto it.
app = FastAPI()
@app.get("/")
async def root():
    """Health-check endpoint confirming the API is reachable."""
    greeting = {"message": "Hello World"}
    return greeting
@app.post("/generatechat/")
async def generateResponse(chat_input: ChatInput):
    """Answer a question via the RAG chain and record the exchange in history.

    The shared module-level `chat_history` supplies conversational context to
    the chain and is appended to after each turn.
    """
    question = chat_input.question
    ai_msg = rag_llm.generateResponse(question, chat_history)
    # Record both sides of the exchange so follow-up questions have context.
    chat_history.append(HumanMessage(content=question))
    chat_history.append(AIMessage(content=ai_msg["answer"]))
    return {"response": ai_msg}
def run_server():
    """Start the API with uvicorn on 127.0.0.1:8000 (blocks until shutdown)."""
    import uvicorn  # local import: only needed when running as a script

    # Announce BEFORE the blocking call: uvicorn.run() does not return until
    # the server shuts down, so the original print placed after it would only
    # appear on exit.
    print("Server is running")
    uvicorn.run(app, host="127.0.0.1", port=8000)


if __name__ == "__main__":
    run_server()