| |
|
| | import os |
| | from rag import RAGModel |
| | from langchain.chat_models import ChatOpenAI |
| | from langchain.prompts import ChatPromptTemplate |
| | from langchain.schema import StrOutputParser |
| | from langchain.chains import LLMChain |
| |
|
| | import chainlit as cl |
| |
|
| | |
| |
|
# Shared RAG model instance, created once at import time and reused by all
# message handlers below.
# NOTE(review): os.getenv returns None when OPENAI_API_KEY is unset, so
# RAGModel would receive None here — confirm it validates/raises on a
# missing key rather than failing later at query time.
rag = RAGModel(os.getenv("OPENAI_API_KEY"))
| |
|
| |
|
@cl.on_chat_start
async def on_chat_start():
    """Greet the user at the start of a chat session.

    Sends a short "starting up" placeholder message first, then edits that
    same message in place so the user ends up seeing only the welcome prompt.
    """
    startup_msg = cl.Message(content="Firing up the research info bot...")
    await startup_msg.send()
    # Mutate and update the already-sent message instead of sending a second one.
    startup_msg.content = "Hi, welcome to research info bot. What is your query?"
    await startup_msg.update()
| |
|
| |
|
@cl.on_message
async def on_message(message: cl.Message):
    """Answer each incoming user message.

    Forwards the raw message text to the module-level RAG model and sends
    its answer back to the chat as a new message.
    """
    reply = rag.query(question=message.content)
    await cl.Message(content=reply).send()
| |
|
| |
|
| |
|