# RAG-Chat / app.py
# Source: Hugging Face Space by Nitish-py
# Commit 2ce9cbb — "attatchment enabled"
import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext
def indexing(llm, path=None):
    """Build a vector-store query engine over a single document.

    Args:
        llm: LLM instance used by the service context for response synthesis.
        path: Path of the file to index; falls back to the bundled
            "data.txt" when not provided.

    Returns:
        A llama_index query engine over the indexed document.
    """
    if path is None:
        path = "data.txt"
    documents = SimpleDirectoryReader(input_files=[path]).load_data()
    print("loading done")
    # Local BGE embeddings keep indexing independent of the remote LLM endpoint.
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
    )
    print("indexing")
    index = VectorStoreIndex.from_documents(
        documents, service_context=service_context, use_async=True
    )
    query_engine = index.as_query_engine()
    print("all done")
    return query_engine
def qa(sp, engine, message):
    """Prepend the system prompt to the user's message and query the engine.

    Args:
        sp: System-prompt text (may be empty).
        engine: Query engine exposing a ``query(str)`` method.
        message: Chat message object whose ``content`` holds the question.

    Returns:
        The engine's response object.
    """
    question = f"{sp} {message.content}"
    return engine.query(question)
@cl.on_chat_start
async def factory():
    """Chat-start hook: collect the endpoint URL and auth token, build the
    MonsterLLM client, index the bundled default document, and optionally
    record a user-supplied system prompt in the session."""
    url = await cl.AskUserMessage(author="Beast", content="Enter url").send()
    print(url)
    # MonsterLLM expects the base URL without a trailing slash; guard against
    # empty input before indexing the last character.
    if url['output'] and url['output'][-1] == "/":
        url['output'] = url['output'].rstrip("/")
    auth = await cl.AskUserMessage(author="Beast", content="Enter auth token").send()
    print(auth)
    model = 'deploy-llm'
    llm = MonsterLLM(
        model=model,
        base_url=url['output'],
        monster_api_key=auth['output'],
        temperature=0.75,
        context_window=1024,
    )
    cl.user_session.set("llm", llm)
    res = await cl.AskActionMessage(
        author="Beast",
        content="Do you want to enter system prompt?",
        actions=[
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()
    # Index the default document off the event loop so the UI stays responsive.
    query_engine = await cl.make_async(indexing)(llm)
    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast", content="Enter system prompt").send()
        await cl.Message(author="Beast", content="Noted. Go ahead as your questions!!").send()
        cl.user_session.set("sp", sp["output"])
    else:
        await cl.Message(author="Beast", content="Okay, then you can start asking your questions!!").send()
    cl.user_session.set("engine", query_engine)
@cl.on_message
async def main(message: cl.Message):
    """Per-message handler: answer the question with the session engine, or,
    when a PDF is attached, index the attachment first and answer against it."""
    engine = cl.user_session.get("engine")
    llm = cl.user_session.get("llm")
    sp = cl.user_session.get("sp")
    if sp is None:
        sp = ""
    if not message.elements:
        # Plain question: query the engine built at chat start.
        msg = cl.Message(author="Beast", content="Processing...", disable_feedback=False)
        await msg.send()
        response = await cl.make_async(qa)(sp, engine, message)
        print(response)
        msg.content = str(response)
        await msg.update()
    else:
        # Attachment path: only PDFs are supported; any other MIME type aborts.
        go = True
        pdf = None
        for file in message.elements:
            if "pdf" in file.mime:
                pdf = file
            else:
                await cl.Message(author="Beast", content="We only support PDF for now").send()
                go = False
                break
        if go:
            msg = cl.Message(author="Beast", content=f"Processing `{pdf.name}`...")
            await msg.send()
            # Build a fresh index over the attachment off the event loop.
            query_engine = await cl.make_async(indexing)(llm, pdf.path)
            msg.content = f"`{pdf.name}` processed."
            await msg.update()
            response = await cl.make_async(qa)(sp, query_engine, message)
            print(response)
            msg.content = str(response)
            # update(), not send(): the message was already sent above, matching
            # the pattern used in the no-attachment branch.
            await msg.update()