import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
from monsterapi import client as mclient
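
# Chainlit front end for chatting with an LLM deployed on MonsterAPI.
# A user supplies a deployment endpoint and auth token, can optionally upload
# a PDF to build a local RAG index, and gets answers either through the index
# or directly against the deployment.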

def indexing(llm, path):
    """Build a vector index over the given file and cache a query engine."""
    documents = SimpleDirectoryReader(input_files=[path]).load_data()
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
    )
    index = VectorStoreIndex.from_documents(
        documents, service_context=service_context, use_async=True
    )
    query_engine = index.as_query_engine()
    # Cache the engine in the per-user session so the message handler can reuse it.
    cl.user_session.set("engine", query_engine)
def qa(sp, message):
    engine = cl.user_session.get("engine")
    # Prepend the system prompt (if any) to the user's question.
    ques = sp + " " + message.content
    response = engine.query(ques)
    return response
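
# Runs once per chat session: collect the deployment endpoint and auth token,
# then set up both the LlamaIndex LLM wrapper and a raw MonsterAPI client.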
@cl.on_chat_start
async def factory():
    url = await cl.AskUserMessage(
        author="Beast",
        content='Enter the API endpoint of your LLM deployment on MonsterAPI. For any questions, check out our "Read Me" section.',
    ).send()
    # Keep only the part of the pasted URL up to and including ".monsterapi.ai".
    index_ai = url["output"].find(".monsterapi.ai")
    url_ai = url["output"][: index_ai + len(".monsterapi.ai")]
    auth = await cl.AskUserMessage(author="Beast", content="Enter the auth token of your deployment").send()
    model = "deploy-llm"
    llm = MonsterLLM(
        model=model,
        base_url=url_ai,
        monster_api_key=auth["output"],
        temperature=0.75,
        context_window=1024,
    )
    service_client = mclient(api_key=auth["output"], base_url=url_ai)
    cl.user_session.set("service_client", service_client)
    cl.user_session.set("llm", llm)
    res = await cl.AskActionMessage(
        author="Beast",
        content="Do you want to enter a system prompt?",
        actions=[
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()
    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast", content="Enter system prompt").send()
        await cl.Message(author="Beast", content="Noted. Go ahead with your questions!!").send()
        cl.user_session.set("sp", sp["output"])
    else:
        await cl.Message(author="Beast", content="Okay, then you can start asking your questions!!").send()
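
# Per-message handler. Three paths:
#   1. A PDF is attached -> index it, answer with RAG, then offer a non-RAG answer.
#   2. No attachment but an index exists -> answer with RAG, then offer a non-RAG answer.
#   3. No attachment and no index -> answer directly against the deployment.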
@cl.on_message
async def main(message: cl.Message):
    service_client = cl.user_session.get("service_client")
    engine = cl.user_session.get("engine")
    llm = cl.user_session.get("llm")
    sp = cl.user_session.get("sp")
    if sp is None:
        sp = ""
    comp = None  # Action-button response; stays None if the user is never asked.
    if message.elements:
        go = True
        for file in message.elements:
            if "pdf" in file.mime:
                pdf = file
            else:
                await cl.Message(author="Beast", content="We only support PDF for now").send()
                go = False
                break
        if go:
            msg = cl.Message(author="Beast", content=f"Processing `{pdf.name}`...")
            await msg.send()
            # Indexing blocks, so run it in a worker thread.
            await cl.make_async(indexing)(llm, pdf.path)
            msg.content = f"`{pdf.name}` processed."
            await msg.update()
            msg = cl.Message(author="Beast", content="Generating response...")
            await msg.send()
            response = await cl.make_async(qa)(sp, message)
            print(response)
            msg.content = str(response)
            await msg.update()
            comp = await cl.AskActionMessage(
                author="Beast",
                content="Do you want an answer without RAG?",
                actions=[
                    cl.Action(name="yes", value="yes", label="✅ Yes"),
                    cl.Action(name="no", value="no", label="❌ No"),
                ],
            ).send()
    elif engine is not None:
        msg = cl.Message(author="Beast", content="Generating response...")
        await msg.send()
        response = await cl.make_async(qa)(sp, message)
        print(response)
        msg.content = str(response)
        await msg.update()
        comp = await cl.AskActionMessage(
            author="Beast",
            content="Do you want an answer without RAG?",
            actions=[
                cl.Action(name="yes", value="yes", label="✅ Yes"),
                cl.Action(name="no", value="no", label="❌ No"),
            ],
        ).send()
    if (not message.elements and engine is None) or (comp is not None and comp.get("value") == "yes"):
        # Direct completion against the deployment, bypassing the index.
        msg = cl.Message(author="Beast", content="Generating response...")
        await msg.send()
        payload = {
            "input_variables": {"system": sp, "prompt": message.content},
            "stream": False,
            "temperature": 0.6,
            "max_tokens": 512,
        }
        output = service_client.generate(model="deploy-llm", data=payload)
        msg.content = str(output["text"][0])
        await msg.update()
    elif comp is None and message.elements:
        # Fallback for uploads that produced no answer (e.g. a rejected file type
        # or a timed-out action prompt).
        await cl.Message(author="Beast", content="Broken ;(").send()
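
# Launch locally with the Chainlit CLI (assuming this file is saved as app.py):
#   chainlit run app.py -w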