Nitish-py committed on
Commit
2ce9cbb
β€’
1 Parent(s): df581ae

attachment enabled

Browse files
Files changed (3) hide show
  1. app.py +46 -23
  2. data.txt +0 -0
  3. requirements.txt +2 -1
app.py CHANGED
@@ -2,7 +2,9 @@ import chainlit as cl
2
  from llama_index.llms import MonsterLLM
3
  from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext
4
 
5
- def indexing(llm,path):
 
 
6
  documents = SimpleDirectoryReader(input_files=[path]).load_data()
7
  print("loading done")
8
  service_context = ServiceContext.from_defaults(
@@ -31,22 +33,18 @@ async def factory():
31
  print(auth)
32
  model = 'deploy-llm'
33
  llm = MonsterLLM(model=model,base_url=url['output'],monster_api_key=auth['output'],temperature=0.75, context_window=1024)
34
- files = None
35
- while files is None:
36
- files = await cl.AskFileMessage(author="Beast",
37
- content="Please upload a PDF file to begin!",
38
- accept=["application/pdf"],
39
- max_size_mb=20,
40
- timeout=180,
41
- ).send()
 
42
 
43
- pdf = files[0]
44
- print(pdf)
45
- msg = cl.Message(author="Beast",content=f"Processing `{pdf.name}`...")
46
- await msg.send()
47
- query_engine = await cl.make_async(indexing)(llm,pdf.path)
48
- msg.content = f"`{pdf.name}` processed."
49
- await msg.update()
50
  res = await cl.AskActionMessage(author="Beast",
51
  content="Do you want to enter system prompt?",
52
  actions=[
@@ -54,7 +52,7 @@ async def factory():
54
  cl.Action(name="no", value="no", label="❌ No"),
55
  ],
56
  ).send()
57
-
58
  if res and res.get("value") == "yes":
59
  sp = await cl.AskUserMessage(author="Beast",content="Enter system prompt").send()
60
  await cl.Message(author="Beast",content="Noted. Go ahead as your questions!!").send()
@@ -66,14 +64,39 @@ async def factory():
66
 
67
  @cl.on_message
68
  async def main(message: cl.Message):
69
- msg = cl.Message(author="Beast",content=f"Processing...", disable_feedback=False)
70
- await msg.send()
71
  engine = cl.user_session.get("engine")
 
72
  sp=cl.user_session.get("sp")
73
  if sp==None:
74
  sp=""
75
- response =await cl.make_async(qa)(sp,engine,message)
76
- print(response)
77
- msg.content = str(response)
78
- await msg.update()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
79
 
 
2
  from llama_index.llms import MonsterLLM
3
  from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext
4
 
5
+ def indexing(llm,path=None):
6
+ if path==None:
7
+ path="data.txt"
8
  documents = SimpleDirectoryReader(input_files=[path]).load_data()
9
  print("loading done")
10
  service_context = ServiceContext.from_defaults(
 
33
  print(auth)
34
  model = 'deploy-llm'
35
  llm = MonsterLLM(model=model,base_url=url['output'],monster_api_key=auth['output'],temperature=0.75, context_window=1024)
36
+ cl.user_session.set("llm", llm)
37
+ # files = None
38
+ # while files is None:
39
+ # files = await cl.AskFileMessage(author="Beast",
40
+ # content="Please upload a PDF file to begin!",
41
+ # accept=["application/pdf"],
42
+ # max_size_mb=20,
43
+ # timeout=180,
44
+ # ).send()
45
 
46
+ # pdf = files[0]
47
+ # print(pdf)
 
 
 
 
 
48
  res = await cl.AskActionMessage(author="Beast",
49
  content="Do you want to enter system prompt?",
50
  actions=[
 
52
  cl.Action(name="no", value="no", label="❌ No"),
53
  ],
54
  ).send()
55
+ query_engine = await cl.make_async(indexing)(llm)
56
  if res and res.get("value") == "yes":
57
  sp = await cl.AskUserMessage(author="Beast",content="Enter system prompt").send()
58
  await cl.Message(author="Beast",content="Noted. Go ahead as your questions!!").send()
 
64
 
65
  @cl.on_message
66
  async def main(message: cl.Message):
 
 
67
  engine = cl.user_session.get("engine")
68
+ llm=cl.user_session.get("llm")
69
  sp=cl.user_session.get("sp")
70
  if sp==None:
71
  sp=""
72
+ if not message.elements:
73
+ msg = cl.Message(author="Beast",content=f"Processing...", disable_feedback=False)
74
+ await msg.send()
75
+ response =await cl.make_async(qa)(sp,engine,message)
76
+ print(response)
77
+ msg.content = str(response)
78
+ await msg.update()
79
+ elif message.elements:
80
+ go=True
81
+ for file in message.elements:
82
+ if "pdf" in file.mime:
83
+ pdf=file
84
+ else:
85
+ await cl.Message(author="Beast",content="We only support PDF for now").send()
86
+ go=False
87
+ break
88
+ if go:
89
+ msg = cl.Message(author="Beast",content=f"Processing `{pdf.name}`...")
90
+ await msg.send()
91
+ query_engine = await cl.make_async(indexing)(llm,pdf.path)
92
+ msg.content = f"`{pdf.name}` processed."
93
+ await msg.update()
94
+ response =await cl.make_async(qa)(sp,query_engine,message)
95
+ print(response)
96
+ msg.content = str(response)
97
+ msg.author="Beast"
98
+ await msg.send()
99
+
100
+
101
+
102
 
data.txt ADDED
File without changes
requirements.txt CHANGED
@@ -1,4 +1,5 @@
1
  monsterapi
2
  chainlit
3
  pypdf
4
- git+https://github.com/Vikasqblocks/llama_index.git@f2f04654e9f2cbf1bf765b0d575a6af1f899b18e
 
 
1
  monsterapi
2
  chainlit
3
  pypdf
4
+ git+https://github.com/Vikasqblocks/llama_index.git@f2f04654e9f2cbf1bf765b0d575a6af1f899b18e
5
+ sentence-transformers