datacipen committed
Commit 43810bb
Parent: 2b613f9

Update main.py

Files changed (1)
  1. main.py +21 -12
main.py CHANGED
@@ -112,10 +112,8 @@ async def on_chat_start():
         timeout="3600"
     ).send()
 
-    if res and res.get("value") == "continue":
-        await cl.Message(
-            content="On continue!",
-        ).send()
+    if res:
+        cl.user_session.set("selectRequest", res.get("value"))
 
     template = """Answer the question based only on the following context:
 
@@ -162,6 +160,7 @@ async def on_message(message: cl.Message):
                 self.sources.add(source_page_pair) # Add unique pairs to the set
 
         def on_llm_end(self, response, *, run_id, parent_run_id, **kwargs):
+            cl.user_session.set("selectRequest","")
             if len(self.sources):
                 sources_text = "\n".join([f"{source}#page={page}" for source, page in self.sources])
                 self.msg.elements.append(
@@ -169,13 +168,23 @@ async def on_message(message: cl.Message):
                 )
 
     async with cl.Step(type="run", name="QA Assistant"):
-        async for chunk in runnable.astream(
-            message.content,
-            config=RunnableConfig(callbacks=[
-                cl.LangchainCallbackHandler(),
-                PostMessageHandler(msg)
-            ]),
-        ):
-            await msg.stream_token(chunk)
+        if cl.user_session.get("selectRequest"):
+            async for chunk in runnable.astream(
+                cl.user_session.get("selectRequest"),
+                config=RunnableConfig(callbacks=[
+                    cl.LangchainCallbackHandler(),
+                    PostMessageHandler(msg)
+                ]),
+            ):
+                await msg.stream_token(chunk)
+        else:
+            async for chunk in runnable.astream(
+                message.content,
+                config=RunnableConfig(callbacks=[
+                    cl.LangchainCallbackHandler(),
+                    PostMessageHandler(msg)
+                ]),
+            ):
+                await msg.stream_token(chunk)
 
     await msg.send()
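
The two branches added in the last hunk stream from the same runnable and differ only in the input passed to astream. Below is a condensed sketch of that routing, assuming the runnable, msg and PostMessageHandler objects that main.py already builds inside on_message; the helper name stream_answer is hypothetical and not part of the commit.

import chainlit as cl
from langchain.schema.runnable.config import RunnableConfig


async def stream_answer(message: cl.Message, runnable, msg, post_handler) -> None:
    # Prefer the request stored under "selectRequest" at chat start;
    # otherwise fall back to the text the user just typed.
    query = cl.user_session.get("selectRequest") or message.content
    async for chunk in runnable.astream(
        query,
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler(), post_handler]),
    ):
        await msg.stream_token(chunk)

Because on_llm_end now resets "selectRequest" to an empty string, the stored selection only drives the first answer after the user picks an option; later turns stream from message.content as before.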