datacipen committed
Commit 9bdc739
1 Parent(s): 2e13573

Update main.py

Files changed (1)
  1. main.py +15 -56
main.py CHANGED
@@ -21,7 +21,6 @@ from langchain.chains import (
 
 import chainlit as cl
 from chainlit.input_widget import TextInput, Select, Switch, Slider
-from chainlit.types import ThreadDict
 
 from deep_translator import GoogleTranslator
 
@@ -144,42 +143,6 @@ async def Search(input, categorie):
 
     results = [sources_text, verbatim_text, sources_offres]
     return results
-
-@cl.step(type="llm")
-async def setup_conversationalChain():
-    model = await LLModel()
-    retriever = await Retriever(cl.user_session.get("selectRequest"))
-    ########## Chain with streaming ##########
-    message_history = ChatMessageHistory()
-    memory = ConversationBufferMemory(memory_key="chat_history",output_key="answer",chat_memory=message_history,return_messages=True)
-
-    qa = ConversationalRetrievalChain.from_llm(
-        model,
-        memory=cl.user_session.get("memory"),
-        chain_type="stuff",
-        return_source_documents=True,
-        verbose=False,
-        retriever=retriever
-    )
-    cl.user_session.set("runnable", qa)
-    cl.user_session.set("memory", memory)
-
-@cl.step(type="tool")
-async def switch(value):
-    if value == "Pédagogie durable":
-        return "bibliographie-OPP-DGDIN"
-    elif value == "Lieux d'apprentissage":
-        return "bibliographie-OPP-DGDIN"
-    elif value == "Journée de La Pédagogie":
-        return "year"
-    elif value == "Compétences du CFA Descartes":
-        return "skills"
-    elif value == "Formations Gustave Eiffel":
-        return "OF"
-    elif value == "Vidéos paroles de confiné.es":
-        return "videosTC"
-    elif value == "Offres d'emploi France Travail":
-        return "offreST"
 
 @cl.on_chat_start
 async def on_chat_start():
@@ -222,27 +185,23 @@ async def on_chat_start():
     await cl.Message(f"Vous pouvez requêter sur la thématique : {res.get('value')}").send()
     cl.user_session.set("selectRequest", res.get("name"))
 
-    await setup_conversationalChain()
+    model = await LLModel()
+    retriever = await Retriever(cl.user_session.get("selectRequest"))
+    ########## Chain with streaming ##########
+    message_history = ChatMessageHistory()
+    memory = ConversationBufferMemory(memory_key="chat_history",output_key="answer",chat_memory=message_history,return_messages=True)
 
-@cl.on_chat_resume
-async def on_chat_resume(thread: ThreadDict):
-    memory = ConversationBufferMemory(return_messages=True)
-    root_messages = [m for m in thread["steps"] if m["parentId"] == None]
-    for message in root_messages:
-        if message["type"] == "assistant_message" and message["output"][28:37] == "thématique":
-            resName = await switch(message["output"][42:])
-            cl.user_session.set("selectRequest", resName)
-            print(message["output"])
-
-        if message["type"] == "user_message":
-            memory.chat_memory.add_user_message(message["output"])
-        else:
-            memory.chat_memory.add_ai_message(message["output"])
-
+    qa = ConversationalRetrievalChain.from_llm(
+        model,
+        memory=memory,
+        chain_type="stuff",
+        return_source_documents=True,
+        verbose=False,
+        retriever=retriever
+    )
+    cl.user_session.set("runnable", qa)
     cl.user_session.set("memory", memory)
-
-    await setup_conversationalChain()
-
+
 @cl.on_message
 async def on_message(message: cl.Message):
     memory = cl.user_session.get("memory")
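For context, the diff cuts off on_message right after memory = cl.user_session.get("memory"), so the consumer of the "runnable" and "memory" session keys set above is not shown. Below is a minimal sketch, assuming the common Chainlit + legacy-LangChain streaming pattern (cl.AsyncLangchainCallbackHandler with ConversationalRetrievalChain.acall); it is an illustration, not the repository's actual handler body.

import chainlit as cl

@cl.on_message
async def on_message(message: cl.Message):
    # Objects stored in cl.user_session by on_chat_start in this commit.
    memory = cl.user_session.get("memory")   # ConversationBufferMemory(output_key="answer"); the chain already holds it
    qa = cl.user_session.get("runnable")     # ConversationalRetrievalChain built in on_chat_start

    # Assumed pattern: stream the LangChain run back into the Chainlit UI.
    cb = cl.AsyncLangchainCallbackHandler()
    res = await qa.acall({"question": message.content}, callbacks=[cb])

    answer = res["answer"]                   # key name comes from output_key="answer" on the memory
    # res["source_documents"] is also available because return_source_documents=True.

    await cl.Message(content=answer).send()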