awacke1 commited on
Commit
4106d50
•
1 Parent(s): 994ff03

Update backup5.Arxiv.app.py

Browse files
Files changed (1) hide show
  1. backup5.Arxiv.app.py +39 -16
backup5.Arxiv.app.py CHANGED
@@ -160,15 +160,31 @@ def search_glossary(query):
160
  # ๐Ÿ•ต๏ธโ€โ™‚๏ธ Searching the glossary for: query
161
  all_results = ""
162
  st.markdown(f"- {query}")
163
-
 
 
 
164
  # ๐Ÿ” Run 1 - ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
165
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
166
  response2 = client.predict(
167
- query, # str in 'parameter_13' Textbox component
168
- "google/gemma-7b-it", # LLM Model Dropdown component
169
- True, # Stream output Checkbox component
170
- api_name="/ask_llm"
 
 
 
 
 
 
 
 
 
 
 
 
171
  )
 
172
  st.write('๐Ÿ” Run of Multi-Agent System Paper Summary Spec is Complete')
173
  st.markdown(response2)
174
 
@@ -198,18 +214,25 @@ def process_text(text_input):
198
  st.markdown(text_input)
199
 
200
  with st.chat_message("assistant"):
201
- completion = openai.ChatCompletion.create(
202
- model=MODEL,
203
- messages=[
204
- {"role": m["role"], "content": m["content"]}
205
- for m in st.session_state.messages
206
- ],
207
- stream=False
208
- )
209
- return_text = completion.choices[0].message.content
210
- st.write("Assistant: " + return_text)
211
- filename = generate_filename(text_input, "md")
212
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
213
  create_and_save_file(return_text, file_type="md", prompt=text_input, is_image=False, should_save=True)
214
  st.session_state.messages.append({"role": "assistant", "content": return_text})
215
 
 
160
  # ๐Ÿ•ต๏ธโ€โ™‚๏ธ Searching the glossary for: query
161
  all_results = ""
162
  st.markdown(f"- {query}")
163
+
164
+ #database_choice Literal['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)'] Default: "Semantic Search"
165
+ #llm_model_picked Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] Default: "mistralai/Mistral-7B-Instruct-v0.2"
166
+
167
  # ๐Ÿ” Run 1 - ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
168
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
169
  response2 = client.predict(
170
+ message=query, # str in 'parameter_13' Textbox component
171
+ llm_results_use=5,
172
+ database_choice="Semantic Search",
173
+ llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
174
+ api_name="/update_with_rag_md"
175
+ )
176
+
177
+ #llm_model_picked Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] Default: "mistralai/Mistral-7B-Instruct-v0.2"
178
+
179
+
180
+ client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
181
+ result = client.predict(
182
+ prompt=query,
183
+ llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
184
+ stream_outputs=True,
185
+ api_name="/ask_llm"
186
  )
187
+
188
  st.write('๐Ÿ” Run of Multi-Agent System Paper Summary Spec is Complete')
189
  st.markdown(response2)
190
 
 
214
  st.markdown(text_input)
215
 
216
  with st.chat_message("assistant"):
 
 
 
 
 
 
 
 
 
 
 
217
 
218
+ search_glossary(text_input)
219
+
220
+
221
+ useOpenAI=False
222
+ if useOpenAI:
223
+ completion = openai.ChatCompletion.create(
224
+ model=MODEL,
225
+ messages=[
226
+ {"role": m["role"], "content": m["content"]}
227
+ for m in st.session_state.messages
228
+ ],
229
+ stream=False
230
+ )
231
+ return_text = completion.choices[0].message.content
232
+ st.write("Assistant: " + return_text)
233
+
234
+
235
+ filename = generate_filename(text_input, "md")
236
  create_and_save_file(return_text, file_type="md", prompt=text_input, is_image=False, should_save=True)
237
  st.session_state.messages.append({"role": "assistant", "content": return_text})
238