awacke1 committed on
Commit
5347d74
•
1 Parent(s): a534622

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -45
app.py CHANGED
@@ -89,36 +89,21 @@ def SpeechSynthesis(result):
89
 
90
  def parse_to_markdown(text):
91
  return text
92
-
 
93
  def search_arxiv(query):
94
-
95
- # Show ArXiv Scholarly Articles! ----------------*************-------------***************----------------------------------------
96
- # st.title("▶️ Semantic and Episodic Memory System")
97
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
98
-
99
  search_query = query
100
- #top_n_results = st.slider(key='topnresults', label="Top n results as context", min_value=4, max_value=100, value=100)
101
- #search_source = st.sidebar.selectbox(key='searchsource', label="Search Source", ["Semantic Search - up to 10 Mar 2024", "Arxiv Search - Latest - (EXPERIMENTAL)"])
102
  search_source = "Arxiv Search - Latest - (EXPERIMENTAL)" # "Semantic Search - up to 10 Mar 2024"
103
- #llm_model = st.sidebar.selectbox(key='llmmodel', label="LLM Model", ["mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.2", "google/gemma-7b-it", "None"])
104
  llm_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 
105
 
106
- st.sidebar.markdown('### 🔎 ' + query)
107
-
108
-
109
 
110
- # ArXiv searcher ~-<>-~ Paper Summary - Ask LLM
111
- client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
112
- response2 = client.predict(
113
- query, # str in 'parameter_13' Textbox component
114
- "mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
115
- True, # bool in 'Stream output' Checkbox component
116
- api_name="/ask_llm"
117
- )
118
- st.write('๐Ÿ”Run of Multi-Agent System Paper Summary Spec is Complete')
119
- st.markdown(response2)
120
-
121
- # ArXiv searcher ~-<>-~ Paper References - Update with RAG
122
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
123
  response1 = client.predict(
124
  query,
@@ -127,26 +112,46 @@ def search_arxiv(query):
127
  "mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
128
  api_name="/update_with_rag_md"
129
  )
130
- st.write('๐Ÿ”Run of Multi-Agent System Paper References is Complete')
131
- responseall = response1[0] + response1[1]
132
- st.markdown(responseall)
133
- result = response2 + responseall
134
-
135
- SpeechSynthesis(result) # Search History Reader / Writer IO Memory - Audio at Same time as Reading.
 
136
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
137
  filename=generate_filename(query, "md")
138
- create_file(filename, query, result, should_save)
139
- saved_files = [f for f in os.listdir(".") if f.endswith(".md")]
140
- selected_file = st.sidebar.selectbox("Saved Files", saved_files)
141
-
142
- if selected_file:
143
- file_content = load_file(selected_file)
144
- st.sidebar.markdown(file_content)
145
- if st.sidebar.button("🗑️ Delete"):
146
- os.remove(selected_file)
147
- st.warning(f"File deleted: {selected_file}")
148
-
149
- return result
150
 
151
 
152
  # Prompts for App, for App Product, and App Product Code
@@ -1250,7 +1255,7 @@ if GiveFeedback:
1250
  try:
1251
  query_params = st.query_params
1252
  query = (query_params.get('q') or query_params.get('query') or [''])
1253
- if query:
1254
  result = search_arxiv(query)
1255
  #result2 = search_glossary(result)
1256
  except:
@@ -1388,14 +1393,14 @@ if AddAFileForContext:
1388
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
1389
 
1390
 
1391
- num_columns_video=st.slider(key="num_columns_video", label="Choose Number of Video Columns", min_value=1, max_value=15, value=10)
1392
  display_videos_and_links(num_columns_video) # Video Jump Grid
1393
 
1394
- num_columns_images=st.slider(key="num_columns_images", label="Choose Number of Image Columns", min_value=1, max_value=10, value=4)
1395
  display_images_and_wikipedia_summaries(num_columns_images) # Image Jump Grid
1396
 
1397
  display_glossary_grid(roleplaying_glossary) # Word Glossary Jump Grid - Dynamically calculates columns based on details length to keep topic together
1398
 
1399
- num_columns_text=st.slider(key="num_columns_text", label="Choose Number of Text Columns", min_value=1, max_value=10, value=4)
1400
  display_buttons_with_scores(num_columns_text) # Feedback Jump Grid
1401
 
 
89
 
90
  def parse_to_markdown(text):
91
  return text
92
+
93
+ # Show ArXiv Scholarly Articles! ----------------*************----▶️ Semantic and Episodic Memory System
94
  def search_arxiv(query):
95
+ start_time = time.strftime("%Y-%m-%d %H:%M:%S")
96
+
97
+
98
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
 
99
  search_query = query
 
 
100
  search_source = "Arxiv Search - Latest - (EXPERIMENTAL)" # "Semantic Search - up to 10 Mar 2024"
 
101
  llm_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
102
+ st.markdown('### 🔎 ' + query)
103
 
104
+ # Search 1 - Retrieve the Papers
 
 
105
 
106
+
 
 
 
 
 
 
 
 
 
 
 
107
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
108
  response1 = client.predict(
109
  query,
 
112
  "mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
113
  api_name="/update_with_rag_md"
114
  )
115
+ #st.markdown(response1)
116
+ lastpart=''
117
+ totalparts=''
118
+ #for parts in response1:
119
+ # st.markdown(parts) # expect 2
120
+ # lastpart=parts
121
+ # totalparts=totalparts+parts
122
 
123
+ results = response1[0] # Format for markdown display with links
124
+ results2 = response1[1] # format for subquery without links
125
+ st.markdown(results)
126
+
127
+ RunSecondQuery = False
128
+ if RunSecondQuery:
129
+ # Search 2 - Retrieve the Summary with Papers Context and Original Query
130
+ #client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
131
+ #newquery='Create a summary as markdown outline with emojis for query: ' + query + ' ' + totalparts
132
+ response2 = client.predict(
133
+ query, # str in 'parameter_13' Textbox component
134
+ #"mistralai/Mixtral-8x7B-Instruct-v0.1",
135
+ #"mistralai/Mistral-7B-Instruct-v0.2",
136
+ "google/gemma-7b-it",
137
+ True, # bool in 'Stream output' Checkbox component
138
+ api_name="/ask_llm"
139
+ )
140
+ st.markdown(response2)
141
+ results = results + response2
142
+
143
+ st.write('๐Ÿ”Run of Multi-Agent System Paper Summary Spec is Complete')
144
+ end_time = time.strftime("%Y-%m-%d %H:%M:%S")
145
+ start_timestamp = time.mktime(time.strptime(start_time, "%Y-%m-%d %H:%M:%S"))
146
+ end_timestamp = time.mktime(time.strptime(end_time, "%Y-%m-%d %H:%M:%S"))
147
+ elapsed_seconds = end_timestamp - start_timestamp
148
+ st.write(f"Start time: {start_time}")
149
+ st.write(f"Finish time: {end_time}")
150
+ st.write(f"Elapsed time: {elapsed_seconds:.2f} seconds")
151
+ SpeechSynthesis(results) # Search History Reader / Writer IO Memory - Audio at Same time as Reading.
152
  filename=generate_filename(query, "md")
153
+ create_file(filename, query, results, should_save)
154
+ return results
 
 
 
 
 
 
 
 
 
 
155
 
156
 
157
  # Prompts for App, for App Product, and App Product Code
 
1255
  try:
1256
  query_params = st.query_params
1257
  query = (query_params.get('q') or query_params.get('query') or [''])
1258
+ if len(query) > 1:
1259
  result = search_arxiv(query)
1260
  #result2 = search_glossary(result)
1261
  except:
 
1393
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
1394
 
1395
 
1396
+ num_columns_video=st.slider(key="num_columns_video", label="Choose Number of Video Columns", min_value=1, max_value=15, value=4)
1397
  display_videos_and_links(num_columns_video) # Video Jump Grid
1398
 
1399
+ num_columns_images=st.slider(key="num_columns_images", label="Choose Number of Image Columns", min_value=1, max_value=15, value=4)
1400
  display_images_and_wikipedia_summaries(num_columns_images) # Image Jump Grid
1401
 
1402
  display_glossary_grid(roleplaying_glossary) # Word Glossary Jump Grid - Dynamically calculates columns based on details length to keep topic together
1403
 
1404
+ num_columns_text=st.slider(key="num_columns_text", label="Choose Number of Text Columns", min_value=1, max_value=15, value=4)
1405
  display_buttons_with_scores(num_columns_text) # Feedback Jump Grid
1406