awacke1 committed
Commit
011643a
1 Parent(s): 170954a

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -90,7 +90,7 @@ def search_arxiv(query):
     st.title("▶️ Semantic and Episodic Memory System")
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     search_query = query
-    top_n_results = st.slider(key='topnresults', label="Top n results as context", min_value=4, max_value=100, value=100)
+    #top_n_results = st.slider(key='topnresults', label="Top n results as context", min_value=4, max_value=100, value=100)
     search_source = st.selectbox("Search Source", ["Semantic Search - up to 10 Mar 2024", "Arxiv Search - Latest - (EXPERIMENTAL)"])
     llm_model = st.selectbox("LLM Model", ["mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.2", "google/gemma-7b-it", "None"])

@@ -100,7 +100,7 @@ def search_arxiv(query):

     result = client.predict(
         search_query,
-        top_n_results,
+        100,
         search_source,
         llm_model,
         api_name="/update_with_rag_md"
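In effect, this commit removes the "Top n results as context" slider and pins that argument to 100 when app.py calls the Space's /update_with_rag_md endpoint. A minimal standalone sketch of that call with gradio_client is shown below; the positional argument order is taken from the diff, and the query string is a hypothetical example, not something from the original file.

from gradio_client import Client

# Connect to the hosted Gradio Space that app.py queries.
client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

# Arguments mirror the post-commit call in app.py: query text, a fixed
# top-n of 100 (formerly driven by the st.slider), the search source,
# and the LLM model name.
result = client.predict(
    "retrieval augmented generation",          # example query (hypothetical)
    100,                                       # top n results used as context
    "Semantic Search - up to 10 Mar 2024",     # search source
    "mistralai/Mixtral-8x7B-Instruct-v0.1",    # LLM model
    api_name="/update_with_rag_md",
)
print(result)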