Update app.py
app.py
CHANGED
@@ -1,14 +1,83 @@
-import streamlit as st
+#import streamlit as st
+#from gradio_client import Client
+#client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
+#result = client.predict(
+#    "What is Semantic and Episodic memory?",  # str in 'Search' Textbox component
+#    4,  # float (numeric value between 4 and 10) in 'Top n results as context' Slider component
+#    "Semantic Search - up to 10 Mar 2024",  # Literal['Semantic Search - up to 10 Mar 2024', 'Arxiv Search - Latest - (EXPERIMENTAL)'] in 'Search Source' Dropdown component
+#    "mistralai/Mixtral-8x7B-Instruct-v0.1",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
+#    api_name="/update_with_rag_md"
+#)
+#st.markdown(result)
+
 
+import streamlit as st
+import os
+from datetime import datetime
 from gradio_client import Client
 
-client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-result = client.predict(
-    "What is Semantic and Episodic memory?",  # str in 'Search' Textbox component
-    4,  # float (numeric value between 4 and 10) in 'Top n results as context' Slider component
-    "Semantic Search - up to 10 Mar 2024",  # Literal['Semantic Search - up to 10 Mar 2024', 'Arxiv Search - Latest - (EXPERIMENTAL)'] in 'Search Source' Dropdown component
-    "mistralai/Mixtral-8x7B-Instruct-v0.1",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
-    api_name="/update_with_rag_md"
-)
-st.markdown(result)
-
+def save_file(content, file_type):
+    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+    file_name = f"{file_type}_{timestamp}.md"
+    with open(file_name, "w") as file:
+        file.write(content)
+    return file_name
+
+def load_file(file_name):
+    with open(file_name, "r") as file:
+        content = file.read()
+    return content
+
+def main():
+    st.set_page_config(page_title="Memory Flag System")
+    st.title("Memory Flag System")
+
+    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
+
+    search_query = st.text_input("Search")
+    top_n_results = st.slider("Top n results as context", min_value=4, max_value=10, value=4)
+    search_source = st.selectbox("Search Source", ["Semantic Search - up to 10 Mar 2024", "Arxiv Search - Latest - (EXPERIMENTAL)"])
+    llm_model = st.selectbox("LLM Model", ["mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.2", "google/gemma-7b-it", "None"])
+
+    if st.button("Search"):
+        result = client.predict(
+            search_query,
+            top_n_results,
+            search_source,
+            llm_model,
+            api_name="/update_with_rag_md"
+        )
+        st.markdown(result)
+
+        file_type = st.radio("Select Memory Flag", ("Semantic", "Episodic"))
+        if st.button("Save"):
+            file_name = save_file(result, file_type)
+            st.success(f"File saved: {file_name}")
+
+    saved_files = [f for f in os.listdir(".") if f.endswith(".md")]
+    selected_file = st.sidebar.selectbox("Saved Files", saved_files)
+
+    if selected_file:
+        file_content = load_file(selected_file)
+        st.sidebar.markdown(file_content)
+
+        if st.sidebar.button("📝 Edit"):
+            edited_content = st.text_area("Edit File", value=file_content, height=400)
+            new_file_name = st.text_input("File Name", value=selected_file)
+            if st.button("💾 Save"):
+                with open(new_file_name, "w") as file:
+                    file.write(edited_content)
+                st.success(f"File updated: {new_file_name}")
+
+        if st.sidebar.button("🗑️ Delete"):
+            os.remove(selected_file)
+            st.warning(f"File deleted: {selected_file}")
+
+if __name__ == "__main__":
+    main()
+
+
+
+
+
+
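For reference, the refactored Search handler in main() issues the same /update_with_rag_md call as the original one-shot script, only with values taken from the Streamlit widgets instead of hard-coded literals. A minimal standalone sketch of that call, filled in with the widget defaults shown in the diff above (it needs network access to the awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern Space):

from gradio_client import Client

# Connect to the same Hugging Face Space the app talks to.
client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

# Same positional arguments the app collects from its widgets,
# here set to the defaults shown in the diff above.
result = client.predict(
    "What is Semantic and Episodic memory?",   # 'Search' text box
    4,                                         # 'Top n results as context' slider (4-10)
    "Semantic Search - up to 10 Mar 2024",     # 'Search Source' dropdown
    "mistralai/Mixtral-8x7B-Instruct-v0.1",    # 'LLM Model' dropdown
    api_name="/update_with_rag_md",
)

# The same payload the app passes to st.markdown; printed here for inspection.
print(result)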