Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,101 +1,44 @@
|
|
1 |
import streamlit as st
|
2 |
-
import
|
3 |
-
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
#
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
#
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
#
|
31 |
-
|
32 |
-
global view_count
|
33 |
-
while True:
|
34 |
-
time.sleep(5) # Simulate updates every 5 seconds
|
35 |
-
view_count += 1
|
36 |
-
|
37 |
-
# Start the thread to simulate live view count
|
38 |
-
thread = threading.Thread(target=update_view_count)
|
39 |
-
thread.start()
|
40 |
-
|
41 |
-
# Main content area
|
42 |
-
if selected_mode == 'Generate Study Notes (Palm2)':
|
43 |
-
st.header("π Study Notes Generation (Palm2)")
|
44 |
-
|
45 |
-
# User input for class and study topic
|
46 |
-
user_class = st.sidebar.selectbox('π©βπ Select your class:', ['Class 1', 'Class 2', 'Class 3', 'Class 4', 'Class 5', 'Class 6',
|
47 |
-
'Class 7', 'Class 8', 'Class 9', 'Class 10', 'Class 11', 'Class 12'])
|
48 |
-
user_input = st.text_input(f'βοΈ Enter your study topic for {user_class}:', placeholder='e.g., History')
|
49 |
-
|
50 |
-
if st.button('π Generate Study Notes', key="generate_notes", help="Click to generate study notes"):
|
51 |
if user_input.lower() in ['quit', 'exit', 'bye']:
|
52 |
-
st.success("
|
53 |
else:
|
54 |
-
with st.spinner("
|
55 |
prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
|
56 |
-
response =
|
57 |
-
study_notes = response.result
|
58 |
-
|
59 |
-
# Display the generated study notes
|
60 |
-
st.subheader(f"π Study Notes for {user_class} - {user_input}")
|
61 |
-
st.write(study_notes)
|
62 |
-
|
63 |
-
elif selected_mode == 'Use Llama 70b for Notes':
|
64 |
-
st.header("π¦ Llama 70b Mode")
|
65 |
-
|
66 |
-
# User input for Llama 70b mode
|
67 |
-
llama_input = st.text_input('π¬ Enter a message for Llama 70b (type "exit" to quit):', placeholder='e.g., Tell me a joke')
|
68 |
-
|
69 |
-
if st.button('π Get Llama 70b Response', key="get_llama_response", help="Click to get Llama 70b response"):
|
70 |
-
if llama_input.lower() == 'exit':
|
71 |
-
st.success("π Exiting Llama 70b mode. Have a great day!")
|
72 |
-
else:
|
73 |
-
with st.spinner("β Getting response from Llama 70b. Please wait..."):
|
74 |
-
# Make a prediction using Llama 70b API
|
75 |
-
llama_result = client.predict(
|
76 |
-
llama_input,
|
77 |
-
api_name="/chat"
|
78 |
-
)
|
79 |
-
|
80 |
-
# Check if the result is not None
|
81 |
-
if llama_result is not None:
|
82 |
-
# Display the result
|
83 |
-
st.subheader("π¦ Llama 70b Response")
|
84 |
-
st.write(llama_result)
|
85 |
-
else:
|
86 |
-
st.warning("β οΈ Llama 70b API response was None. Please try again later.")
|
87 |
-
|
88 |
-
# Show live view count
|
89 |
-
st.sidebar.markdown("---")
|
90 |
-
st.sidebar.subheader("π Live View Count:")
|
91 |
-
st.sidebar.write(view_count)
|
92 |
|
93 |
-
#
|
94 |
-
st.
|
|
|
95 |
|
96 |
-
|
97 |
-
|
98 |
-
<style>
|
99 |
-
#MainMenu {visibility: hidden;}
|
100 |
-
</style>
|
101 |
-
""", unsafe_allow_html=True)
|
|
|
import streamlit as st
import requests

# Module-level HTTP session: reuses TCP connections across successive
# chat_with_ai() calls instead of opening a new connection per request.
session = requests.Session()
# Function to interact with the AI
def chat_with_ai(message, timeout=30):
    """Send *message* to the free AI chat API and return its reply.

    Parameters
    ----------
    message : str
        The prompt text to send to the API.
    timeout : float, optional
        Seconds to wait for the HTTP response. requests never times out
        by default, so without this a stalled API hangs the app forever.
        Defaults to 30.

    Returns
    -------
    str or dict
        On HTTP 200: the 'response' field of the JSON body (may be None
        if the key is absent). On any failure: a {'error': ...} dict.
    """
    api_url = "https://free-ai-api.devastation-war.repl.co/chat"
    payload = {"message": message}

    try:
        response = session.post(api_url, json=payload, timeout=timeout)
        if response.status_code == 200:
            return response.json().get('response')  # Access 'response' key
        return {"error": "Failed to get a response from the AI API."}
    except (requests.RequestException, ValueError) as e:
        # ValueError also covers a 200 reply whose body is not valid JSON
        # (response.json() raises a JSONDecodeError in that case).
        return {"error": f"Error: {e}"}
# Streamlit app
def main():
    """Render the study-notes UI: pick a class and topic, then show AI notes."""
    st.title("Generate Study Notes")

    # Sidebar class picker (Class 1 .. Class 12) and free-text topic field.
    class_choices = [f'Class {i}' for i in range(1, 13)]
    user_class = st.sidebar.selectbox('Select your class:', class_choices)
    user_input = st.text_input(
        f'Enter your study topic for {user_class}:',
        placeholder='e.g., History',
    )

    # Nothing happens until the user explicitly asks for notes.
    if not st.button('Generate Study Notes'):
        return

    if user_input.lower() in ('quit', 'exit', 'bye'):
        st.success("Goodbye! Have a great day!")
        return

    with st.spinner("Generating study notes. Please wait..."):
        prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
        response = chat_with_ai(prompt)

        # Show whatever the API returned (notes text, or an error dict).
        st.subheader(f"Study Notes for {user_class} - {user_input}")
        st.write(response)
# Script entry point (run via: streamlit run app.py).
if __name__ == "__main__":
    main()