Abhaykoul committed on
Commit 45021eb • 1 Parent(s): 34a1bb4

Update app.py

Files changed (1)
  1. app.py +38 -95
app.py CHANGED
@@ -1,101 +1,44 @@
  import streamlit as st
- import google.generativeai as palm
- from gradio_client import Client
- import time
- import threading
-
- # Initialize the Gradio client with the API URL
- client = Client("https://akdeniz27-llama-2-70b-chat-hf-with-easyllm.hf.space/")
-
- # Information about obtaining a free API key
- st.sidebar.info("🔑 Get your free palm2 API key at [makersuite.google.com/app/apikey](https://makersuite.google.com/app/apikey)")
-
- # Ask the user for palm2 API key
- api_key_palm2 = st.sidebar.text_input("🔒 Enter your palm2 API key for study notes:", type="password")
- palm.configure(api_key=api_key_palm2)
-
- # Styling for the title
- st.title("🚀 Auto Study Notes Generator")
- st.markdown("---")
-
- # Sidebar for settings
- st.sidebar.title("⚙️ Settings")
-
- # User choice for mode selection
- selected_mode = st.sidebar.radio("🔍 Select Mode:", ['Generate Study Notes (Palm2)', 'Use Llama 70b for Notes'])
-
- # Initialize view count
- view_count = 0
-
- # Function to simulate live view count
- def update_view_count():
-     global view_count
-     while True:
-         time.sleep(5)  # Simulate updates every 5 seconds
-         view_count += 1
-
- # Start the thread to simulate live view count
- thread = threading.Thread(target=update_view_count)
- thread.start()
-
- # Main content area
- if selected_mode == 'Generate Study Notes (Palm2)':
-     st.header("📚 Study Notes Generation (Palm2)")
-
-     # User input for class and study topic
-     user_class = st.sidebar.selectbox('👩‍🎓 Select your class:', ['Class 1', 'Class 2', 'Class 3', 'Class 4', 'Class 5', 'Class 6',
-                                       'Class 7', 'Class 8', 'Class 9', 'Class 10', 'Class 11', 'Class 12'])
-     user_input = st.text_input(f'✏️ Enter your study topic for {user_class}:', placeholder='e.g., History')
-
-     if st.button('🚀 Generate Study Notes', key="generate_notes", help="Click to generate study notes"):
          if user_input.lower() in ['quit', 'exit', 'bye']:
-             st.success("👋 Goodbye! Have a great day!")
          else:
-             with st.spinner("⌛ Generating study notes. Please wait..."):
                  prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
-                 response = palm.generate_text(model='models/text-bison-001', prompt=prompt)
-                 study_notes = response.result
-
-                 # Display the generated study notes
-                 st.subheader(f"📚 Study Notes for {user_class} - {user_input}")
-                 st.write(study_notes)
-
- elif selected_mode == 'Use Llama 70b for Notes':
-     st.header("🦙 Llama 70b Mode")
-
-     # User input for Llama 70b mode
-     llama_input = st.text_input('💬 Enter a message for Llama 70b (type "exit" to quit):', placeholder='e.g., Tell me a joke')
-
-     if st.button('🔍 Get Llama 70b Response', key="get_llama_response", help="Click to get Llama 70b response"):
-         if llama_input.lower() == 'exit':
-             st.success("👋 Exiting Llama 70b mode. Have a great day!")
-         else:
-             with st.spinner("⌛ Getting response from Llama 70b. Please wait..."):
-                 # Make a prediction using Llama 70b API
-                 llama_result = client.predict(
-                     llama_input,
-                     api_name="/chat"
-                 )
-
-                 # Check if the result is not None
-                 if llama_result is not None:
-                     # Display the result
-                     st.subheader("🦙 Llama 70b Response")
-                     st.write(llama_result)
-                 else:
-                     st.warning("⚠️ Llama 70b API response was None. Please try again later.")
-
- # Show live view count
- st.sidebar.markdown("---")
- st.sidebar.subheader("👀 Live View Count:")
- st.sidebar.write(view_count)

- # Add a footer with updated text
- st.sidebar.text("© 2023 HelpingAI")

- # Hide Streamlit menu
- st.markdown("""
- <style>
- #MainMenu {visibility: hidden;}
- </style>
- """, unsafe_allow_html=True)
 
  import streamlit as st
+ import requests
+
+ # Create a session for reusing connections
+ session = requests.Session()
+
+ # Function to interact with the AI
+ def chat_with_ai(message):
+     api_url = "https://free-ai-api.devastation-war.repl.co/chat"
+     payload = {"message": message}
+
+     try:
+         with session.post(api_url, json=payload) as response:
+             if response.status_code == 200:
+                 return response.json().get('response')  # Access 'response' key
+             else:
+                 return {"error": "Failed to get a response from the AI API."}
+     except requests.RequestException as e:
+         return {"error": f"Error: {e}"}
+
+ # Streamlit app
+ def main():
+     st.title("Generate Study Notes")
+
+     # User inputs for class and topic
+     user_class = st.sidebar.selectbox('Select your class:', ['Class 1', 'Class 2', 'Class 3', 'Class 4', 'Class 5', 'Class 6',
+                                       'Class 7', 'Class 8', 'Class 9', 'Class 10', 'Class 11', 'Class 12'])
+     user_input = st.text_input(f'Enter your study topic for {user_class}:', placeholder='e.g., History')
+
+     # Generate study notes when prompted
+     if st.button('Generate Study Notes'):
          if user_input.lower() in ['quit', 'exit', 'bye']:
+             st.success("Goodbye! Have a great day!")
          else:
+             with st.spinner("Generating study notes. Please wait..."):
                  prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
+                 response = chat_with_ai(prompt)

+                 # Display generated study notes
+                 st.subheader(f"Study Notes for {user_class} - {user_input}")
+                 st.write(response)

+ if __name__ == "__main__":
+     main()
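
For reference, a minimal sketch of how the new chat_with_ai helper can be exercised outside Streamlit. It mirrors the helper added in this commit (same endpoint, payload, and error handling) and assumes its return convention: a plain string on success, a dict with an "error" key on failure. The command-line usage and the explicit request timeout are illustrative additions, not part of the commit.

import requests

session = requests.Session()

def chat_with_ai(message):
    # Same endpoint and payload shape as the helper added in this commit.
    api_url = "https://free-ai-api.devastation-war.repl.co/chat"
    try:
        # timeout is added for this sketch only; the committed code sends the request without one
        response = session.post(api_url, json={"message": message}, timeout=30)
        if response.status_code == 200:
            return response.json().get('response')
        return {"error": "Failed to get a response from the AI API."}
    except requests.RequestException as e:
        return {"error": f"Error: {e}"}

if __name__ == "__main__":
    # Illustrative prompt in the same format main() builds from the class and topic inputs
    result = chat_with_ai("Provide study notes for Class 10 on the topic: History.")
    if isinstance(result, dict) and "error" in result:
        print("Request failed:", result["error"])  # error path
    else:
        print(result)  # study notes text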