import streamlit as st
import google.generativeai as palm
from gradio_client import Client
import time
import threading

# Initialize the Gradio client with the Llama 2 70B chat Space URL
client = Client("https://akdeniz27-llama-2-70b-chat-hf-with-easyllm.hf.space/")

# Information about obtaining a free API key
st.sidebar.info("🔑 Get your free PaLM 2 API key at [makersuite.google.com/app/apikey](https://makersuite.google.com/app/apikey)")

# Ask the user for a PaLM 2 API key; only configure the client once a key is provided
api_key_palm2 = st.sidebar.text_input("🔒 Enter your PaLM 2 API key for study notes:", type="password")
if api_key_palm2:
    palm.configure(api_key=api_key_palm2)

# App title
st.title("🚀 Auto Study Notes Generator")
st.markdown("---")

# Sidebar for settings
st.sidebar.title("⚙️ Settings")

# User choice for mode selection
selected_mode = st.sidebar.radio("🔍 Select Mode:", ['Generate Study Notes (Palm2)', 'Use Llama 70b for Notes'])

# Initialize view count
view_count = 0

# Background function that simulates a live view count
def update_view_count():
    global view_count
    while True:
        time.sleep(5)  # Simulate an update every 5 seconds
        view_count += 1

# Start the simulated view counter in a daemon thread so it never blocks shutdown
thread = threading.Thread(target=update_view_count, daemon=True)
thread.start()
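# Note: Streamlit re-runs this script on every interaction, so the counter and
# thread above are recreated per rerun; the value shown in the sidebar is a
# simulation rather than a persistent, shared view count.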

# Main content area
if selected_mode == 'Generate Study Notes (Palm2)':
    st.header("📚 Study Notes Generation (Palm2)")

    # User input for class and study topic
    user_class = st.sidebar.selectbox('👩‍🎓 Select your class:', ['Class 1', 'Class 2', 'Class 3', 'Class 4', 'Class 5', 'Class 6',
                                                                  'Class 7', 'Class 8', 'Class 9', 'Class 10', 'Class 11', 'Class 12'])
    user_input = st.text_input(f'✏️ Enter your study topic for {user_class}:', placeholder='e.g., History')

    if st.button('🚀 Generate Study Notes', key="generate_notes", help="Click to generate study notes"):
        if user_input.lower() in ['quit', 'exit', 'bye']:
            st.success("👋 Goodbye! Have a great day!")
        else:
            with st.spinner("⌛ Generating study notes. Please wait..."):
                prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
                response = palm.generate_text(model='models/text-bison-001', prompt=prompt)
                study_notes = response.result

            # Display the generated study notes
            st.subheader(f"📚 Study Notes for {user_class} - {user_input}")
            st.write(study_notes)

elif selected_mode == 'Use Llama 70b for Notes':
    st.header("🦙 Llama 70b Mode")

    # User input for Llama 70b mode
    llama_input = st.text_input('💬 Enter a message for Llama 70b (type "exit" to quit):', placeholder='e.g., Tell me a joke')

    if st.button('🔍 Get Llama 70b Response', key="get_llama_response", help="Click to get Llama 70b response"):
        if llama_input.lower() == 'exit':
            st.success("👋 Exiting Llama 70b mode. Have a great day!")
        else:
            with st.spinner("⌛ Getting response from Llama 70b. Please wait..."):
                # Make a prediction using the Llama 70b API
                llama_result = client.predict(
                    llama_input,
                    api_name="/chat"
                )

            # Only display the result if the API actually returned one
            if llama_result is not None:
                st.subheader("🦙 Llama 70b Response")
                st.write(llama_result)
            else:
                st.warning("⚠️ The Llama 70b API returned no response. Please try again later.")

# Show live view count
st.sidebar.markdown("---")
st.sidebar.subheader("👀 Live View Count:")
st.sidebar.write(view_count)

# Footer
st.sidebar.text("© 2023 HelpingAI")

# Hide the default Streamlit menu
st.markdown("""
    <style>
        #MainMenu {visibility: hidden;}
    </style>
""", unsafe_allow_html=True)