|
import streamlit as st |
|
import google.generativeai as palm |
|
from gradio_client import Client |
|
import time |
|
import threading |
|
|
|
|
|
# Hosted Llama-2-70b-chat Gradio space used by the "Use Llama 70b for Notes"
# mode below. NOTE(review): this opens a network connection at import time on
# every Streamlit re-run — consider st.cache_resource if startup is slow.
client = Client("https://akdeniz27-llama-2-70b-chat-hf-with-easyllm.hf.space/")
|
|
|
|
|
# Ask the user for their Palm2 API key and configure the client with it.
st.sidebar.info("π Get your free palm2 API key at [makersuite.google.com/app/apikey](https://makersuite.google.com/app/apikey)")

api_key_palm2 = st.sidebar.text_input("π Enter your palm2 API key for study notes:", type="password")

# Only configure the Palm2 SDK once a key has actually been entered.
# The original called palm.configure(api_key="") on first render, which
# leaves the client misconfigured and fails later with a confusing
# authentication error instead of a clear "missing key" state.
if api_key_palm2:
    palm.configure(api_key=api_key_palm2)
|
|
|
|
|
# --- Page header and mode selection --------------------------------------
st.title("π Auto Study Notes Generator")
st.markdown("---")

st.sidebar.title("βοΈ Settings")

# The selected label is compared verbatim in the if/elif branches below,
# so these strings must stay in sync with those comparisons.
selected_mode = st.sidebar.radio("π Select Mode:", ['Generate Study Notes (Palm2)', 'Use Llama 70b for Notes'])
|
|
|
|
|
# Simulated "live view" counter. NOTE(review): this is not a real view
# count — it is a module-level integer bumped every 5 seconds by a
# background thread, and Streamlit re-runs this whole script on every
# interaction, resetting it to 0 and spawning a fresh thread each time.
# Persisting a real count would need st.session_state or external storage.
view_count = 0


def update_view_count():
    """Increment the global ``view_count`` by one every 5 seconds, forever."""
    global view_count
    while True:
        time.sleep(5)
        view_count += 1


# daemon=True so this never-ending worker cannot keep the interpreter
# alive at shutdown; the original non-daemon thread blocked clean exit
# and leaked one extra live thread per Streamlit re-run.
thread = threading.Thread(target=update_view_count, daemon=True)
thread.start()
|
|
|
|
|
if selected_mode == 'Generate Study Notes (Palm2)':
    # Palm2-backed study-notes generation for a chosen class and topic.
    st.header("π Study Notes Generation (Palm2)")

    user_class = st.sidebar.selectbox('π©βπ Select your class:', ['Class 1', 'Class 2', 'Class 3', 'Class 4', 'Class 5', 'Class 6',
                                                                   'Class 7', 'Class 8', 'Class 9', 'Class 10', 'Class 11', 'Class 12'])
    user_input = st.text_input(f'βοΈ Enter your study topic for {user_class}:', placeholder='e.g., History')

    if st.button('π Generate Study Notes', key="generate_notes", help="Click to generate study notes"):
        if user_input.lower() in ['quit', 'exit', 'bye']:
            st.success("π Goodbye! Have a great day!")
        elif not user_input.strip():
            # Guard: the original sent an empty prompt to the API.
            st.warning("Please enter a study topic first.")
        else:
            with st.spinner("β Generating study notes. Please wait..."):
                prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
                response = palm.generate_text(model='models/text-bison-001', prompt=prompt)
                study_notes = response.result

            # generate_text returns result=None when generation is blocked
            # or empty; handle it explicitly, mirroring the None check the
            # Llama branch already performs.
            if study_notes is not None:
                st.subheader(f"π Study Notes for {user_class} - {user_input}")
                st.write(study_notes)
            else:
                st.warning("Palm2 returned no text for this topic. Please try again.")
|
|
|
elif selected_mode == 'Use Llama 70b for Notes': |
|
st.header("π¦ Llama 70b Mode") |
|
|
|
|
|
llama_input = st.text_input('π¬ Enter a message for Llama 70b (type "exit" to quit):', placeholder='e.g., Tell me a joke') |
|
|
|
if st.button('π Get Llama 70b Response', key="get_llama_response", help="Click to get Llama 70b response"): |
|
if llama_input.lower() == 'exit': |
|
st.success("π Exiting Llama 70b mode. Have a great day!") |
|
else: |
|
with st.spinner("β Getting response from Llama 70b. Please wait..."): |
|
|
|
llama_result = client.predict( |
|
llama_input, |
|
api_name="/chat" |
|
) |
|
|
|
|
|
if llama_result is not None: |
|
|
|
st.subheader("π¦ Llama 70b Response") |
|
st.write(llama_result) |
|
else: |
|
st.warning("β οΈ Llama 70b API response was None. Please try again later.") |
|
|
|
|
|
# --- Sidebar footer -------------------------------------------------------
st.sidebar.markdown("---")
st.sidebar.subheader("π Live View Count:")
# Snapshot of the counter at the moment this script run reaches this line;
# it does not refresh in the browser until the next Streamlit re-run.
st.sidebar.write(view_count)

st.sidebar.text("Β© 2023 HelpingAI")
|
|
|
|
|
# Hide Streamlit's default hamburger menu by injecting CSS into the page.
st.markdown("""
<style>
#MainMenu {visibility: hidden;}
</style>
""", unsafe_allow_html=True)
|
|