import streamlit as st
import google.generativeai as palm
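# Note: this app uses the legacy PaLM text API (palm.generate_text with text-bison models);
# newer google-generativeai releases are Gemini-oriented, so the installed version is assumed to still expose it.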
from gradio_client import Client

# Initialize the Gradio client with the API URL
client = Client("https://akdeniz27-llama-2-70b-chat-hf-with-easyllm.hf.space/")
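# The hosted Space above is assumed to still be online and to expose a "/chat" endpoint;
# swap in another Llama 2 70B chat Space if it becomes unavailable or rate-limited.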

# Information about obtaining a free API key
st.sidebar.info("Get your free palm2 API key at [makersuite.google.com/app/apikey](https://makersuite.google.com/app/apikey)")

# Ask the user for a PaLM 2 API key; configure the client only once a key is provided
api_key_palm2 = st.sidebar.text_input("Enter your PaLM 2 API key for study notes:", type="password")
if api_key_palm2:
    palm.configure(api_key=api_key_palm2)

# App title and separator
st.title("Auto Study Notes Generator")
st.markdown("---")

# Sidebar for settings
st.sidebar.title("Settings")

# User choice for mode selection
selected_mode = st.sidebar.radio("Select Mode:", ['Generate Study Notes (Palm2)', 'Use Llama 70b for Notes'])

# Main content area
if selected_mode == 'Generate Study Notes (Palm2)':
    st.header("Study Notes Generation (Palm2)")
    user_class = st.sidebar.selectbox('Select your class:', ['Class 1', 'Class 2', 'Class 3', 'Class 4', 'Class 5', 'Class 6',
                                                             'Class 7', 'Class 8', 'Class 9', 'Class 10', 'Class 11', 'Class 12'])
    user_input = st.text_input(f'Enter your study topic for {user_class}:')
    st.markdown("---")

    if st.button('Generate Study Notes'):
        if not api_key_palm2:
            st.warning("Please enter your PaLM 2 API key in the sidebar first.")
        elif not user_input.strip():
            st.warning("Please enter a study topic.")
        elif user_input.lower() in ['quit', 'exit', 'bye']:
            st.success("Goodbye! Have a great day!")
        else:
            with st.spinner("Generating study notes. Please wait..."):
                st.subheader(f"Making notes for you on '{user_input}'")
                prompt = f"Provide study notes for {user_class} on the topic: {user_input}."
                response = palm.generate_text(model='models/text-bison-001', prompt=prompt)
                study_notes = response.result

                # Display the generated study notes (result can be None if the prompt was blocked)
                if study_notes:
                    st.subheader(f"Study Notes for {user_class} - {user_input}:")
                    st.write(study_notes)
                else:
                    st.warning("No study notes were returned. Please try a different topic or check your API key.")

elif selected_mode == 'Use Llama 70b for Notes':
    st.header("Llama 70b Mode")
    llama_input = st.text_input('Enter a message for Llama 70b (type "exit" to quit):')
    st.markdown("---")

    if st.button('Get Llama 70b Response'):
        if not llama_input.strip():
            st.warning("Please enter a message for Llama 70b.")
        elif llama_input.lower() == 'exit':
            st.success("Exiting Llama 70b mode. Have a great day!")
        else:
            with st.spinner("Getting response from Llama 70b. Please wait..."):
                # Make a prediction using Llama 70b API
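                # The exact positional arguments depend on the Space's Gradio API;
                # this assumes the "/chat" endpoint accepts a single user message string.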
                llama_result = client.predict(
                    llama_input,
                    api_name="/chat"
                )

                # Check if the result is not None
                if llama_result is not None:
                    # Display the result
                    st.subheader("Llama 70b Response:")
                    st.write(llama_result)
                else:
                    st.warning("Llama 70b API response was None. Please try again later.")

# Sidebar footer
st.sidebar.markdown("---")
st.sidebar.text("© 2023 HelpingAI")

# Hide Streamlit menu
st.markdown("""
    <style>
        #MainMenu {visibility: hidden;}
    </style>
""", unsafe_allow_html=True)