from dotenv import load_dotenv
import streamlit as st
import os
import google.generativeai as genai

# Load environment variables
load_dotenv()

# Configure Gemini Pro model
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
model = genai.GenerativeModel("gemini-1.5-pro")
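# Note: this assumes GOOGLE_API_KEY is available in a local .env file or the
# environment; if it is missing, os.getenv() returns None and requests to the
# Gemini API will fail.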

# Initialize Streamlit app
st.set_page_config(page_title="Gemmy")

# Custom header with centered text
header_html = """
<style>
    .header {
        text-align: center;
        font-family: Arial, sans-serif;
        font-size: 2em;
        color: #F8F9F9;
        margin-top: 50px;
        margin-bottom: 20px;
    }
</style>
<div class="header">
    Gemmy
</div>
"""
st.markdown(header_html, unsafe_allow_html=True)

# Initialize session state for chat history
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
if "chat_session" not in st.session_state:
    st.session_state["chat_session"] = model.start_chat(history=[])
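# Keeping the chat session in st.session_state means the same multi-turn
# conversation (and its context) survives Streamlit's script reruns instead of
# starting over on every interaction.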

# Function to get response from Gemini model
def get_gemini_response():
    question = st.session_state.input
    chat = st.session_state["chat_session"]
    # stream=True returns the reply in chunks; join them into a single string
    response = chat.send_message(question, stream=True)
    response_text = "".join(chunk.text for chunk in response)
    # Save question and response to chat history
    st.session_state.chat_history.append({"question": question, "response": response_text})
    # Clear the input box after submission
    st.session_state.input = ""
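# get_gemini_response runs as the text input's on_change callback; once it has
# updated the history, Streamlit reruns the script and the loop below redraws
# the full conversation.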

# Display chat history
for entry in st.session_state.chat_history:
    st.write(f"***You:*** {entry['question']}")
    st.write(f"***Gemmy:*** {entry['response']}")
    st.markdown("<hr>", unsafe_allow_html=True)  # Add a line below each response

# Input box
st.text_input(" ", key="input", on_change=get_gemini_response, placeholder="Ask Gemmy")
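
# To run locally (assuming this script is saved as app.py with a .env file
# containing GOOGLE_API_KEY next to it):
#   pip install streamlit python-dotenv google-generativeai
#   streamlit run app.py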