Spaces:
Sleeping
Sleeping
import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv

# Load GEMINI_API_KEY from a local .env file into the process environment.
load_dotenv()

# Fail fast with a readable message if the key is absent — otherwise
# genai.configure(api_key=None) succeeds silently and the first API call
# fails with a cryptic authentication error.
_api_key = os.getenv("GEMINI_API_KEY")
if not _api_key:
    st.error("GEMINI_API_KEY is not set. Add it to your .env file and restart.")
    st.stop()
genai.configure(api_key=_api_key)

# Initialize the chat model used for every turn of the conversation.
model = genai.GenerativeModel("gemini-1.5-flash")
# Streamlit UI design: page metadata plus a small CSS override.
# NOTE(review): the original page_icon was mojibake ("π€") from a broken
# encoding; 🤖 is the evident intent given the app title — confirm.
st.set_page_config(page_title="AI Chatbot", page_icon="🤖", layout="centered")

# Light background and rounded chat bubbles; unsafe_allow_html is required
# for Streamlit to render a raw <style> tag.
st.markdown(
    """
    <style>
    body {
        background-color: #f5f7fa;
    }
    .stChatMessage {
        border-radius: 12px;
        padding: 12px;
    }
    </style>
    """,
    unsafe_allow_html=True,
)
# Title and description shown above the chat area.
# NOTE(review): the original emoji were mojibake ("π€", "π§", "π₯") from a
# broken encoding; the glyphs below are best-guess reconstructions — confirm
# against the original source.
st.title("🤖 AI Chatbot (Gemini 1.5 Flash)")
st.markdown(
    """
    ### Welcome to Your AI Chatbot!
    This chatbot is powered by **Google Gemini 1.5 Flash** and built with **Streamlit**.
    🧠 **What can it do?**
    - Answer questions & provide explanations
    - Help with writing & analysis tasks
    - Assist with problem-solving
    🔥 Start chatting below!
    """
)
# Conversation history lives in Streamlit's session state so it survives
# reruns; create the list on first load.
st.session_state.setdefault("messages", [])

# Replay every stored message in its role-appropriate chat bubble.
for past_message in st.session_state.messages:
    with st.chat_message(past_message["role"]):
        st.markdown(past_message["content"])
# One chat turn: read input, call the model with conversation context,
# render the reply, and persist both sides of the exchange.
user_input = st.chat_input("Type your message...")
if user_input:
    # Echo the user's message immediately.
    st.chat_message("user").markdown(user_input)

    # Convert stored history to the Gemini API schema: the API expects
    # role "model" where we store "assistant".
    chat_history = [
        {"role": "user" if m["role"] == "user" else "model", "parts": [m["content"]]}
        for m in st.session_state.messages
    ]

    # BUG FIX: the original built chat_history but never sent it — only the
    # latest message went to the API, so the bot had no memory of the
    # conversation. Prepend the formatted history to the new message.
    response = model.generate_content(
        contents=chat_history + [{"role": "user", "parts": [user_input]}],
        generation_config={"temperature": 0.7},
        safety_settings=[],
    )
    bot_reply = response.text

    # Render the model's reply.
    st.chat_message("assistant").markdown(bot_reply)

    # Persist this exchange for future turns.
    st.session_state.messages.append({"role": "user", "content": user_input})
    st.session_state.messages.append({"role": "assistant", "content": bot_reply})

    # Keep only the last 6 messages (3 exchanges) to bound prompt size.
    if len(st.session_state.messages) > 6:
        st.session_state.messages = st.session_state.messages[-6:]