import streamlit as st
import os
from dotenv import load_dotenv
import google.generativeai as genai
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_utils import get_chain
from langchain.memory import ChatMessageHistory
from PIL import Image

st.title("Langchain NL2SQL Chatbot")

# Load the Google GenAI API key from the environment (.env file)
load_dotenv()
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

llm = ChatGoogleGenerativeAI(
    model="gemini-pro",
    temperature=0,
    convert_system_message_to_human=True,
)

# Set a default model
if "Gemini_model" not in st.session_state:
    st.session_state["Gemini_model"] = "gemini-pro"

history = ChatMessageHistory()

# Initialize the chat history in session state
if "messages" not in st.session_state:
    st.session_state.messages = []


def invoke_chain(question, messages):
    """Run the NL2SQL chain on the user's question and return its response."""
    chain = get_chain()
    response = chain.invoke(
        {"question": question, "top_k": 3, "messages": history.messages}
    )
    return response


question = st.text_input("Ask a question about the database")

if st.button("Submit"):
    if question:
        response = invoke_chain(question, st.session_state.messages)
        st.markdown(response)

# Set up the sidebar with a button
st.sidebar.title("Database Info")
if st.sidebar.button("Show Database Schema"):
    # Display the database schema image when the button is clicked
    image = Image.open("database_schema.PNG")
    st.image(image, caption="Database Schema", use_column_width=True)