# app.py — Citizens Information AI chatbot (Streamlit + LangChain)
# Source: Hugging Face Space by AlanOC, revision 3773d41.
# NOTE: lines above this header were web-page residue from the scrape
# ("raw / history / blame / No virus / 3.58 kB") and have been removed
# so the file is valid Python.
import sys
import os
import streamlit as st
import configparser
import datetime
import atexit
import pickle
config = configparser.ConfigParser()
from gradio.components import Textbox
from langchain_community.vectorstores import Chroma
from langchain.chains import ConversationalRetrievalChain
from langchain.text_splitter import CharacterTextSplitter
from langchain.memory import ConversationBufferMemory
from langchain_community.llms import OpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain_community.embeddings import OpenAIEmbeddings
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
from langchain.prompts.prompt import PromptTemplate
from langchain.prompts import SystemMessagePromptTemplate
from langchain.prompts import HumanMessagePromptTemplate
from langchain.prompts import ChatMessagePromptTemplate
from langchain.prompts import ChatPromptTemplate
# Load the OpenAI API key from the environment and fail fast when it is
# missing, so the app never starts half-configured.
api_key = os.getenv("OPENAI_API_KEY")
if api_key is None:
    raise ValueError("API key not found. Ensure that the OPENAI_API_KEY environment variable is set.")
# Write the key back into the environment so downstream libraries
# (langchain / openai) that read os.environ pick it up.
os.environ["OPENAI_API_KEY"] = api_key
# Open the pre-built Chroma vector store persisted under ./data, using
# OpenAI embeddings for query encoding. NOTE(review): assumes ./data was
# created with the same embedding model — confirm, or retrieval quality
# will silently degrade.
vectordb = Chroma(persist_directory="./data", embedding_function=OpenAIEmbeddings())
# Define the system message template.
# Fix: corrected three typos that were sent verbatim to the LLM and could
# leak into user-facing answers: "Englsih" -> "English",
# "whereever" -> "wherever", "carrage" -> "carriage".
system_template = """Use only the following pieces of context to answer the question at the end.
If you don't know the answer, just say that you don't know. Don't try to make up an answer.
Always answer in English. Split the answer into easily readable paragraphs. Use bullet points and number points where possible.
Include any useful URLs and/or contact details from the context provided wherever possible.
Always end by adding a carriage return and then saying: Thank you for your query to CitizensInformation.ie chat!
----------------
{context}"""
# Build the chat prompt: a system message carrying the retrieved context
# plus a human message carrying the user's question.
messages = [
    SystemMessagePromptTemplate.from_template(system_template),
    HumanMessagePromptTemplate.from_template("{question}"),
]
qa_prompt = ChatPromptTemplate.from_messages(messages)
# Build the conversational retrieval chain: questions are answered by
# gpt-3.5-turbo against documents fetched from the Chroma store, using
# the custom qa_prompt defined above.
# NOTE(review): temperature=0.9 is high for a factual Q&A bot — confirm
# this is intentional; a lower value would reduce hallucination risk.
pdf_qa = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0.9, model_name="gpt-3.5-turbo"),
    vectordb.as_retriever(),return_source_documents=True,verbose=False,combine_docs_chain_kwargs={"prompt": qa_prompt})
# Module-level conversation memory: list of (question, answer) tuples.
chat_history = []
def ask_alans_ai(query, vectordb):
    """Answer *query* with the retrieval chain and record the exchange.

    Args:
        query: The user's question as a plain string.
        vectordb: Unused; kept only for backward compatibility with
            existing callers. The retriever was already bound into
            ``pdf_qa`` when the chain was built.

    Returns:
        The chain's answer string.
    """
    global chat_history
    # Fix: the chain consumes only "question" and "chat_history"; the
    # extra "vectordb" key previously passed here was dead input.
    result = pdf_qa({"question": query, "chat_history": chat_history})
    # NOTE(review): chat_history grows without bound over a long session.
    chat_history.append((query, result["answer"]))
    return result["answer"]
# Define Streamlit app
def main():
    """Render the chat UI and answer one query per "Ask" click."""
    st.title("Citizens Information AI Chatbot")
    # Assistant greeting rendered with a custom avatar image.
    with st.chat_message("assistant", avatar='./ci.png'):
        st.write("How can we help you today?")
    # Fix: st.text_area() requires a label argument — calling it with no
    # arguments raised a TypeError at runtime.
    user_query = st.text_area("Enter your question")
    if st.button("Ask"):
        # Robustness: skip the model call for an empty/whitespace query.
        if user_query.strip():
            ai_response = ask_alans_ai(user_query, vectordb)
            # Display the AI response
            st.write("Answer:")
            st.write(ai_response)
        else:
            st.warning("Please enter a question before clicking Ask.")
# Run the Streamlit app only when executed directly (not on import).
if __name__ == "__main__":
    main()
# NOTE(review): commented-out debug prints below — consider deleting.
# print("system_template is:", system_template, end="\n")
# print("pdf_qa is:", pdf_qa, end="\n")
# print("messages is:", messages, end="\n")
# print("qa_prompt is:", qa_prompt, end="\n")