import os

import streamlit as st

from langchain.chains import ConversationalRetrievalChain
from langchain.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)
from langchain_community.chat_models import ChatOpenAI
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma



# Retrieve the API key from the environment variables
api_key = os.getenv("OPENAI_API_KEY")

# Check if the API key is available, if not, raise an error
if api_key is None:
    raise ValueError("API key not found. Ensure that the OPENAI_API_KEY environment variable is set.")

# The LangChain OpenAI clients below read OPENAI_API_KEY directly from the
# environment, so no further configuration is needed here.
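
# If setting the variable system-wide is inconvenient, one alternative
# (assuming the python-dotenv package is installed) is to call load_dotenv()
# at the top of this script so the key can live in a local .env file:
#   from dotenv import load_dotenv
#   load_dotenv()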




# Load the persisted Chroma vector store (built in advance) from ./data
vectordb = Chroma(persist_directory="./data", embedding_function=OpenAIEmbeddings())
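
# The ./data directory is assumed to have been populated beforehand. The helper
# below is a minimal sketch of how such a store could be built from a folder of
# plain-text pages; the "./source_docs" path, glob pattern, and chunking
# parameters are illustrative assumptions, and the function is never called here.
def build_vectordb(source_dir="./source_docs", persist_directory="./data"):
    from langchain.text_splitter import CharacterTextSplitter
    from langchain_community.document_loaders import DirectoryLoader, TextLoader

    # Load every .txt file under source_dir as a Document
    loader = DirectoryLoader(source_dir, glob="**/*.txt", loader_cls=TextLoader)
    documents = loader.load()

    # Split the pages into overlapping chunks so retrieval returns focused passages
    splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(documents)

    # Embed the chunks and persist the index to disk for later reuse
    return Chroma.from_documents(
        chunks, OpenAIEmbeddings(), persist_directory=persist_directory
    )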


# Define the system message template
system_template = """Use only the following pieces of context to answer the question at the end. 
If you don't know the answer, just say that you don't know. Don't try to make up an answer. 
Always answer in Englsih. Split the answer into easily readable paragraphs. Use bullet points and number points where possible. 
Include any useful URLs and/or contact details from the context provided whereever possible.
Always end by adding a carrage return and then saying: Thank you for your query to CitizensInformation.ie chat!
----------------
{context}"""

# Create the chat prompt templates
messages = [
    SystemMessagePromptTemplate.from_template(system_template),
    HumanMessagePromptTemplate.from_template("{question}"),
]
qa_prompt = ChatPromptTemplate.from_messages(messages)
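
# For reference, formatting the prompt with sample values (both strings below
# are purely illustrative) yields one system message carrying the retrieved
# context and one human message carrying the question:
#   qa_prompt.format_messages(context="<retrieved passage text>",
#                             question="How do I renew my passport?")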





# Build the conversational retrieval chain: the retriever pulls relevant chunks
# from Chroma and the chat model answers using the custom prompt above.
pdf_qa = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0.9, model_name="gpt-3.5-turbo"),
    vectordb.as_retriever(),
    return_source_documents=True,
    verbose=False,
    combine_docs_chain_kwargs={"prompt": qa_prompt},
)


def ask_alans_ai(query):
    """Send the query and the running chat history to the chain; return the answer."""
    # Streamlit reruns this script on every interaction, so the chat history is
    # kept in session_state rather than in a module-level variable.
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []

    result = pdf_qa.invoke(
        {"question": query, "chat_history": st.session_state.chat_history}
    )
    st.session_state.chat_history.append((query, result["answer"]))
    return result["answer"]
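
# Illustrative usage inside the running app (the query string is an assumption):
#   answer = ask_alans_ai("How do I apply for a medical card?")
# Besides "answer", the chain result also carries "source_documents", which
# could be surfaced in the UI to show where each answer came from.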


# Define the Streamlit app
def main():
    st.title("Citizens Information AI Chatbot")

    # Greeting plus a text area for the user's question
    with st.chat_message("assistant", avatar='./ci.png'):
        st.write("How can we help you today?")
        user_query = st.text_area("Type your question here")

    if st.button("Ask") and user_query:
        # Send the query to the retrieval chain
        ai_response = ask_alans_ai(user_query)

        # Display the AI response
        st.write("Answer:")
        st.write(ai_response)


# Run the Streamlit app
if __name__ == "__main__":
    main()
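
# To launch the app locally (the filename here is an assumption; use whatever
# name this script is saved under):
#   streamlit run app.py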





# print("system_template is:", system_template, end="\n")
# print("pdf_qa is:", pdf_qa, end="\n")
# print("messages is:", messages, end="\n")
# print("qa_prompt is:", qa_prompt, end="\n")