File size: 7,082 Bytes
39de480
 
 
 
 
 
 
 
6ae91a0
39de480
 
 
 
 
 
 
 
 
027bfbf
39de480
 
 
 
 
 
 
 
 
027bfbf
 
 
 
39de480
027bfbf
39de480
 
 
 
 
 
 
 
6ae91a0
 
39de480
 
 
 
 
 
6ae91a0
39de480
 
 
6ae91a0
7bba285
6ae91a0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a83a1b2
6ae91a0
 
 
 
 
 
 
 
 
 
 
 
 
39de480
 
a7b7b83
 
 
 
 
 
 
 
5c67c5c
39de480
 
 
 
 
 
 
 
6ae91a0
 
027bfbf
 
 
 
 
 
 
 
 
 
 
 
 
6ae91a0
027bfbf
 
6ae91a0
 
 
 
 
 
 
 
 
 
 
 
027bfbf
6ae91a0
 
 
 
 
 
 
027bfbf
6ae91a0
39de480
 
 
 
 
6ae91a0
69b7289
 
 
 
 
39de480
6ae91a0
 
 
69b7289
b68a115
69b7289
3715d20
 
39de480
 
6ae91a0
39de480
3715d20
 
027bfbf
3715d20
 
6ae91a0
39de480
 
 
 
 
 
 
 
9e2dc86
 
 
 
 
 
 
 
39de480
6ae91a0
39de480
 
 
 
 
 
 
6ae91a0
39de480
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a83a1b2
e910e3b
39de480
 
6ae91a0
39de480
 
027bfbf
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
"""
Python Backend API to chat with private data  

08/16/2023
D.M. Theekshana Samaradiwakara
"""

import os
import time
import streamlit as st
from streamlit.logger import get_logger

logger = get_logger(__name__)

from ui.htmlTemplates import css, bot_template, user_template, source_template
from config import MODELS, DATASETS

from qaPipeline import QAPipeline
import qaPipeline_functions
from faissDb import create_faiss

# loads environment variables
from dotenv import load_dotenv
load_dotenv()

# Feature flags read from the environment (raw strings, or None when unset).
# NOTE(review): 'HUGGINGFSCE' looks like a typo for 'HUGGINGFACE' — confirm the
# variable name actually used in the .env file before renaming it here.
isHuggingFaceHubEnabled = os.environ.get('ENABLE_HUGGINGFSCE_HUB_MODELS')
isOpenAiApiEnabled = os.environ.get('ENABLE_OPENAI_API_MODELS')

# Page-level configuration; st.set_page_config must be the first st call.
st.set_page_config(page_title="Chat with data",
                       page_icon=":books:")
st.write(css, unsafe_allow_html=True)

# Single QA pipeline instance shared across Streamlit reruns of this script.
qaPipeline = QAPipeline()
# qaPipeline = qaPipeline_functions

def initialize_session_state():
    """Seed st.session_state with default values for any keys not yet set."""
    defaults = {
        "model": MODELS["DEFAULT"],
        "dataset": DATASETS["DEFAULT"],
        "chat_history": None,
        "is_parameters_changed": False,
        "show_source_files": False,
        "user_question": '',
    }

    for key, default_value in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = default_value

def side_bar():
    """Render the sidebar: the chat-parameter form and usage instructions.

    The form collects the chat model and the "show source files" flag; on
    submit the values are persisted to session state via
    parameters_change_button().
    """
    with st.sidebar:
        st.subheader("Chat parameters")

        with st.form('param_form'):
            st.info('Info: use openai chat model for best results')
            chat_model = st.selectbox(
                "Chat model",
                MODELS,
                key="chat_model",
                help="Select the LLM model for the chat",
            )

            # Dataset selection is currently fixed to the default data source.
            st.session_state.dataset = "DEFAULT"

            show_source = st.checkbox(
                label="show source files",
                key="show_source",
                help="Select this to show relevant source files for the query",
            )

            submitted = st.form_submit_button("Save Parameters")

            if submitted:
                parameters_change_button(chat_model, show_source)

        st.markdown("\n")

        st.markdown(
            "### How to use\n"
            "1. Select the chat model\n"  # noqa: E501
            "2. Select \"show source files\" to show the source files related to the answer.📄\n"
            "3. Ask a question about the documents💬\n"
        )


def chat_body():
    """Render the main chat area: a question form that triggers an answer.

    The text input stores its value in st.session_state.user_question (via
    the widget key); submit_user_question() reads it from there, so the
    widget's return value is intentionally unused.
    """
    st.header("Chat with your own data:")
    with st.form('chat_body'):

        st.text_input(
            "Ask a question about your documents:",
            placeholder="enter question",
            key='user_question',
        )

        submitted = st.form_submit_button("Submit")

        if submitted:
            submit_user_question()

def submit_user_question():
    """Answer the question currently held in session state, with a spinner."""
    with st.spinner("Processing"):
        handle_userinput(st.session_state.user_question)


def main():
    """Application entry point: initialise state, then draw sidebar and chat."""
    initialize_session_state()
    side_bar()
    chat_body()

                
def update_parameters_change():
    """Mark the chat parameters as edited but not yet saved."""
    st.session_state["is_parameters_changed"] = True


def parameters_change_button(chat_model, show_source):
    """Persist the submitted form values into session state and confirm.

    Args:
        chat_model: the model option selected in the sidebar form.
        show_source: whether to display source documents with answers.
    """
    st.session_state.model = chat_model
    # Dataset selection is fixed to the default data source.
    st.session_state.dataset = "DEFAULT"
    st.session_state.show_source_files = show_source
    st.session_state.is_parameters_changed = False

    alert =  st.success("chat parameters updated")
    time.sleep(1) # Wait 1 second so the user can read the confirmation
    alert.empty() # Clear the alert

@st.cache_data
def get_answer_from_backend(query, model, dataset):
    """Run the query through the QA pipeline agent.

    Results are memoised by st.cache_data on the (query, model, dataset)
    argument triple, so repeated identical questions skip the backend call.
    """
    # response = qaPipeline.run(query=query, model=model, dataset=dataset)
    response = qaPipeline.run_agent(query=query, model=model, dataset=dataset)
    return response


def show_query_response(query, response, show_source_files):
    """Render the question and answer bubbles, optionally with source files.

    Args:
        query: the user's question (plain text).
        response: either a plain answer string, or a dict with 'answer' and
            'source_documents' keys (as returned by the QA pipeline).
        show_source_files: when True, list each source document in an
            expander under the answer.
    """
    docs = []
    if isinstance(response, dict):
        answer, docs = response['answer'], response['source_documents']
    else:
        answer = response

    st.write(user_template.replace(
                "{{MSG}}", query), unsafe_allow_html=True)
    st.write(bot_template.replace(
                "{{MSG}}",  answer ), unsafe_allow_html=True)

    if show_source_files and docs:
        st.markdown("#### source files : ")
        for source in docs:
            # assumes each doc has .metadata["source"] and .page_content —
            # matches the LangChain Document shape; confirm against pipeline
            with st.expander(source.metadata["source"]):
                st.markdown(source.page_content)

def is_query_valid(query: str) -> bool:
    """Return True for a non-empty, non-whitespace query; else show an error."""
    if query and query.strip():
        return True
    st.error("Please enter a question!")
    return False


def handle_userinput(query):
    """Validate the query, run it through the QA backend and render the answer.

    Any backend failure is logged and reported to the user; exceptions are
    never propagated to the Streamlit runtime.

    Args:
        query: the user's question (plain text).
    """
    try:
        if not is_query_valid(query):
            st.stop()

        model = MODELS[st.session_state.model]
        dataset = DATASETS[st.session_state.dataset]
        show_source_files = st.session_state.show_source_files

        # Try to access openai and deeplake
        print(f">\n model: {model} \n dataset : {dataset} \n show_source_files : {show_source_files}")

        response = get_answer_from_backend(query, model, dataset)

        show_query_response(query, response, show_source_files)

    except Exception as e:
        # Boundary handler: log the cause, show a generic message to the user.
        logger.error("Answer retrieval failed with %s", e)
        st.error("Error Occurred! see log info for more details.")
        print(f"> Error Occurred! {e}.")
        return


# Entry point when the script is executed directly (e.g. `streamlit run`).
if __name__ == "__main__":
    main()

# initialize_session_state()

# side_bar()

# chat_body()