""" Python Backend API to chat with private data 08/16/2023 D.M. Theekshana Samaradiwakara """ import os import streamlit as st from streamlit.logger import get_logger logger = get_logger(__name__) from ui.htmlTemplates import css, bot_template, user_template, source_template from config import MODELS, DATASETS from qaPipeline import QAPipeline from faissDb import create_faiss # loads environment variables from dotenv import load_dotenv load_dotenv() isHuggingFaceHubEnabled = os.environ.get('ENABLE_HUGGINGFSCE_HUB_MODELS') isOpenAiApiEnabled = os.environ.get('ENABLE_OPENAI_API_MODELS') qaPipeline = QAPipeline() def initialize_session_state(): # Initialise all session state variables with defaults SESSION_DEFAULTS = { "model": MODELS["DEFAULT"], "dataset": DATASETS["DEFAULT"], "chat_history": None, "is_parameters_changed":False, "show_source_files": False } for k, v in SESSION_DEFAULTS.items(): if k not in st.session_state: st.session_state[k] = v def main(): st.set_page_config(page_title="Chat with data", page_icon=":books:") st.write(css, unsafe_allow_html=True) initialize_session_state() st.header("Chat with your own data:") user_question = st.text_input( "Ask a question about your documents:", placeholder="enter question", ) # Interactive questions and answers if user_question: with st.spinner("Processing"): handle_userinput(user_question) with st.sidebar: st.subheader("Chat parameters") chat_model = st.selectbox( "Chat model", MODELS, key="chat_model", help="Select the LLM model for the chat", on_change=update_parameters_change, ) # data_source = st.selectbox( # "dataset", # DATASETS, # key="data_source", # help="Select the private data_source for the chat", # on_change=update_parameters_change, # ) st.session_state.dataset = "DEFAULT" show_source = st.checkbox( label="show source files", key="show_source", help="Select this to show relavant source files for the query", on_change=update_parameters_change, ) if st.session_state.is_parameters_changed: if st.button("Update"): st.session_state.model = chat_model st.session_state.dataset = "DEFAULT" st.session_state.show_source_files = show_source st.success("done") st.session_state.is_parameters_changed = False return st.markdown("\n") if st.button("Create FAISS db"): with st.spinner('creating faiss vector store'): create_faiss() st.success('faiss saved') st.markdown( "### How to use\n" "1. Select the chat model\n" # noqa: E501 "2. Select \"show source files\" to show the source files related to the answer.📄\n" "3. 
Ask a question about the documents💬\n" ) def update_parameters_change(): st.session_state.is_parameters_changed = True def get_answer_from_backend(query, model, dataset): response = qaPipeline.run(query=query, model=model, dataset=dataset) return response def show_query_response(query, response, show_source_files): answer, docs = response['result'], response['source_documents'] st.write(user_template.replace( "{{MSG}}", query), unsafe_allow_html=True) st.write(bot_template.replace( "{{MSG}}", answer ), unsafe_allow_html=True) if show_source_files: # st.write(source_template.replace( # "{{MSG}}", "source files" ), unsafe_allow_html=True) st.markdown("#### source files : ") for source in docs: # st.info(source.metadata) with st.expander(source.metadata["source"]): st.markdown(source.page_content) # st.write(response) def is_query_valid(query: str) -> bool: if (not query) or (query.strip() == ''): st.error("Please enter a question!") return False return True def handle_userinput(query): # Get the answer from the chain try: if not is_query_valid(query): st.stop() model = MODELS[st.session_state.model] dataset = DATASETS[st.session_state.dataset] show_source_files = st.session_state.show_source_files # Try to access openai and deeplake print(f">\n model: {model} \n dataset : {dataset} \n show_source_files : {show_source_files}") response = get_answer_from_backend(query, model, dataset) show_query_response(query, response, show_source_files) except Exception as e: # logger.error(f"Answer retrieval failed with {e}") st.error(f"Error : {e}")#, icon=":books:") return if __name__ == "__main__": main()
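
# ---------------------------------------------------------------------------
# Note: `handle_userinput` resolves the selection via MODELS[st.session_state.model]
# and DATASETS[st.session_state.dataset], which assumes the "DEFAULT" entry in
# config.py maps to another key of the same dict. A minimal sketch of such a
# config is shown below; the model and dataset names are hypothetical
# assumptions, not the project's actual config.py.
#
# MODELS = {
#     "DEFAULT": "gpt-3.5-turbo",               # alias pointing at a concrete key
#     "gpt-3.5-turbo": "openai/gpt-3.5-turbo",
#     "flan-t5-xl": "google/flan-t5-xl",
# }
# DATASETS = {
#     "DEFAULT": "my_documents",
#     "my_documents": "data/my_documents",
# }
# ---------------------------------------------------------------------------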