# import for typing
from langchain.chains import RetrievalQAWithSourcesChain

# gradio
import gradio as gr

# preconfigured retrieval chain from the local qa module
from qa import qa

#####
#
# Gradio fns
#
#####

def create_gradio_interface(qa: RetrievalQAWithSourcesChain):
    """
    Create a Gradio interface for the QA model.
    """
    def add_text(history, text):
        # append the user's message with an empty slot for the bot's reply
        history = history + [(text, None)]
        return history, ""

    def bot(history):
        # answer the latest user message and append the retrieved sources
        response = infer(history[-1][0], history)
        sources = [doc.metadata.get("source", "") for doc in response["source_documents"]]
        src_list = "\n".join(sources)
        print_this = response["answer"] + "\n\n\n Sources: \n\n\n" + src_list

        history[-1][1] = print_this  # response['answer']
        return history

    def infer(question, history):
        # RetrievalQAWithSourcesChain reads the "question" key; extra keys are passed through
        query = question
        result = qa({"query": query, "history": history, "question": question})
        return result

    def vote(data: gr.LikeData):
        if data.liked:
            print("You upvoted this response: ")
        else:
            print("You downvoted this response: ")

    css = """
    #col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
    """

    title = """
This is a privately hosted Docs AI Buddy ;)