"""Gradio front end for a retrieval-augmented chatbot over a Chroma index of PDFs."""

import os
import time

import gradio as gr
import openai

import core
import models

# OpenAI credentials are read from the environment. Assigning them to the
# legacy (pre-1.0) module-level `openai` client is an assumption about how
# `core` and `models` reach the API; remove the last two lines if those
# modules configure the client themselves.
api_key = os.environ["OPENAI_API_KEY"]
api_base = os.environ["OPENAI_API_BASE"]
openai.api_key = api_key
openai.api_base = api_base


def chatbot_initialize():
    """Build the retrieval-augmented chatbot backed by the Chroma PDF index."""
    retriever = core.retriever.ChromaRetriever(
        pdf_dir="",
        collection_name="langchain",
        split_args={"size": 2048, "overlap": 10},
        embed_model=models.BiomedModel(),
    )
    Chatbot = core.chatbot.RetrievalChatbot(retriever=retriever)
    return Chatbot


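# Gradio callback wired to `msg.submit` below: takes the query text, the chat
# history, an optional image path, and the plain-text history log; returns a
# cleared textbox, the updated chat, the retrieval logs for the sidebar, and
# the updated log.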
def respond(query, chat_history, img_path, chat_history_string):
    global Chatbot
    response, logs = Chatbot.response(query, image_path=img_path, return_logs=True)
    chat_history.append((query, response))
    if img_path is None:
        chat_history_string += "Query: " + query + "\nImage: None" + "\nResponse: " + response + "\n\n\n"
    else:
        chat_history_string += "Query: " + query + "\nImage: " + img_path + "\nResponse: " + response + "\n\n\n"
    return "", chat_history, logs, chat_history_string


if __name__ == "__main__":
    Chatbot = chatbot_initialize()

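    # UI layout: the left column holds the chat window, query box, image
    # upload, and a clear button; the right column surfaces the retriever's
    # subquestions and a copyable plain-text chat history.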
    with gr.Blocks() as demo:
        with gr.Row():
            with gr.Column(scale=2):
                chatbot = gr.Chatbot()
                msg = gr.Textbox(label="Query", show_label=True)
                img = gr.Image(type="filepath")
                clear = gr.ClearButton([msg, chatbot])
            with gr.Column(scale=1):
                sidebar = gr.Textbox(label="Subquestions", show_label=True, show_copy_button=True, interactive=False, max_lines=30)
                history = gr.Textbox(label="Copy Chat History", show_label=True, show_copy_button=True, interactive=False, max_lines=5)
        msg.submit(respond, inputs=[msg, chatbot, img, history], outputs=[msg, chatbot, sidebar, history])

    demo.queue().launch()