Spaces:
Sleeping
Sleeping
File size: 3,971 Bytes
cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 d0c74c0 cc83df3 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 |
import os
from typing import List, Tuple
import gradio as gr
from assistant import OAIAssistant
import utils
from dotenv import load_dotenv
load_dotenv()
class PPTChat:
    """Gradio chat UI backed by an OpenAI Assistant with file search.

    Holds a single ``OAIAssistant`` and one active conversation thread.
    The thread and the assistant's uploaded files can be reset from the UI.
    """

    def __init__(self) -> None:
        # IDs come from the environment (.env is loaded at import time).
        assistant_id = os.environ.get("ASSISTANT_ID")
        vector_store_id = os.environ.get("VECTORSTORE_ID")
        self.assistant = OAIAssistant(
            assistant_id=assistant_id, vectorstore_id=vector_store_id
        )
        # One thread per app instance; replaced by create_thread()/clear_all().
        self.thread_id = self.assistant.create_thread().id

    def chat(
        self, message: str, history: List[Tuple[str, str]]
    ) -> Tuple[str, List[Tuple[str, str]]]:
        """Send *message* on the current thread and append the answer to *history*.

        Returns ``("", history)`` so a bound textbox is cleared after submit.
        """
        response = self.assistant.chat(message, self.thread_id)
        history.append((message, response["answer"]))
        return (
            "",
            history,
        )

    def create_thread(self) -> None:
        """Replace the current thread with a fresh one, notifying the UI."""
        try:
            self.assistant.delete_thread(self.thread_id)
            self.thread_id = self.assistant.create_thread().id
            gr.Info(message="New thread created. Start as New!")
        except Exception as e:
            # gr.Error is an exception class: it only reaches the UI when
            # *raised*. The previous `gr.Error(...)` call was a silent no-op;
            # gr.Warning actually displays without aborting the handler.
            gr.Warning(message=f"Unable to create new thread. Error: {e}")

    def clear_all(self) -> List[Tuple[str, str]]:
        """Reset the conversation: new thread, delete assistant files, clear chat."""
        self.create_thread()  # shows its own Info toast on success
        try:
            for file_id in self.assistant.get_files_list():
                self.assistant.remove_file(file_id=file_id)
            gr.Info(message="Deleted files in assistant")
        except Exception as e:
            # Best-effort cleanup: warn but still clear the chatbot below
            # (gr.Error would be a no-op unless raised; see create_thread).
            gr.Warning(message=f"Unable to delete files. Error: {e}")
        gr.Info("Chat is cleared.")
        return [("Clear Chatbot", "Chatbot cleared.")]

    def add_file(self, file: gr.File) -> None:
        """Attach an uploaded file to the assistant."""
        self.assistant.add_file(file)

    def add_message(
        self, history: List[Tuple], message: dict
    ) -> Tuple[List[Tuple], gr.MultimodalTextbox]:
        """Record a MultimodalTextbox submission (files + text) into *history*.

        Supported files are forwarded to the assistant; unsupported ones are
        only echoed. Returns the updated history and a reset, re-enabled input.
        """
        for file in message["files"]:
            file_type = utils.file_type(file)
            if file_type:
                history.append((f"Uploaded {file_type.upper()} file: {file}", None))
                self.add_file(file)
            else:
                history.append((f"Unsupported file type: {file}", None))
        if message["text"]:
            history.append((message["text"], None))
        return history, gr.MultimodalTextbox(value=None, interactive=True)

    def bot_response(self, history: List[Tuple]) -> List[Tuple]:
        """Fill in the assistant's reply for the last user message in *history*."""
        last_message = history[-1][0]
        response = self.assistant.chat(last_message, self.thread_id)
        # Bug fix: chat() returns a dict — show its "answer" text (matching
        # PPTChat.chat) instead of rendering the raw dict in the chatbot.
        history[-1] = (last_message, response["answer"])
        return history

    def create_interface(self):
        """Build the Gradio Blocks UI and wire its event handlers."""
        with gr.Blocks(fill_height=True) as demo:
            chatbot = gr.Chatbot(
                elem_id="chatbot",
                bubble_full_width=False,
                scale=1,
            )
            chat_input = gr.MultimodalTextbox(
                interactive=True,
                file_count="multiple",
                placeholder="Enter message or upload file...",
                show_label=False,
            )
            new_thread_button = gr.Button(value="Create New Thread")
            clear_button = gr.Button(value="Clear All")
            # submit -> echo the user turn, then fetch the bot reply,
            # then re-enable the input box.
            chat_msg = chat_input.submit(
                self.add_message, [chatbot, chat_input], [chatbot, chat_input]
            )
            bot_msg = chat_msg.then(
                self.bot_response, chatbot, chatbot, api_name="bot_response"
            )
            bot_msg.then(
                lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input]
            )
            new_thread_button.click(self.create_thread)
            clear_button.click(self.clear_all, outputs=chatbot)
        return demo
if __name__ == "__main__":
    # Build the app and serve the Gradio UI.
    app = PPTChat()
    app.create_interface().launch()