# NOTE: "Spaces: Sleeping" — HuggingFace Spaces status banner captured when this
# file was scraped; it is not part of the program source.
import os | |
from typing import List, Tuple | |
import gradio as gr | |
from assistant import OAIAssistant | |
import utils | |
from dotenv import load_dotenv | |
load_dotenv() | |
class PPTChat:
    """Gradio chat UI backed by an OpenAI Assistant with file search.

    Reads ``ASSISTANT_ID`` and ``VECTORSTORE_ID`` from the environment
    (populated by ``load_dotenv()`` at import time) and keeps a single
    assistant thread for the lifetime of the instance.
    """

    def __init__(self) -> None:
        assistant_id = os.environ.get("ASSISTANT_ID")
        vector_store_id = os.environ.get("VECTORSTORE_ID")
        self.assistant = OAIAssistant(
            assistant_id=assistant_id, vectorstore_id=vector_store_id
        )
        # One conversation thread per app instance; replaced by
        # create_thread() / clear_all().
        self.thread_id = self.assistant.create_thread().id

    def chat(self, message: str, history: List[Tuple]) -> Tuple[str, List[Tuple]]:
        """Send *message* to the assistant and append the exchange to *history*.

        Returns ``("", history)`` so a bound textbox is cleared on submit.
        """
        response = self.assistant.chat(message, self.thread_id)
        history.append((message, response["answer"]))
        return (
            "",
            history,
        )

    def create_thread(self) -> None:
        """Discard the current assistant thread and start a fresh one.

        Raises:
            gr.Error: if the thread could not be deleted/recreated, so the
                failure is surfaced in the UI.
        """
        try:
            self.assistant.delete_thread(self.thread_id)
            self.thread_id = self.assistant.create_thread().id
            gr.Info(message="New thread created. Start as New!")
        except Exception as e:
            # BUG FIX: gr.Error is an Exception and must be *raised* to show
            # in the UI; the previous code only constructed it, so failures
            # were silently swallowed.
            raise gr.Error(message=f"Unable to create new thread. Error: {e}") from e

    def clear_all(self) -> List[Tuple]:
        """Reset the session: new thread, delete assistant files, clear chat.

        Returns the placeholder history used to repopulate the Chatbot.
        """
        # create_thread() already shows its own Info toast; the previous
        # duplicate "Created new thread" toast was removed.
        self.create_thread()
        try:
            assistant_file_ids = self.assistant.get_files_list()
            for file_id in assistant_file_ids:
                self.assistant.remove_file(file_id=file_id)
            gr.Info(message="Deleted files in assistant")
        except Exception as e:
            # Best effort: warn (visibly, via a toast) but still clear the
            # chat below. The old code built gr.Error without raising it,
            # which displayed nothing at all.
            gr.Warning(message=f"Unable to delete files. Error: {e}")
        gr.Info("Chat is cleared.")
        return [("Clear Chatbot", "Chatbot cleared.")]

    def add_file(self, file: gr.File) -> None:
        """Upload *file* to the assistant.

        NOTE(review): in practice *file* is a local file path string coming
        from the MultimodalTextbox, not a gr.File — confirm OAIAssistant
        accepts a path.
        """
        self.assistant.add_file(file)

    def add_message(
        self, history: List[Tuple], message: dict
    ) -> Tuple[List[Tuple], gr.MultimodalTextbox]:
        """Record the user's submission (files + text) into *history*.

        Supported files are uploaded to the assistant; unsupported files are
        noted in the chat. Returns the updated history and a reset textbox.
        """
        for file in message["files"]:
            file_type = utils.file_type(file)
            if file_type:
                history.append((f"Uploaded {file_type.upper()} file: {file}", None))
                self.add_file(file)
            else:
                history.append((f"Unsupported file type: {file}", None))
        if message["text"]:
            history.append((message["text"], None))
        return history, gr.MultimodalTextbox(value=None, interactive=True)

    def bot_response(self, history: List[Tuple]) -> List[Tuple]:
        """Fill in the assistant's answer for the most recent user turn."""
        last_message = history[-1][0]
        response = self.assistant.chat(last_message, self.thread_id)
        # BUG FIX: chat() returns a dict; show its "answer" field (as in
        # PPTChat.chat above) instead of dumping the raw dict into the chat.
        history[-1] = (last_message, response["answer"])
        return history

    def create_interface(self):
        """Build and return the Gradio Blocks app (not launched here)."""
        with gr.Blocks(fill_height=True) as demo:
            chatbot = gr.Chatbot(
                elem_id="chatbot",
                bubble_full_width=False,
                scale=1,
            )
            chat_input = gr.MultimodalTextbox(
                interactive=True,
                file_count="multiple",
                placeholder="Enter message or upload file...",
                show_label=False,
            )
            new_thread_button = gr.Button(value="Create New Thread")
            clear_button = gr.Button(value="Clear All")
            # Submit pipeline: record user turn -> get assistant reply ->
            # re-enable the input box.
            chat_msg = chat_input.submit(
                self.add_message, [chatbot, chat_input], [chatbot, chat_input]
            )
            bot_msg = chat_msg.then(
                self.bot_response, chatbot, chatbot, api_name="bot_response"
            )
            bot_msg.then(
                lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input]
            )
            new_thread_button.click(self.create_thread)
            clear_button.click(self.clear_all, outputs=chatbot)
        return demo
if __name__ == "__main__":
    # Build the chat application and serve the Gradio UI locally.
    app = PPTChat()
    app.create_interface().launch()