# Import necessary libraries
import os
import time

import openai
import gradio as gr
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# Set OpenAI API key from environment variable
openai.api_key = os.getenv("OPENAI_API_KEY")

# Global list that stores the full conversation sent to the OpenAI API
messages = []


# Add the user's text to the API message list and the on-screen chat history
def add_user_text(chat_history, user_text):
    # Print the user's text from typing
    print('user_text_from_typing: ', user_text)
    global messages
    # Record the user's turn with the 'user' role
    messages = messages + [{"role": "user", "content": user_text}]
    # Append the user's text to the chat history, with the bot reply still pending
    chat_history = chat_history + [[user_text, None]]
    # Clear the textbox and disable it until the bot has answered
    return chat_history, gr.update(value="", interactive=False)


# Stream the bot's response into the chat history
def bot_respond(chat_history, openai_gpt_key, model_choice):
    global messages
    # Prefer a key typed into the UI over the one loaded from .env
    if openai_gpt_key != "":
        openai.api_key = openai_gpt_key

    # Generate a response from the OpenAI Chat API using the selected model
    bot_response = openai.ChatCompletion.create(
        model=model_choice,
        messages=messages,
    )
    bot_text = bot_response["choices"][0]["message"]["content"]
    # Print the bot's response
    print("bot_text: ", bot_text)

    # Record the bot's turn with the 'assistant' role
    messages = messages + [{"role": "assistant", "content": bot_text}]

    # Initialize the pending bot reply, then yield it character by character
    chat_history[-1][1] = ""
    for character in bot_text:
        chat_history[-1][1] += character
        time.sleep(0.02)
        yield chat_history


# Save the current conversation to a timestamped text file and reset state
def save_chat_history(chat_history):
    global messages
    messages = []
    # You could format this as you like; this just creates a text representation
    formatted_chat = "\n".join(
        [f"User: {message[0]}\nBot: {message[1]}" for message in chat_history]
    )
    # Use a timestamp so each conversation gets a unique filename
    timestamp = time.strftime("%Y%m%d-%H%M%S")
    with open(f'chat_history_{timestamp}.txt', 'w') as f:
        f.write(formatted_chat)
    # Return an empty list so the Chatbot display is cleared
    return []


# Create a Gradio interface
with gr.Blocks() as demo:
    # Textbox for the OpenAI GPT API key
    openai_gpt_key = gr.Textbox(label="OpenAI GPT API Key", value="", placeholder="sk..")
    # Dropdown menu for selecting the model
    model_choice = gr.Dropdown(label="Model Options", choices=['gpt-3.5-turbo', 'gpt-4'])
    # Button to save the chat history and restart
    clear_btn = gr.Button("Clear for Restart")
    # Chat history display
    chat_history = gr.Chatbot([], elem_id="chat_history").style(height=500)

    with gr.Box():
        # Textbox for user input
        user_text = gr.Textbox(
            show_label=False,
            placeholder="Enter text and press enter",
        ).style(container=False)

    # On submit: add the user's text, stream the bot's reply, then re-enable the textbox
    user_text.submit(
        add_user_text, [chat_history, user_text], [chat_history, user_text], queue=False).then(
        bot_respond, [chat_history, openai_gpt_key, model_choice], chat_history).then(
        lambda: gr.update(interactive=True), None, [user_text], queue=False)

    # Clear button click event: save the conversation to disk and empty the chat display
    clear_btn.click(save_chat_history, chat_history, chat_history, queue=False)

if __name__ == "__main__":
    # Enable queuing so the streaming generator in bot_respond works
    demo.queue()
    # Launch the Gradio interface
    demo.launch()
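
# Note: this script relies on the pre-1.0 `openai` SDK (`openai.ChatCompletion.create`,
# `openai.api_key`) and on Gradio 3.x (`gr.Box`, the `.style(...)` helpers). A rough
# install sketch follows; the exact version pins are an assumption, not from the source:
#   pip install "openai<1" "gradio<4" python-dotenv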