# Joe_Chip_Alpha / app.py
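"""Joe Chip alpha: a Gradio chat UI that streams replies from the OpenAI Chat API,
logs the session to Weights & Biases, and saves each conversation to a timestamped
text file when the chat is cleared."""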
# Import necessary libraries
import os
import time

import gradio as gr
import openai
import wandb
from dotenv import load_dotenv

# Load environment variables from .env file (expects OPENAI_API_KEY and WANDB_API_KEY)
load_dotenv()

# Set API keys from environment variables rather than hardcoding secrets in source
openai.api_key = os.getenv("OPENAI_API_KEY")

# Log this session to Weights & Biases
wandb.login(key=os.getenv("WANDB_API_KEY"))
wandb.init(project="Joe 0.1", job_type="generation", name="Finn")
# Load the system prompt that steers the assistant
with open('initial_instructions.txt', 'r') as f:
    initial_instructions_text = f.read()

initial_instructions = {
    "role": "system",
    "content": initial_instructions_text
}
# Chat message list for the API, seeded with the system instructions
messages = [initial_instructions]
# Function to add user's text to chat history
def add_user_text(chat_history, user_text):
    # Print the user's text from typing
    print('user_text_from_typing: ', user_text)
    global messages
    # Add user's text to the messages list with 'user' role
    messages += [{"role": 'user', 'content': user_text}]
    # Add user's text to the chat history
    chat_history = chat_history + [(user_text, None)]
    # Return the updated chat history; clear the textbox and disable it while the bot replies
    return chat_history, gr.update(value="", interactive=False)
# Function for the bot to respond
def bot_respond(chat_history, openai_gpt_key, model_choice):
    global messages
    # Use the key entered in the UI, if one was provided
    if openai_gpt_key:
        openai.api_key = openai_gpt_key
    # Generate response from OpenAI Chat API using the selected model
    bot_response = openai.ChatCompletion.create(
        model=model_choice,
        messages=messages,
    )
    # Extract the assistant's reply text from the API response
    bot_text = bot_response["choices"][0]["message"]["content"]
    # Print the bot's response
    print("bot_text: ", bot_text)
    # Add bot's response to the messages list with 'assistant' role
    messages = messages + [{"role": 'assistant', 'content': bot_text}]
    # Start the latest chat entry empty, then stream the reply
    chat_history[-1][1] = ""
    # Yield the chat history with the bot's response character by character
    for character in bot_text:
        chat_history[-1][1] += character
        time.sleep(0.02)
        yield chat_history
def save_chat_history():
    global messages
    # Format the conversation as "role: content" lines
    formatted_chat = "\n".join([f"{message['role']}: {message['content']}" for message in messages])
    # Use a timestamp for a unique filename for each conversation
    timestamp = time.strftime("%Y%m%d-%H%M%S")
    with open(f'chat_history_{timestamp}.txt', 'w') as f:
        f.write(formatted_chat)
    # Clear the messages list for a new conversation
    messages = []
# Create a Gradio interface
with gr.Blocks() as demo:
    # Textbox for OpenAI GPT API Key
    openai_gpt_key = gr.Textbox(label="OpenAI GPT API Key", value="", placeholder="sk..")
    # Dropdown menu for selecting the model, defaulting to gpt-3.5-turbo
    model_choice = gr.Dropdown(label="Model Options", choices=['gpt-3.5-turbo', 'gpt-4'], value='gpt-3.5-turbo')
    # Button to clear the chat history and restart
    clear_btn = gr.Button("Clear for Restart")
    # Chat history display
    chat_history = gr.Chatbot([], elem_id="chat_history").style(height=500)
    with gr.Box():
        # Textbox for user input
        user_text = gr.Textbox(
            show_label=False,
            placeholder="Enter text and press enter",
        ).style(container=False)
    # Handle user input and bot response, then re-enable the textbox
    user_text.submit(
        add_user_text, [chat_history, user_text], [chat_history, user_text], queue=False).then(
        bot_respond, [chat_history, openai_gpt_key, model_choice], chat_history).then(
        lambda: gr.update(interactive=True), None, [user_text], queue=False)
    # Clear button click event: save the transcript and reset the chat display
    clear_btn.click(
        lambda: clear_and_restart(),
        None,
        chat_history,
        queue=False
    )
def clear_and_restart():
    global messages
    save_chat_history()  # Save the chat history to a timestamped file
    messages = [initial_instructions]  # Reset messages to just the initial instructions
    return []  # Return an empty list so the Chatbot display is cleared
if __name__ == "__main__":
    # Queue the Gradio interface (required for the streaming generator response)
    demo.queue()
    # Launch the Gradio interface
    demo.launch()