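"""Minimal Gradio chat UI for the OpenAI Chat Completions API.

The bot's reply streams into the chatbot character by character; the
"Clear for Restart" button saves the transcript to a timestamped text
file and resets the conversation.
"""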
# Import necessary libraries
import os
import time

import gradio as gr
import openai
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# Set OpenAI API key from environment variable
openai.api_key = os.getenv("OPENAI_API_KEY")

# Conversation history sent to the OpenAI API: a list of {'role': ..., 'content': ...} dicts
messages = []

# Function to add user's text to chat history
def add_user_text(chat_history, user_text):
    # Log the text the user typed
    print("user_text_from_typing:", user_text)

    global messages
    # Append the user's message to the conversation with the 'user' role
    messages = messages + [{"role": "user", "content": user_text}]

    # Add the user's message to the chat history (the bot's reply is filled in later)
    chat_history = chat_history + [(user_text, None)]
    # Return the updated history, then clear and disable the input box while the bot responds
    return chat_history, gr.update(value="", interactive=False)

# Function for the bot to respond
def bot_respond(chat_history, openai_gpt_key, model_choice):
    global messages

    # Use the key from the textbox if one was provided, otherwise keep the .env key
    if openai_gpt_key != "":
        openai.api_key = openai_gpt_key

    # Generate a response with the selected model (uses the pre-1.0 `openai` SDK interface)
    bot_response = openai.ChatCompletion.create(
        model=model_choice,
        messages=messages,
    )
    bot_text = bot_response["choices"][0]["message"]["content"]
    # Print the bot's response
    print("bot_text: ", bot_text)

    # Append the bot's reply to the conversation with the 'assistant' role
    messages = messages + [{"role": "assistant", "content": bot_text}]

    # Start the bot side of the latest chat entry empty, then stream into it
    chat_history[-1][1] = ""

    # Yield the chat history with the bot's response character by character
    for character in bot_text:
        chat_history[-1][1] += character
        time.sleep(0.02)
        yield chat_history

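# Function to save the chat history to a timestamped file and reset the conversation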
def save_chat_history(chat_history):
    global messages
    # Reset the conversation state so the next chat starts fresh
    messages = []
    # Plain-text transcript; adjust the formatting as you like
    formatted_chat = "\n".join(f"User: {user_msg}\nBot: {bot_msg}" for user_msg, bot_msg in chat_history)
    
    # Use a timestamp for a unique filename for each conversation
    timestamp = time.strftime("%Y%m%d-%H%M%S")
    with open(f'chat_history_{timestamp}.txt', 'w') as f:
        f.write(formatted_chat)

# Create a Gradio interface
with gr.Blocks() as demo:
    # Textbox for OpenAI GPT API Key
    openai_gpt_key = gr.Textbox(label="OpenAI GPT API Key", value="", placeholder="sk-...")
    
    # Dropdown menu for selecting the model
    model_choice = gr.Dropdown(label="Model Options", choices=['gpt-3.5-turbo', 'gpt-4'])
    
    # Button to clear the chat history and restart
    clear_btn = gr.Button("Clear for Restart")
    
    # Chat history display
    chat_history = gr.Chatbot([], elem_id="chat_history").style(height=500)

    with gr.Box():
        # Textbox for user input
        user_text = gr.Textbox(
            show_label=False,
            placeholder="Enter text and press enter",
        ).style(container=False)

    # On submit: add the user's message, stream the bot's reply, then re-enable the input box
    user_text.submit(
        add_user_text, [chat_history, user_text], [chat_history, user_text], queue=False).then(
            bot_respond, [chat_history, openai_gpt_key, model_choice], chat_history).then(
                lambda: gr.update(interactive=True), None, [user_text], queue=False)

    # Clear button: save the transcript (which also resets `messages`), then empty the chatbot
    def clear_and_save(history):
        save_chat_history(history)
        return []

    clear_btn.click(clear_and_save, chat_history, chat_history, queue=False)


if __name__ == "__main__":
    # Queue the Gradio interface
    demo.queue()
    # Launch the Gradio interface
    demo.launch()