Update app.py
- Integrated the OpenAI API to use GPT-3.5 for general conversation.
- Added a new function `get_gpt_response()` that handles the interaction with the GPT-3.5 model.
- Updated the `chatbot()` function to use GPT-3.5 for responses that aren't related to the quiz or card generation.
- Added the `openai` library to the requirements (see the sketch below).
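The requirements change itself is not part of the app.py diff below; assuming a standard pip-style requirements.txt, the addition would be a single line (the version pin is an assumption, chosen because the code uses the pre-1.0 `openai.ChatCompletion` interface):

```
# requirements.txt (sketch; this file is not shown in the diff)
openai<1.0
```

The `OPENAI_API_KEY` read via `os.environ.get(...)` would typically be provided as a Space secret so that it is available to app.py as an environment variable.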
app.py (CHANGED)
```diff
@@ -2,6 +2,11 @@ import gradio as gr
 import random
 from diffusers import StableDiffusionPipeline
 import torch
+import openai
+import os
+
+# Set up OpenAI API
+openai.api_key = os.environ.get("OPENAI_API_KEY")
 
 # Quiz questions and answers
 christmas_quiz = [
@@ -41,9 +46,28 @@ def generate_image(prompt):
     image = pipe(prompt, num_inference_steps=50).images[0]
     return image
 
+def get_gpt_response(prompt, history):
+    messages = [
+        {"role": "system", "content": "You are a helpful Christmas-themed chatbot named Holly. You can answer questions about Christmas, offer holiday tips, and engage in festive conversation. You also know about the Christmas quiz and card generation features of this application."},
+    ]
+    for h in history:
+        messages.append({"role": "user", "content": h[0]})
+        messages.append({"role": "assistant", "content": h[1]})
+    messages.append({"role": "user", "content": prompt})
+
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=messages,
+        max_tokens=150,
+        n=1,
+        stop=None,
+        temperature=0.7,
+    )
+    return response.choices[0].message['content'].strip()
+
 def chatbot(message, history):
     if not history:
-        return "Ho ho ho! Merry Christmas! I'm
+        return "Ho ho ho! Merry Christmas! I'm Holly, your Christmas helper. Would you like to take a Christmas quiz, create a Christmas card, or chat about the holidays?"
 
     last_response = history[-1][1].lower()
 
@@ -60,17 +84,18 @@ def chatbot(message, history):
             if q['question'] in history[-2][1]:  # Find the question in the history
                 user_answer = q['options'][int(message) - 1]
                 if user_answer == q['answer']:
-                    return f"Correct! {q['answer']} is the right answer. Would you like another question
+                    return f"Correct! {q['answer']} is the right answer. Would you like another question, to create a Christmas card, or to chat about something else?"
                 else:
-                    return f"Sorry, that's not correct. The right answer is {q['answer']}. Would you like another question
+                    return f"Sorry, that's not correct. The right answer is {q['answer']}. Would you like another question, to create a Christmas card, or to chat about something else?"
 
     elif "card" in last_response:
         image = generate_image(f"Christmas card scene: {message}")
         return (f"I've created a Christmas card based on your description: '{message}'. You can see it in the image box below. "
-                f"Would you like to create another card
+                f"Would you like to create another card, take a quiz, or chat about something else?", image)
 
     else:
-
+        # Use GPT-3.5 for general conversation
+        return get_gpt_response(message, history)
 
 # Gradio interface
 with gr.Blocks() as demo:
@@ -79,7 +104,7 @@ with gr.Blocks() as demo:
     Welcome to the Christmas Quiz and Card Generator Chatbot!
    - Type 'quiz' to start a Christmas quiz.
    - Type 'card' to create a custom Christmas card.
-    - Or just
+    - Or just chat about anything Christmas-related!
    """)
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message here")
```
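A compatibility note on the new `get_gpt_response()` call: `openai.ChatCompletion.create` is the pre-1.0 interface of the `openai` Python SDK and was removed in `openai>=1.0`, so the requirements pin matters. If the Space ends up with a newer SDK instead, a minimal sketch of the equivalent call (the `client` setup and the `get_gpt_reply` name are illustrative, not part of this commit):

```python
# Sketch assuming openai>=1.0; mirrors get_gpt_response() above with the newer client API.
import os
from openai import OpenAI

client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

def get_gpt_reply(messages):
    # `messages` is the same list of {"role": ..., "content": ...} dicts
    # that get_gpt_response() builds from the Gradio history.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=messages,
        max_tokens=150,
        temperature=0.7,
    )
    return response.choices[0].message.content.strip()

# Usage: build the message list as in get_gpt_response(), then
# return get_gpt_reply(messages) from the else branch of chatbot().
```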