Update app.py
app.py
CHANGED
@@ -1,9 +1,16 @@
 import gradio as gr
 import random
-from diffusers import StableDiffusionPipeline
-import torch
 import os
 
+# Attempt to import required libraries
+try:
+    from diffusers import StableDiffusionPipeline
+    import torch
+    STABLE_DIFFUSION_AVAILABLE = True
+except ImportError as e:
+    print(f"Error importing Stable Diffusion dependencies: {e}")
+    STABLE_DIFFUSION_AVAILABLE = False
+
 try:
     import openai
     openai.api_key = os.environ.get("OPENAI_API_KEY")
@@ -12,7 +19,46 @@ except ImportError:
     print("OpenAI library not found. Falling back to basic responses.")
     USE_GPT = False
 
-#
+# Quiz questions and answers
+christmas_quiz = [
+    {
+        "question": "What is the traditional Christmas flower?",
+        "options": ["Rose", "Poinsettia", "Tulip", "Daisy"],
+        "answer": "Poinsettia"
+    },
+    {
+        "question": "In which country did the tradition of putting up a Christmas tree originate?",
+        "options": ["USA", "England", "Germany", "France"],
+        "answer": "Germany"
+    },
+    {
+        "question": "What is the name of the ballet often performed at Christmas?",
+        "options": ["Swan Lake", "The Nutcracker", "Sleeping Beauty", "Giselle"],
+        "answer": "The Nutcracker"
+    },
+    {
+        "question": "Which company was the first to use Santa Claus in advertising?",
+        "options": ["Pepsi", "Coca-Cola", "McDonald's", "Walmart"],
+        "answer": "Coca-Cola"
+    },
+    {
+        "question": "What is the most popular Christmas dinner in Japan?",
+        "options": ["Turkey", "Ham", "KFC Chicken", "Roast Beef"],
+        "answer": "KFC Chicken"
+    }
+]
+
+# Initialize the Stable Diffusion pipeline if available
+if STABLE_DIFFUSION_AVAILABLE:
+    model_id = "runwayml/stable-diffusion-v1-5"
+    pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float32)
+
+def generate_image(prompt):
+    if not STABLE_DIFFUSION_AVAILABLE:
+        return None
+    with torch.no_grad():
+        image = pipe(prompt, num_inference_steps=50).images[0]
+    return image
 
 def get_gpt_response(prompt, history):
     if not USE_GPT:
@@ -40,4 +86,71 @@ def get_gpt_response(prompt, history):
         print(f"Error in GPT response: {e}")
         return "I'm having trouble connecting to my knowledge base right now. Can I help you with the Christmas quiz or card generation instead?"
 
-
+def chatbot(message, history):
+    if not history:
+        return "Ho ho ho! Merry Christmas! I'm Holly, your Christmas helper. Would you like to take a Christmas quiz, create a Christmas card, or chat about the holidays?"
+
+    last_response = history[-1][1].lower()
+
+    if "quiz" in message.lower():
+        question = random.choice(christmas_quiz)
+        options_text = "\n".join([f"{i+1}. {opt}" for i, opt in enumerate(question['options'])])
+        return f"Great! Here's your Christmas quiz question:\n\n{question['question']}\n\n{options_text}\n\nPlease enter the number of your answer."
+
+    elif "card" in message.lower():
+        if STABLE_DIFFUSION_AVAILABLE:
+            return "Wonderful! Let's create a Christmas card. Please describe the scene you'd like on your card, and I'll generate it for you using AI."
+        else:
+            return "I'm sorry, but the card generation feature is currently unavailable. Would you like to take a Christmas quiz instead?"
+
+    elif any(str(i) in message for i in range(1, 5)):  # Check if the message is a quiz answer
+        for q in christmas_quiz:
+            if q['question'] in history[-2][1]:  # Find the question in the history
+                user_answer = q['options'][int(message) - 1]
+                if user_answer == q['answer']:
+                    return f"Correct! {q['answer']} is the right answer. Would you like another question, to create a Christmas card, or to chat about something else?"
+                else:
+                    return f"Sorry, that's not correct. The right answer is {q['answer']}. Would you like another question, to create a Christmas card, or to chat about something else?"
+
+    elif "card" in last_response and STABLE_DIFFUSION_AVAILABLE:
+        image = generate_image(f"Christmas card scene: {message}")
+        if image:
+            return (f"I've created a Christmas card based on your description: '{message}'. You can see it in the image box below. "
+                    f"Would you like to create another card, take a quiz, or chat about something else?", image)
+        else:
+            return "I'm sorry, I couldn't generate the image. Would you like to try again, take a quiz, or chat about something else?"
+
+    else:
+        # Use GPT-3.5 for general conversation
+        return get_gpt_response(message, history)
+
+# Gradio interface
+with gr.Blocks() as demo:
+    gr.Markdown("# Christmas Quiz and Card Generator Chatbot")
+    gr.Markdown("""
+    Welcome to the Christmas Quiz and Card Generator Chatbot!
+    - Type 'quiz' to start a Christmas quiz.
+    - Type 'card' to create a custom Christmas card.
+    - Or just chat about anything Christmas-related!
+    """)
+    chatbot = gr.Chatbot()
+    msg = gr.Textbox(label="Type your message here")
+    clear = gr.Button("Clear")
+
+    def user(user_message, history):
+        return "", history + [[user_message, None]]
+
+    def bot(history):
+        bot_message = chatbot(history[-1][0], history[:-1])
+        history[-1][1] = bot_message
+        if isinstance(bot_message, tuple):
+            return history, bot_message[1]
+        return history, None
+
+    image_output = gr.Image()
+    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+        bot, chatbot, [chatbot, image_output]
+    )
+    clear.click(lambda: None, None, chatbot, queue=False)
+
+demo.launch()
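The guarded imports added at the top of app.py follow a common optional-dependency pattern: try the heavy import once, record the outcome in a module-level flag, and branch on that flag wherever the feature is used instead of re-importing. A minimal standalone sketch of that pattern, assuming only the diffusers import and the STABLE_DIFFUSION_AVAILABLE flag from the commit (the helper function and messages below are illustrative, not part of app.py):

# Optional-dependency guard: the import may fail in environments
# without diffusers installed, so record availability in a flag.
try:
    from diffusers import StableDiffusionPipeline
    STABLE_DIFFUSION_AVAILABLE = True
except ImportError as err:
    print(f"Stable Diffusion unavailable, image features disabled: {err}")
    STABLE_DIFFUSION_AVAILABLE = False

def card_feature_status() -> str:
    # Callers check the flag rather than retrying the import.
    if not STABLE_DIFFUSION_AVAILABLE:
        return "Card generation is disabled in this environment."
    return "Card generation is available."

if __name__ == "__main__":
    print(card_feature_status())

This runs whether or not diffusers is installed, which is the point of the pattern: the rest of the app keeps working and only the image-generation path degrades.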