RLealz committed
Commit 0143a36 · verified · 1 Parent(s): f71a23b

Create app.py

Files changed (1)
app.py +101 -0
app.py ADDED
@@ -0,0 +1,101 @@
import random

import gradio as gr
import torch
from diffusers import StableDiffusionPipeline

# Quiz questions and answers
christmas_quiz = [
    {
        "question": "What is the traditional Christmas flower?",
        "options": ["Rose", "Poinsettia", "Tulip", "Daisy"],
        "answer": "Poinsettia"
    },
    {
        "question": "In which country did the tradition of putting up a Christmas tree originate?",
        "options": ["USA", "England", "Germany", "France"],
        "answer": "Germany"
    },
    {
        "question": "What is the name of the ballet often performed at Christmas?",
        "options": ["Swan Lake", "The Nutcracker", "Sleeping Beauty", "Giselle"],
        "answer": "The Nutcracker"
    },
    {
        "question": "Which company was the first to use Santa Claus in advertising?",
        "options": ["Pepsi", "Coca-Cola", "McDonald's", "Walmart"],
        "answer": "Coca-Cola"
    },
    {
        "question": "What is the most popular Christmas dinner in Japan?",
        "options": ["Turkey", "Ham", "KFC Chicken", "Roast Beef"],
        "answer": "KFC Chicken"
    }
]

# Initialize the Stable Diffusion pipeline (fall back to CPU and float32 when no GPU is available)
model_id = "runwayml/stable-diffusion-v1-5"
device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32
pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=dtype)
pipe = pipe.to(device)


def generate_image(prompt):
    """Generate a single image for the given prompt."""
    image = pipe(prompt).images[0]
    return image


def santa_reply(message, history):
    """Return a (text, image) pair for the user's message, given the chat history."""
    if not history:
        return ("Ho ho ho! Merry Christmas! I'm Santa's helper. "
                "Would you like to take a Christmas quiz or create a Christmas card?", None)

    last_response = history[-1][1].lower()
    message_lower = message.lower()

    # A numeric reply right after a quiz question is treated as an answer
    if message.strip() in {"1", "2", "3", "4"} and "quiz question" in last_response:
        for q in christmas_quiz:
            if q["question"] in history[-1][1]:  # Find the question that was just asked
                user_answer = q["options"][int(message.strip()) - 1]
                if user_answer == q["answer"]:
                    return (f"Correct! {q['answer']} is the right answer. "
                            "Would you like another question or to create a Christmas card?", None)
                return (f"Sorry, that's not correct. The right answer is {q['answer']}. "
                        "Would you like another question or to create a Christmas card?", None)

    if "quiz" in message_lower:
        question = random.choice(christmas_quiz)
        options_text = "\n".join(f"{i + 1}. {opt}" for i, opt in enumerate(question["options"]))
        return (f"Great! Here's your Christmas quiz question:\n\n{question['question']}\n\n"
                f"{options_text}\n\nPlease enter the number of your answer.", None)

    if "card" in message_lower:
        return ("Wonderful! Let's create a Christmas card. Please describe the scene you'd like "
                "on your card, and I'll generate it for you using AI.", None)

    # Anything typed after the card prompt is treated as a card description
    if "describe the scene" in last_response:
        image = generate_image(f"Christmas card scene: {message}")
        return (f"I've created a Christmas card based on your description: '{message}'. "
                "You can see it in the image box below. "
                "Would you like to create another card or take a quiz?", image)

    return ("I can offer a Christmas quiz or a custom Christmas card. "
            "Type 'quiz' or 'card' to choose!", None)


# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Christmas Quiz and Card Generator Chatbot")
    gr.Markdown("""
    Welcome to the Christmas Quiz and Card Generator Chatbot!
    - Type 'quiz' to start a Christmas quiz.
    - Type 'card' to create a custom Christmas card.
    - Or just say hello to get started!
    """)
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message here")
    clear = gr.Button("Clear")
    image_output = gr.Image()

    def user(user_message, history):
        # Append the user's message to the history and clear the textbox
        return "", history + [[user_message, None]]

    def bot(history):
        # Produce the reply (and possibly an image) for the latest user message
        bot_message, image = santa_reply(history[-1][0], history[:-1])
        history[-1][1] = bot_message
        return history, image

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, [chatbot, image_output]
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()
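A minimal smoke test for the card-generation path, kept outside the committed file: the prompt text and the output file name below are placeholders, and importing app.py loads the Stable Diffusion weights, so this is slow without a GPU.

# smoke_test.py (not part of this commit): exercise generate_image directly
from app import generate_image

# The pipeline returns a PIL.Image, so it can be saved straight to disk
preview = generate_image("Christmas card scene: a snowy cabin with warm candlelight")
preview.save("card_preview.png")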