import os
from typing import List, Optional, Tuple, Union

import gradio as gr
from openai import OpenAI


class ChristmasBot:
    def __init__(self):
        """Initialize the Christmas chatbot with OpenAI."""
        # On Hugging Face Spaces, add OPENAI_API_KEY under the Space's Settings -> Variables and secrets.
        self.client = OpenAI(api_key=os.environ.get('OPENAI_API_KEY'))
        # Card images are generated with DALL-E through the same OpenAI client,
        # so the card feature is enabled by default.
        self.image_generation_available = True
        self.system_prompt = """You are Holly, a cheerful Christmas helper chatbot.
        You love everything about Christmas and respond in a warm, festive manner.
        Keep your responses concise but friendly.
        If users ask about sensitive topics, guide the conversation back to Christmas-related subjects."""

    def _generate_image(self, prompt: str) -> Optional[str]:
        """Generate an image using DALL-E and return its URL."""
        try:
            response = self.client.images.generate(
                model="dall-e-3",
                prompt=f"Christmas themed illustration: {prompt}, festive, cheerful, holiday spirit",
                size="1024x1024",
                quality="standard",
                n=1,
            )
            return response.data[0].url
        except Exception as e:
            print(f"Image generation error: {e}")
            return None

    def _get_llm_response(self, message: str, history: List[List[str]]) -> str:
        """Get a chat response from OpenAI."""
        try:
            # Rebuild the conversation in the OpenAI chat format: system prompt,
            # then alternating user/assistant turns, then the new user message.
            messages = [{"role": "system", "content": self.system_prompt}]
            for user_turn, assistant_turn in history:
                if user_turn:
                    messages.append({"role": "user", "content": user_turn})
                if assistant_turn:
                    messages.append({"role": "assistant", "content": assistant_turn})
            messages.append({"role": "user", "content": message})

            response = self.client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=messages,
                temperature=0.7,
                max_tokens=150
            )
            return response.choices[0].message.content
        except Exception as e:
            print(f"OpenAI API error: {e}")
            return "Ho ho ho! I seem to be having trouble with my Christmas magic. Could you try again?"

    def process_message(self, message: str, history: List[List[str]]) -> Union[str, Tuple[str, str]]:
        """Process a user message and return either a text reply or a (text, image_url) pair."""
        # Greet on the very first message.
        if not history:
            return "Ho ho ho! Merry Christmas! I'm Holly, your Christmas helper. Would you like to create a Christmas card or chat about the holidays?"

        # Use lowercased copies for keyword checks, but keep the original message
        # intact for the image prompt and the LLM.
        lowered = message.lower()
        last_response = (history[-1][1] or "").lower()

        if "card" in lowered:
            if self.image_generation_available:
                return "Wonderful! Let's create a Christmas card. Please describe the scene you'd like on your card, and I'll generate it using DALL-E."
            return "I'm sorry, but the card generation feature is currently unavailable. Let's chat about Christmas instead!"

        # If the previous bot turn asked for a card description, treat this message as the scene.
        if "card" in last_response and self.image_generation_available:
            image = self._generate_image(message)
            if image:
                return (
                    f"I've created a Christmas card based on your description: '{message}'. "
                    f"Would you like to create another card or chat about something else?",
                    image
                )
            return "I'm sorry, I couldn't generate the image. Would you like to try again or chat about something else?"

        return self._get_llm_response(message, history)


with gr.Blocks(css="""
    :root {
        --holly-green: #146B3A;
        --christmas-red: #EA4630;
        --snow-white: #F8F9FA;
        --gold: #FFC107;
    }
    .message.user {
        background-color: var(--holly-green) !important;
        color: white !important;
        border-radius: 15px 15px 5px 15px !important;
    }
    .message.bot {
        background-color: var(--christmas-red) !important;
        color: white !important;
        border-radius: 15px 15px 15px 5px !important;
    }
    textarea {
        border: 2px solid var(--holly-green) !important;
        border-radius: 8px !important;
    }
    button {
        background-color: var(--holly-green) !important;
        color: white !important;
        border: none !important;
        border-radius: 8px !important;
        transition: background-color 0.2s !important;
    }
    button:hover {
        background-color: var(--christmas-red) !important;
    }
""") as demo:
gr.Markdown("# π Christmas Chatbot & Card Generator π ") | |
gr.Markdown(""" | |
Welcome to the Christmas Chatbot! | |
- Chat about anything Christmas-related | |
- Type 'card' to create a custom Christmas card with DALL-E | |
""") | |

    bot = ChristmasBot()

    chatbot = gr.Chatbot(
        bubble_full_width=False,
        avatar_images=("🧑", "🎅"),
        height=400
    )
    msg = gr.Textbox(
        label="Type your message here",
        placeholder="Ask me anything about Christmas or request a card!",
        show_label=True
    )
    clear = gr.Button("Clear Chat")
    # Hidden until a card has actually been generated.
    image_output = gr.Image(label="Generated Card", visible=False)

    def user(user_message: str, history: List[List[str]]) -> Tuple[str, List[List[str]]]:
        # Append the user's turn with an empty bot slot and clear the textbox.
        return "", history + [[user_message, None]]

    def bot_response(history: List[List[str]]) -> Tuple[List[List[str]], dict]:
        bot_message = bot.process_message(history[-1][0], history[:-1])
        if isinstance(bot_message, tuple):
            # (text, image_url): fill in the bot turn and reveal the card image.
            history[-1][1] = bot_message[0]
            return history, gr.update(value=bot_message[1], visible=True)
        history[-1][1] = bot_message
        return history, gr.update(visible=False)

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot_response, chatbot, [chatbot, image_output]
    )
    clear.click(lambda: None, None, chatbot, queue=False)


if __name__ == "__main__":
    demo.launch()
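
# Note: a minimal requirements.txt for this Space is assumed to list the two
# third-party packages imported above (a sketch, names only; pin versions as needed):
#
#   gradio
#   openai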