File size: 6,115 Bytes
0143a36
8eba130
cb0688f
4950ffa
 
8eba130
 
4950ffa
 
8eba130
4950ffa
 
8eba130
 
 
 
 
4950ffa
8eba130
4950ffa
 
 
 
 
 
 
 
 
 
 
 
8eba130
cb0688f
8eba130
4950ffa
8eba130
4950ffa
8eba130
4950ffa
 
 
 
 
8eba130
4950ffa
 
 
 
 
8eba130
4950ffa
8eba130
 
4950ffa
8eba130
 
4950ffa
 
8eba130
 
 
 
 
 
 
 
 
 
 
4950ffa
8eba130
 
 
4950ffa
8eba130
 
 
 
 
 
 
 
 
 
4950ffa
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8eba130
 
8d6dc6e
8eba130
 
 
8d6dc6e
8eba130
8d6dc6e
8eba130
8d6dc6e
8eba130
4950ffa
 
 
 
8d6dc6e
8eba130
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
import gradio as gr
from typing import List, Tuple, Optional, Union
import os
from openai import OpenAI
import json

class ChristmasBot:
    def __init__(self):
        """Initialize the Christmas chatbot with OpenAI."""
        self.stable_diffusion_available = False
        # For Hugging Face Spaces, get the API key from secrets
        self.client = OpenAI(api_key=os.environ.get('OPENAI_API_KEY'))
        
        self.system_prompt = """You are Holly, a cheerful Christmas helper chatbot. 
        You love everything about Christmas and respond in a warm, festive manner. 
        Keep your responses concise but friendly. 
        If users ask about sensitive topics, guide the conversation back to Christmas-related subjects."""
    
    def _generate_image(self, prompt: str) -> Optional[str]:
        """Generate an image using DALL-E."""
        try:
            response = self.client.images.generate(
                model="dall-e-3",
                prompt=f"Christmas themed illustration: {prompt}, festive, cheerful, holiday spirit",
                size="1024x1024",
                quality="standard",
                n=1,
            )
            return response.data[0].url
        except Exception as e:
            print(f"Image generation error: {e}")
            return None
    
    def _get_llm_response(self, message: str, history: List[List[str]]) -> str:
        """Get response from OpenAI."""
        try:
            messages = [{"role": "system", "content": self.system_prompt}]
            
            for h in history:
                if h[0]:
                    messages.append({"role": "user", "content": h[0]})
                if h[1]:
                    messages.append({"role": "assistant", "content": h[1]})
            
            messages.append({"role": "user", "content": message})
            
            response = self.client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=messages,
                temperature=0.7,
                max_tokens=150
            )
            
            return response.choices[0].message.content
            
        except Exception as e:
            print(f"OpenAI API error: {e}")
            return "Ho ho ho! I seem to be having trouble with my Christmas magic. Could you try again?"
    
    def process_message(self, message: str, history: List[List[str]]) -> Union[str, Tuple[str, str]]:
        """Process user message and return appropriate response."""
        if not history:
            return "Ho ho ho! Merry Christmas! I'm Holly, your Christmas helper. Would you like to create a Christmas card or chat about the holidays?"
        
        message = message.lower()
        last_response = history[-1][1].lower() if history else ""
        
        if "card" in message:
            if self.stable_diffusion_available:
                return "Wonderful! Let's create a Christmas card. Please describe the scene you'd like on your card, and I'll generate it using DALL-E."
            return "I'm sorry, but the card generation feature is currently unavailable. Let's chat about Christmas instead!"
        
        if "card" in last_response and self.stable_diffusion_available:
            image = self._generate_image(message)
            if image:
                return (
                    f"I've created a Christmas card based on your description: '{message}'. "
                    f"Would you like to create another card or chat about something else?",
                    image
                )
            return "I'm sorry, I couldn't generate the image. Would you like to try again or chat about something else?"
        
        return self._get_llm_response(message, history)

with gr.Blocks(css="""
    :root {
        --holly-green: #146B3A;
        --christmas-red: #EA4630;
        --snow-white: #F8F9FA;
        --gold: #FFC107;
    }
    .message.user {
        background-color: var(--holly-green) !important;
        color: white !important;
        border-radius: 15px 15px 5px 15px !important;
    }
    .message.bot {
        background-color: var(--christmas-red) !important;
        color: white !important;
        border-radius: 15px 15px 15px 5px !important;
    }
    textarea {
        border: 2px solid var(--holly-green) !important;
        border-radius: 8px !important;
    }
    button {
        background-color: var(--holly-green) !important;
        color: white !important;
        border: none !important;
        border-radius: 8px !important;
        transition: background-color 0.2s !important;
    }
    button:hover {
        background-color: var(--christmas-red) !important;
    }
""") as demo:
    gr.Markdown("# πŸŽ„ Christmas Chatbot & Card Generator πŸŽ…")
    gr.Markdown("""
    Welcome to the Christmas Chatbot!
    - Chat about anything Christmas-related
    - Type 'card' to create a custom Christmas card with DALL-E
    """)
    
    bot = ChristmasBot()
    
    chatbot = gr.Chatbot(
        bubble_full_width=False,
        avatar_images=("πŸ‘€", "πŸŽ…"),
        height=400
    )
    msg = gr.Textbox(
        label="Type your message here",
        placeholder="Ask me anything about Christmas or request a card!",
        show_label=True
    )
    clear = gr.Button("Clear Chat")
    image_output = gr.Image(label="Generated Card", visible=False)
    
    def user(user_message: str, history: List[List[str]]) -> Tuple[str, List[List[str]]]:
        return "", history + [[user_message, None]]
    
    def bot_response(history: List[List[str]]) -> Tuple[List[List[str]], Optional[str]]:
        bot_message = bot.process_message(history[-1][0], history[:-1])
        if isinstance(bot_message, tuple):
            history[-1][1] = bot_message[0]
            return history, bot_message[1]
        history[-1][1] = bot_message
        return history, None
    
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot_response, chatbot, [chatbot, image_output]
    )
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()