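"""Gradio chat Space for google/gemma-3-27b-it, streamed through the
Hugging Face Hub InferenceClient."""
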
import gradio as gr
from huggingface_hub import InferenceClient
from typing import Iterator, List, Tuple
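
# Both gradio and huggingface_hub must be installed; in a Space these are
# typically listed in requirements.txt.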


# Default settings
class ChatConfig:
    MODEL = "google/gemma-3-27b-it"
    DEFAULT_SYSTEM_MSG = "You are a super intelligent and useful Chatbot."
    DEFAULT_MAX_TOKENS = 512
    DEFAULT_TEMP = 0.3
    DEFAULT_TOP_P = 0.95


client = InferenceClient(ChatConfig.MODEL)
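
# Note: google/gemma-3-27b-it is a gated model on the Hub, so the Space may need
# a Hugging Face access token (e.g. an HF_TOKEN secret, which huggingface_hub
# picks up automatically) for the InferenceClient calls to succeed.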


def generate_response(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str = ChatConfig.DEFAULT_SYSTEM_MSG,
    max_tokens: int = ChatConfig.DEFAULT_MAX_TOKENS,
    temperature: float = ChatConfig.DEFAULT_TEMP,
    top_p: float = ChatConfig.DEFAULT_TOP_P,
) -> Iterator[str]:
    """Stream a chat completion for the latest user message."""
    messages = [{"role": "system", "content": system_message}]

    # Conversation history
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})

    # Current user message
    messages.append({"role": "user", "content": message})

    # Stream tokens from the Inference API, yielding the growing response
    # so the chat window updates incrementally.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response
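
# gr.ChatInterface treats a generator function as a streaming handler: each yielded
# string replaces the in-progress assistant reply, so responses appear token by token.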


def create_interface() -> gr.ChatInterface:
    """Create and configure the chat interface."""
    # Custom CSS for a modern look
    custom_css = """
    .chatbot .message {
        border-radius: 12px;
        margin: 5px;
        padding: 10px;
    }
    .chatbot .user-message {
        background-color: #e3f2fd;
    }
    .chatbot .bot-message {
        background-color: #f5f5f5;
    }
    .gr-button {
        border-radius: 8px;
        padding: 8px 16px;
    }
    """

    # Custom chatbot
    chatbot = gr.Chatbot(
        label="Gemma Chat",
        avatar_images=("./user.png", "./botge.png"),
        height=450,
        show_copy_button=True,
    )
    # Chat interface
    interface = gr.ChatInterface(
        fn=generate_response,
        chatbot=chatbot,
        title="Tomoniai's chat with Google-Gemma-3",
        theme=gr.themes.Soft(),
        css=custom_css,
        additional_inputs=[
            gr.Textbox(
                value=ChatConfig.DEFAULT_SYSTEM_MSG,
                label="System Prompt",
                lines=2,
                placeholder="Enter system message...",
            ),
            gr.Slider(
                minimum=1,
                maximum=8192,
                value=ChatConfig.DEFAULT_MAX_TOKENS,
                step=1,
                label="Max Tokens",
                info="Controls response length",
            ),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=ChatConfig.DEFAULT_TEMP,
                step=0.1,
                label="Temperature",
                info="Controls randomness",
            ),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=ChatConfig.DEFAULT_TOP_P,
                step=0.05,
                label="Top-P",
                info="Controls diversity",
            ),
        ],
        additional_inputs_accordion=gr.Accordion(label="Advanced Settings", open=False),
    )

    return interface
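
# Hugging Face Spaces expect the app to listen on 0.0.0.0:7860 by default,
# which is what main() configures below.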


def main():
    app = create_interface()
    app.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_api=False,
        show_error=True,
        debug=True,
    )


if __name__ == "__main__":
    main()