# chatQwenne / app.py
import gradio as gr
from huggingface_hub import InferenceClient
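# Client for the Hugging Face serverless Inference API; the model runs remotely,
# so no weights are loaded inside this Space.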
client = InferenceClient(
    "meta-llama/Llama-3.2-1B-Instruct"  # alternative: "HuggingFaceH4/zephyr-7b-beta"
)
# Background effect from CodePen
background_html = """
<p class="codepen" data-height="300" data-theme-id="dark" data-default-tab="html,result" data-slug-hash="LYwLqVv" data-pen-title="WebGL Eye Pattern" data-user="bxck75" style="height: 300px; box-sizing: border-box; display: flex; align-items: center; justify-content: center; border: 2px solid; margin: 1em 0; padding: 1em;">
<span>See the Pen <a href="https://codepen.io/bxck75/pen/LYwLqVv">
WebGL Eye Pattern</a> by boudewijn (<a href="https://codepen.io/bxck75">@bxck75</a>)
on <a href="https://codepen.io">CodePen</a>.</span>
</p>
<script async src="https://cpwebassets.codepen.io/assets/embed/ei.js"></script>
"""
title_html = """
<center>
<div id="title-container">
<h1 id="title-text">ChatBot</h1>
</div>
</center>
"""
css = """
.gradio-container {
background: url(https://huggingface.co/spaces/K00B404/FLUX.1-Dev-Serverless-darn-enhanced-prompt/resolve/main/edge.png);
background-size: 1800px 1000px;
background-repeat: no-repeat;
background-position: center;
background-attachment: fixed;
color:#111;
}
.dark\:bg-gray-950:is(.dark *) {
--tw-bg-opacity: 1;
background-color: rgb(157, 17, 142);
}
.gradio-container-4-41-0 .prose :last-child {
margin-top: 8px !important;
margin-bottom: -7px !important;
}
.dark {
--button-primary-background-fill: #09e60d70;
--button-primary-background-fill-hover: #00000070;
--background-fill-primary: #000;
--background-fill-secondary: #000;
}
.hide-container {
margin-top: -2px;
}
#app-container3 {
background-color: rgba(255, 255, 255, 0.001); /* nearly transparent so the page background shows through */
max-width: 600px;
margin-left: auto;
margin-right: auto;
margin-bottom: 10px;
border-radius: 125px;
box-shadow: 0 0 10px rgba(0,0,0,0.1); /* Adjusted shadow opacity */
}
#app-container {
background-color: rgba(255, 255, 255, 0.001); /* nearly transparent so the page background shows through */
max-width: 600px;
margin: 0 auto; /* Center horizontally */
padding-bottom: 10px;
border-radius: 25px;
box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); /* Adjusted shadow opacity */
}
.panel-container {
background-image: url('your-neon-border-image.png'); /* placeholder image path */
background-size: 100% 100%; /* Adjust the size to cover the container */
background-repeat: no-repeat;
background-position: center;
}
#title-container {
display: flex;
align-items: center;
margin-bottom: 10px;
justify-content: center;
}
#title-icon {
width: 32px;
height: auto;
margin-right: 10px;
}
#title-text {
font-size: 30px;
font-weight: bold;
color: #111;
}
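/* Shared sizing variables for the neon panel; the mobile media query below overrides them. */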
:root {
--panel-size: 300px;
--border-width: 4px;
--glow-blur: 15px;
}
body {
background-color: #000;
display: flex;
justify-content: center;
align-items: center;
min-height: 100vh;
margin: 0;
}
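/* Animated neon border: the ::before and ::after pseudo-elements stack a moving
rainbow gradient behind the panel; the ::after copy is blurred to create the glow. */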
.neon-panel {
width: var(--panel-size);
height: var(--panel-size);
background-color: #000;
position: relative;
border-radius: 20px;
overflow: hidden;
}
.neon-panel::before,
.neon-panel::after {
content: '';
position: absolute;
left: -2px;
top: -2px;
background: linear-gradient(
124deg,
#ff2400, #e81d1d, #e8b71d, #e3e81d, #1de840,
#1ddde8, #2b1de8, #dd00f3, #dd00f3
);
background-size: 300% 300%;
width: calc(100% + 4px);
height: calc(100% + 4px);
z-index: -1;
animation: moveGradient 10s ease infinite;
}
.neon-panel::after {
filter: blur(var(--glow-blur));
}
.neon-panel-content {
position: absolute;
top: var(--border-width);
left: var(--border-width);
right: var(--border-width);
bottom: var(--border-width);
background-color: #000;
border-radius: 16px;
z-index: 1;
}
@keyframes moveGradient {
0% { background-position: 0% 50%; }
50% { background-position: 100% 50%; }
100% { background-position: 0% 50%; }
}
@media (max-width: 768px) {
:root {
--panel-size: 250px;
--glow-blur: 10px;
}
}
@media (prefers-reduced-motion: reduce) {
.neon-panel::before,
.neon-panel::after {
animation: none;
}
}
"""
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Rebuild the conversation: system prompt, prior turns, then the new user message.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream the completion and yield the growing reply so the UI updates incrementally.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # final chunks may carry no content
            response += token
            yield response
with gr.Blocks(css=css) as demo:
    gr.HTML(title_html)  # Title header
    gr.HTML(background_html)  # CodePen WebGL background embed
    gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
        ],
    )
if __name__ == "__main__":
    demo.launch()
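# Note: meta-llama checkpoints are gated on the Hub, so the InferenceClient call
# typically needs a valid access token (e.g. an HF_TOKEN secret on the Space).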