import gradio as gr
from huggingface_hub import InferenceClient
# Markdown description
DESCRIPTION = '''
<div>
<h1 style="text-align: center;">zephyr-7b-beta</h1>
<p>This Space demonstrates the instruction-tuned model <b>zephyr-7b-beta</b> by <b>Hugging Face</b>. zephyr-7b-beta is an open 7B-parameter GPT-like model fine-tuned on a mix of publicly available and synthetic datasets. Feel free to play with it, or duplicate the Space to run it privately!</p>
</div>
'''
# License markdown
LICENSE = """
<p/>
---
Built with zephyr-7b-beta
"""
# Placeholder HTML
PLACEHOLDER = """
<div style="padding: 30px; text-align: center; display: flex; flex-direction: column; align-items: center;">
<img src="https://ysharma-dummy-chat-app.hf.space/file=/tmp/gradio/8e75e61cc9bab22b7ce3dec85ab0e6db1da5d107/Meta_lockup_positive%20primary_RGB.jpg" style="width: 80%; max-width: 550px; height: auto; opacity: 0.55; ">
<h1 style="font-size: 28px; margin-bottom: 2px; opacity: 0.55;">zephyr-7b-beta</h1>
<p style="font-size: 18px; margin-bottom: 2px; opacity: 0.65;">Ask me anything...</p>
</div>
"""
# CSS styles
css = """
h1 {
text-align: center;
display: block;
}
#duplicate-button {
margin: auto;
color: white;
background: #1565c0;
border-radius: 100vh;
}
"""
# Initialize InferenceClient
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
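# The client calls the hosted Inference API for this model; if the endpoint is
# gated or rate limited, an access token can optionally be passed, e.g.
# InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=os.environ["HF_TOKEN"]).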
# Function to respond to user messages
def respond(
message,
history: list[tuple[str, str]],
system_message,
max_tokens,
temperature,
top_p,
):
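    """Build the chat message list and stream the model's reply.

    Args:
        message: Latest user message from the chat box.
        history: Prior (user, assistant) turns supplied by Gradio.
        system_message: System prompt prepended to the conversation.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.
    """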
    # Start from the system prompt, then replay the earlier conversation turns
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    # Finally, append the new user message
    messages.append({"role": "user", "content": message})
    # Stream the completion, yielding the accumulated text as tokens arrive
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final chunk may carry no content
            response += token
            yield response
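# A minimal sketch (not part of the original app) of how respond() could be
# exercised outside Gradio: drain the generator and keep the final yield. It is
# left commented out because running it calls the hosted Inference API.
#
# def _smoke_test():
#     last = ""
#     for partial in respond(
#         message="Hello!",
#         history=[],
#         system_message="You are a friendly Chatbot.",
#         max_tokens=64,
#         temperature=0.7,
#         top_p=0.95,
#     ):
#         last = partial
#     print(last)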
# Create a Chatbot
chatbot = gr.Chatbot(height=450, placeholder=PLACEHOLDER, label='Gradio ChatInterface')
# Define the interface layout
with gr.Blocks(css=css) as demo:
# Add description markdown
gr.Markdown(DESCRIPTION)
# Add duplicate button
gr.DuplicateButton(value="Duplicate Space for private use", elem_id="duplicate-button")
# Add chat interface
gr.ChatInterface(
fn=respond,
chatbot=chatbot,
examples=[
            ['How to set up a human base on Mars? Give a short answer.'],
            ['Explain the theory of relativity to me like I’m 8 years old.'],
['What is 9,000 * 9,000?'],
['Write a pun-filled happy birthday message to my friend Alex.'],
['Justify why a penguin might make a good king of the jungle.']
],
additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False, render=False),
additional_inputs=[
gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
gr.Slider(
minimum=0.1,
maximum=1.0,
value=0.95,
step=0.05,
label="Top-p (nucleus sampling)",
),
],
cache_examples=False,
)
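    # Note: gr.ChatInterface passes the additional inputs above to respond()
    # positionally after (message, history), i.e. as
    # (system_message, max_tokens, temperature, top_p).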
# Add license markdown
gr.Markdown(LICENSE)
# Launch the interface
if __name__ == "__main__":
demo.launch()