from huggingface_hub import InferenceClient
import gradio as gr
import random

# Client for the hosted Qwen2.5-72B-Instruct model on the Hugging Face Inference API.
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")


def respond(message, history):
    # Early prototype: answer with a random yes/no.
    # responses = ["Yes", "No"]
    # return random.choice(responses)

    # System prompt that sets the bot's persona.
    messages = [
        {
            "role": "system",
            "content": "You are a chatbot who loves bugs and somehow relates everything to them.",
        }
    ]
    # Include prior turns so the model sees the full conversation.
    if history:
        messages.extend(history)
    # Append the latest user message.
    messages.append({"role": "user", "content": message})

    response = client.chat_completion(
        messages, max_tokens=1000, temperature=1.3, top_p=0.3
    )
    # temperature and top_p control how random the sampling is.
    return response.choices[0].message.content.strip()


# Gradio chat UI; type="messages" passes history as a list of role/content dicts.
chatbot = gr.ChatInterface(respond, type="messages")
chatbot.launch()