# ChatwithDolly / app.py
# Author: john — "Update app.py" (commit 2f38056, 699 bytes)
import gradio as gr
import torch
from transformers import pipeline
generate_text = pipeline(model="databricks/dolly-v2-3b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
with gr.Blocks(theme=gr.themes.Soft()) as demo:
chatbot = gr.Chatbot()
msg = gr.Textbox()
clear = gr.Button("Clear")
def respond(message, chat_history):
res = generate_text(message)
a = (res[0]["generated_text"])
b = str(a)
bot_message = b
chat_history.append((message, bot_message))
return "", chat_history
msg.submit(respond, [msg, chatbot], [msg, chatbot])
clear.click(lambda: None, None, chatbot, queue=False)
demo.launch()