import requests
import gradio as gr
from huggingface_hub import from_pretrained_fastai
import torch
from fastai.text.all import *
from blurr.text.data.all import *
from blurr.text.modeling.all import *

# Load the fine-tuned model from the Hugging Face Hub.
# NOTE: renamed from `load_learner` to `learner` — the old name shadowed
# fastai's own `load_learner` function (pulled in by the star import above).
repo_id = 'matteopilotto/deberta-v3-base-tweet_eval-emotion'
learner = from_pretrained_fastai(repo_id)

# Fetch the ordered class names; their order matches the model's output
# probability vector. `timeout` prevents app startup hanging forever on a
# stalled connection.
labels_url = 'https://huggingface.co/matteopilotto/deberta-v3-base-tweet_eval-emotion/raw/main/class_names.txt'
labels = requests.get(labels_url, timeout=30).text.splitlines()


# define function to pass to gradio.Interface
def predict(prompt):
    """Classify *prompt* and return a {label: probability} dict for gr.Label."""
    out = learner.blurr_predict(prompt)[0]
    return dict(zip(labels, out['probs']))


# define input textbox to pass to gradio.Interface
textbox = gr.Textbox(
    label='input',
    placeholder=None,
    lines=2,
)

# define examples to pass to gradio.Interface
examples = [
    "hey @user #fields in #skibbereen give your online delivery service a horrible name. 1.5 hours late on the 1 hour delivery window.",
    "when you only meet each other once at an interview and you recognise each other on the streets 🙆 i don't even know what's your name 😂",
]

gr.Interface(
    fn=predict,
    inputs=textbox,
    outputs=gr.Label(),
    title='Emotion in tweets',
    description="""
""",
    examples=examples,
    live=True,
).launch()