File size: 1,510 Bytes
1d4ad2c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
import requests
import gradio as gr
from huggingface_hub import from_pretrained_fastai
import torch
from fastai.text.all import *
from blurr.text.data.all import *
from blurr.text.modeling.all import *

# Load the fine-tuned fastai/blurr learner from the Hugging Face Hub.
repo_id = 'matteopilotto/deberta-v3-base-tweet_eval-emotion'
load_learner = from_pretrained_fastai(repo_id)

# Fetch the class names published alongside the model (one label per line).
labels_url = 'https://huggingface.co/matteopilotto/deberta-v3-base-tweet_eval-emotion/raw/main/class_names.txt'
response = requests.get(labels_url)
labels = response.text.splitlines()

# define function to pass to gradio.Interface
def predict(prompt):
    """Classify *prompt* and return a {label: probability} mapping for gr.Label."""
    probs = load_learner.blurr_predict(prompt)[0]['probs']
    # Pair each class name with its predicted probability.
    return dict(zip(labels, probs))

# Input textbox handed to gradio.Interface (two lines, no placeholder text).
textbox = gr.Textbox(label='input', placeholder=None, lines=2)

# Example tweets shown beneath the interface (clickable to prefill the input).
examples = ["hey @user #fields in #skibbereen give your online delivery service a horrible name. 1.5 hours late on the 1 hour delivery window.",
             "when you only meet each other once at an interview and you recognise each other on the streets 🙆 i don't even know what's your name 😂"]

# Assemble the demo UI and start serving it. `live=True` re-runs `predict`
# on every keystroke instead of waiting for a submit button.
demo = gr.Interface(
    fn=predict,
    inputs=textbox,
    outputs=gr.Label(),
    title='Emotion in tweets',
    description="""<center><img src="https://huggingface.co/matteopilotto/deberta-v3-base-tweet_eval-emotion/resolve/main/emoji_image.png" width=600px></center>""",
    examples=examples,
    live=True,
)
demo.launch()