from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
import gradio as gr

# Specifying the model path, which points to the Hugging Face Model Hub
model_path = 'Mbabazi/twitter-roberta-base-sentiment-latest'
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForSequenceClassification.from_pretrained(model_path)


# Function to predict sentiment of a given tweet
def predict_tweet(tweet):
    # Tokenize the input tweet using the specified tokenizer
    inputs = tokenizer(tweet, return_tensors="pt", padding=True, truncation=True, max_length=128)
    
    # Run the tokenized input through the pre-trained sentiment analysis model;
    # gradients are not needed for inference, so torch.no_grad() avoids tracking them
    with torch.no_grad():
        outputs = model(**inputs)
    
    # Applying softmax to obtain probabilities for each sentiment class
    probs = outputs.logits.softmax(dim=-1)
    
    # Defining sentiment classes
    sentiment_classes = ['Negative', 'Neutral', 'Positive']
    
    # Creating a dictionary with sentiment classes as keys and their corresponding probabilities as values
    return {sentiment_classes[i]: float(probs.squeeze()[i]) for i in range(len(sentiment_classes))}
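
# Optional sanity check (a minimal sketch; the example tweet below is
# illustrative and not part of the original app). Uncomment to verify that the
# model loads and returns one probability per sentiment class before the
# interface is launched.
# print(predict_tweet("Loving the new update, everything feels faster!"))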


# Create a Gradio Interface for the tweet sentiment prediction function
iface = gr.Interface(
    fn=predict_tweet,  # Set the prediction function
    inputs="text",  # Specify input type as text
    outputs="label",  # Specify output type as label
    title="Tweet Sentiment Classifier",  # Set the title of the interface
    description="Enter a tweet to determine if the sentiment is negative, neutral, or positive."  # Provide a brief description
)

iface.launch()
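
# Note: launch() serves the app on a local URL by default; passing share=True
# (a standard Gradio option) additionally creates a temporary public link,
# which can help when running on a remote machine or in a notebook.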