from transformers import AutoModelForSequenceClassification
from transformers import AutoTokenizer
from scipy.special import softmax
import gradio as gr

# Load the fine-tuned sentiment model and its tokenizer
model_path = "FKBaffour/fine-tuned_bert_based_model_for_sentiment_analysis"
tokenizer = AutoTokenizer.from_pretrained('bert-base-cased')
model = AutoModelForSequenceClassification.from_pretrained(model_path)
# Preprocess text (replace usernames and links with placeholders)
def preprocess(text):
    new_text = []
    for t in text.split(" "):
        t = '@user' if t.startswith('@') and len(t) > 1 else t
        t = 'http' if t.startswith('http') else t
        new_text.append(t)
    return " ".join(new_text)
def sentiment_analysis(text):
    text = preprocess(text)

    # Tokenize and run the PyTorch model
    encoded_input = tokenizer(text, return_tensors='pt')
    output = model(**encoded_input)
    scores_ = output[0][0].detach().numpy()
    scores_ = softmax(scores_)

    # Format output as a dict mapping each label to its score
    labels = ['Negative', 'Neutral', 'Positive']
    scores = {l: float(s) for (l, s) in zip(labels, scores_)}
    return scores
# Build the Gradio app
demo = gr.Interface(
    fn=sentiment_analysis,
    title="Sentiment Analysis App",
    inputs=gr.Textbox(placeholder="Copy and paste/Write a tweet here..."),
    outputs="label",
    interpretation="default",
    examples=[["It is very good to vaccinate your child!"]],
)

demo.launch()