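The Space's application file loads CardiffNLP's Twitter RoBERTa sentiment model and wraps it in a Gradio interface that returns probabilities for the Negative, Neutral, and Positive classes: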
```python
import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# Load model and tokenizer
model_name = "cardiffnlp/twitter-roberta-base-sentiment-latest"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

def predict_sentiment(text):
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    predictions = torch.nn.functional.softmax(outputs.logits, dim=-1)
    sentiments = ['Negative', 'Neutral', 'Positive']
    result = {sentiments[i]: float(predictions[0][i]) for i in range(len(sentiments))}
    return result

# Create Gradio interface
iface = gr.Interface(
    fn=predict_sentiment,
    inputs=gr.Textbox(lines=2, placeholder="Type your sentence here..."),
    outputs=gr.Label(),
    title="Sentiment Analysis",
    description="Analyze the sentiment of your text. Powered by CardiffNLP's RoBERTa model.",
    article="<p style='text-align: center'>Enter a sentence to get its sentiment. The model categorizes sentiments into Negative, Neutral, and Positive.</p>",
    theme="huggingface",  # Using a built-in theme
)

if __name__ == "__main__":
    iface.launch()
```
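Once the app is launched, it can also be queried programmatically. Below is a minimal sketch using the `gradio_client` package; the local URL, port, and `/predict` endpoint name are assumptions based on Gradio's defaults rather than anything specified in the Space itself.

```python
# A sketch of calling the running demo from another process, assuming the app
# was launched locally on Gradio's default port (7860) and that gradio_client
# is installed (pip install gradio_client).
from gradio_client import Client

client = Client("http://127.0.0.1:7860/")       # connect to the local Gradio app
result = client.predict(
    "I love using Gradio for quick demos!",     # text input for predict_sentiment
    api_name="/predict",                        # default endpoint name for gr.Interface
)
print(result)  # label/confidence output corresponding to the gr.Label component
```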