# -*- coding: utf-8 -*-
"""Emotion Recognition_Fine Tuning

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1pZgt5n6943GB5oq_h43LjAYoA4yi-EST
"""

"""Our Application"""
import tensorflow as tf
import gradio as gr
from transformers import AutoTokenizer, TFAutoModelForSequenceClassification

# Load the fine-tuned emotion classifier and the matching BERT tokenizer.
# The checkpoint is assumed to provide TensorFlow weights (the Space ships a tf_model.h5);
# switch to AutoModelForSequenceClassification if only PyTorch weights are available.
loaded_model = TFAutoModelForSequenceClassification.from_pretrained("dhruvsaxena11/emoton_model_dhruv")
# loaded_model = TFBertForSequenceClassification.from_pretrained("https://huggingface.co/spaces/dhruvsaxena11/Emotion_Recognition_in_Text/blob/main/tf_model.h5")
loaded_tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")
def predict_emotion(text):
    """Return a dict mapping each emotion label to its softmax probability."""
    # Tokenize the input text and run it through the fine-tuned classifier.
    text_token = loaded_tokenizer(text, padding=True, truncation=True, return_tensors="tf")
    outputs = loaded_model(text_token)
    # Convert logits to probabilities with softmax.
    probabilities = tf.nn.softmax(outputs.logits)
    final = probabilities.numpy().tolist()
    labels = ["sadness", "joy", "love", "anger", "fear", "surprise"]
    return {label: prob for label, prob in zip(labels, final[0])}
predict_emotion("dhruv")
my_labels=["sadness","joy","love","anger","fear","surprise"]
import gradio as gr
inputs = gr.Textbox(lines=1, label="Input Text")
outputs = gr.Label(num_top_classes=6)
interface = gr.Interface(fn=predict_emotion, inputs=inputs, outputs=outputs,title="Emotion Recognition in Text - NLP")
interface.launch()