# -*- coding: utf-8 -*-
"""Emotion Recognition_Fine Tuning

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1pZgt5n6943GB5oq_h43LjAYoA4yi-EST
"""


"""Our Application"""


import tensorflow as tf  # used below for tf.nn.softmax over the model logits

# Load the fine-tuned classifier and the matching BERT tokenizer from the Hub.
from transformers import AutoTokenizer, TFAutoModelForSequenceClassification

loaded_model = TFAutoModelForSequenceClassification.from_pretrained("dhruvsaxena11/emoton_model_dhruv")
loaded_tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")

# Class labels of the fine-tuned head, in the order used during training.
LABELS = ["sadness", "joy", "love", "anger", "fear", "surprise"]
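
# Sanity-check sketch (an addition, not in the original file; assumes the
# checkpoint config carries a label mapping): uncomment to verify that LABELS
# matches the order the model was trained with.
# print(loaded_model.config.id2label)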

def predict_emotion(text):
    """Score a single string and return a {label: probability} dict."""
    tokens = loaded_tokenizer(text, padding=True, return_tensors="np")
    outputs = loaded_model(tokens)
    # Softmax over the class axis turns the raw logits into probabilities.
    probabilities = tf.nn.softmax(outputs.logits, axis=-1).numpy()
    return dict(zip(LABELS, probabilities[0].tolist()))
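
# Hedged batch variant (an addition, not from the original notebook): the
# tokenizer pads a list of strings, so one model call can score a whole batch;
# each row of the softmax output maps onto LABELS in order.
def predict_emotions_batch(texts):
    tokens = loaded_tokenizer(texts, padding=True, return_tensors="np")
    probs = tf.nn.softmax(loaded_model(tokens).logits, axis=-1).numpy()
    return [dict(zip(LABELS, row.tolist())) for row in probs]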

# Quick smoke test that the model and tokenizer load and run end to end.
predict_emotion("dhruv")
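
# Usage sketch (the sample sentence is illustrative, not from the original
# file): pick the single most likely emotion from the returned score dict.
scores = predict_emotion("I can't stop smiling today!")
print(max(scores, key=scores.get))  # e.g. "joy"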



import gradio as gr

# Minimal Gradio UI: one text box in, a label widget showing all six scores.
inputs = gr.Textbox(lines=1, label="Input Text")
outputs = gr.Label(num_top_classes=6)
interface = gr.Interface(fn=predict_emotion, inputs=inputs, outputs=outputs,
                         title="Emotion Recognition in Text - NLP")
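# Note (assumption): when running inside Colab, launch(share=True) is the
# usual way to get a reachable public URL; on Hugging Face Spaces the plain
# launch() below suffices.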
interface.launch()