# emotion-analysis-demo / grad_creation_sent_analysis_ver_1.py
# (Header lines below were Hugging Face Hub page residue, not Python code:
#  author "Rai007-is-here", commit message "initial commit", hash 47c2fe8.)
import gradio as gr
from transformers import pipeline
import requests
import json
from transformers import AutoTokenizer, AutoModelForTokenClassification
from nltk.tokenize import sent_tokenize
# Token-classification (NER) pipeline built on dslim/bert-base-NER; used by detect_entity3.
tokenizer = AutoTokenizer.from_pretrained("dslim/bert-base-NER")
model = AutoModelForTokenClassification.from_pretrained("dslim/bert-base-NER")
nlp = pipeline("ner", model=model, tokenizer=tokenizer)
# Emotion classifier returning scores for ALL labels per input sentence; used by detect_emotion.
# NOTE(review): return_all_scores is deprecated in recent transformers versions
# (top_k=None is the modern equivalent) — confirm against the pinned version.
classifier = pipeline("text-classification",model='bhadresh-savani/distilbert-base-uncased-emotion', return_all_scores=True)
def detect_emotion(lista, emotion_threshold, minimum_words):
    """Classify every sentence in *lista* and keep the strong emotion hits.

    A ``(index, sentence, label, score)`` tuple is produced for every emotion
    label whose score exceeds *emotion_threshold*, but only for sentences
    containing more than *minimum_words* whitespace-separated words.
    """
    all_scores = classifier(lista)
    hits = []
    for idx, sentence_scores in enumerate(all_scores):
        # The word-count test does not depend on the label, so hoist it.
        long_enough = len(lista[idx].split()) > minimum_words
        for label_score in sentence_scores:
            if label_score["score"] > emotion_threshold and long_enough:
                hits.append((idx, lista[idx], label_score["label"], label_score["score"]))
    return hits
def detect_entity3(lista):
    """Run the NER pipeline on each sentence in *lista* and merge WordPiece
    sub-token hits back into whole entity words.

    Sub-tokens are treated as part of the same word when the current token's
    ``start`` offset equals the previous token's ``end`` offset; their leading
    '#' markers are stripped and the merged word keeps the highest sub-token
    score.

    Returns a list of tuples
        (sentence_index, sentence_text, entity_word, score, start_offset).
    """
    ner_results = nlp(lista)
    listb = []  # NOTE(review): never used
    entity = []  # accumulated (idx, word, score, entity_tag, start) tuples
    for i in range(len(ner_results)):
        end = -10  # sentinel: no previous token, so nothing can be adjacent yet
        old = None  # the entity word currently being assembled
        for j in range(len(ner_results[i])):
            if int(ner_results[i][j]["start"])==int(end):
                # Current token is glued to the previous one: same word.
                if j==len(ner_results[i])-1:
                    # Last token of this sentence: finish the merged word now.
                    appen = ner_results[i][j]["word"].replace("#", "")
                    old["word"] = old["word"] + appen
                    if old["score"]< ner_results[i][j]["score"]:
                        old["score"] = ner_results[i][j]["score"]
                    val = (i,old["word"],old["score"],ner_results[i][j]['entity'],old["start"]) # tag of the last sub-token (j-1 would also work)
                    entity.append(val)
                else:
                    # Append the sub-token text and keep the best score.
                    appen = ner_results[i][j]["word"].replace("#", "")
                    old["word"] = old["word"] + appen
                    if old["score"]< ner_results[i][j]["score"]:
                        old["score"] = ner_results[i][j]["score"]
                    end = ner_results[i][j]["end"]
            else:
                # Current token starts a new word.
                if old is not None:
                    if j==len(ner_results[i])-1:
                        # Flush the previous word, then emit the final
                        # single-token word as well.
                        val = (i,old["word"],old["score"],ner_results[i][j-1]['entity'],old["start"])
                        entity.append(val)
                        old["word"] = ner_results[i][j]["word"]
                        old["score"] = ner_results[i][j]["score"]
                        old["start"] = ner_results[i][j]["start"]
                        val = (i,old["word"],old["score"],ner_results[i][j]['entity'],old["start"])
                        entity.append(val)
                    else:
                        # Flush the previous word and start assembling this one.
                        val = (i,old["word"],old["score"],ner_results[i][j-1]['entity'],old["start"])
                        entity.append(val)
                        old["word"] = ner_results[i][j]["word"]
                        old["score"] = ner_results[i][j]["score"]
                        old["start"] = ner_results[i][j]["start"]
                        end = ner_results[i][j]["end"]
                else:
                    # First NER hit seen in this sentence.
                    # NOTE(review): if this first hit is also the LAST token of
                    # the sentence (a single-hit sentence), it is never appended
                    # to `entity` — confirm whether that is intended.
                    old = {}
                    old["word"] = ner_results[i][j]["word"]
                    old["score"] = ner_results[i][j]["score"]
                    old["start"] = ner_results[i][j]["start"]
                    end = ner_results[i][j]["end"]
    # Attach the sentence text and drop the entity-tag column:
    # (idx, sentence, word, score, start)
    listc = []
    for i in entity:
        val = (i[0],lista[i[0]],i[1],i[2],i[4])
        listc.append(val)
    return listc
def compare_and_print(output1, output2):
    """Return sentence indices present in BOTH result lists.

    *output1* and *output2* are lists of tuples whose first element is a
    sentence index (emotion hits and entity hits respectively).  Indices are
    returned de-duplicated, in order of first appearance in *output1* —
    exactly the order the original dict-based implementation produced.
    """
    # A set lookup replaces the original dict of "Yes"/"No" flags; the old
    # `flag` variable was never read and has been dropped.
    indices2 = {item[0] for item in output2}
    both = []
    seen = set()
    for item in output1:
        idx = item[0]
        if idx in indices2 and idx not in seen:
            seen.add(idx)
            both.append(idx)
    return both
def detect_tam(entity_output, tam_list):
    """Find TAM phrases that occur verbatim at detected entity positions.

    *entity_output* rows are ``(sent_idx, sentence, word, score, start)``
    tuples; *tam_list* is a list of multi-word phrase strings.  A phrase
    matches when its first word equals the entity word (case-insensitive)
    and the sentence text starting at the entity offset equals the whole
    phrase.  Returns ``{sentence_index: [phrase_index, ...]}`` with
    duplicate phrase indices suppressed.
    """
    matches = {}
    for ent in entity_output:
        sent_idx, sentence, word, start = ent[0], ent[1], ent[2], ent[4]
        for phrase_idx, phrase in enumerate(tam_list):
            first_token = phrase.split()[0]
            if word.lower() != first_token.lower():
                continue
            # Compare the full phrase against the sentence slice in place.
            if sentence[start:start + len(phrase)].lower() != phrase.lower():
                continue
            hits = matches.setdefault(sent_idx, [])
            if phrase_idx not in hits:
                hits.append(phrase_idx)
    return matches
def myFunc(e):
    """Sort key: the second element (the score) of an (index, score) pair."""
    return e[1]
def integrate_all(text,threshold, min_words, max_detection, max_tam_detection):
    """Select 'interesting' sentences from *text* and format them as a report.

    Two selection passes are performed:
      1. sentences whose emotion score exceeds *threshold* (top
         *max_detection* per emotion label), and
      2. sentences containing BOTH a strong emotion and a named entity
         (at most *max_tam_detection*, highest emotion score first).
    For every selected sentence a small context window starting two
    sentences earlier is appended to the output.

    Returns the assembled multi-line report string.
    """
    out_text = ""
    emotion_threshold = threshold
    minimum_words = min_words
    emotion_number = max_detection  # max picks per emotion label (UI default 3)
    both_number = max_tam_detection  # max picks for the emotion+entity pass (UI default 5)
    emotion_number = int(emotion_number)
    both_number = int(both_number)
    #tam_number = 4  # used only by the disabled TAM pass kept at the bottom
    # Split the input into sentences; all indices below refer to this list.
    lista = sent_tokenize(text)
    emotion_out = detect_emotion(lista,emotion_threshold, minimum_words)
    out_text = out_text + "##Selected based on emotions##"
    out_text = out_text + "\n---------------------------"
    # Group emotion hits by label: label -> [(sentence_index, score), ...]
    dicta = {}
    for i in emotion_out:
        if i[2] not in dicta:
            dicta[i[2]] = []
            dicta[i[2]].append((i[0],i[3]))
        else:
            dicta[i[2]].append((i[0],i[3]))
    # Sort each label's hits by score, best first.
    for i in dicta:
        dicta[i].sort(reverse=True,key=myFunc)
    emotion_selected = []
    for i in dicta:
        val_em = 0  # NOTE(review): never read
        # Keep at most `emotion_number` top-scoring sentences per label.
        if len(dicta[i])<emotion_number:
            em_loop = len(dicta[i])
        else:
            em_loop = emotion_number
        for j in range(em_loop):
            emotion_selected.append(dicta[i][j])
    # Emit each selected sentence with a context window (offsets -2 .. +5).
    for i in emotion_selected:
        if len(lista)>=i[0]-2+8+1:
            sel_val = 8
        else:
            # NOTE(review): this clamp can go negative (empty window) —
            # len(lista)-(i[0]-2) looks like what was intended; confirm.
            sel_val = len(lista)-1-(i[0]-2+8)
        for j in range(sel_val):
            # NOTE(review): i[0]-2+j is negative for the first two sentences,
            # wrapping around to the END of the list — confirm intended.
            out_text = out_text + "\n" + str(-2+j) + ", " + str(lista[i[0]-2+j])
        out_text = out_text + "\n---------------------------"
    out_text = out_text + "\n---------------------------"
    out_text = out_text + "\n##Selected based on presence of both named entity then followed by highest emotions##"
    out_text = out_text + "\n---------------------------"
    entity_out = detect_entity3(lista)
    # Sentence indices appearing in both the emotion and entity passes.
    both = compare_and_print(emotion_out,entity_out)
    both_selected = []
    if len(both)<=both_number:
        for i in both:
            both_selected.append((i,None))
    else:
        # Too many candidates: rank by emotion score, keep the top N.
        list_em = []
        for i in both:
            for j in emotion_out:
                if i==j[0]:
                    list_em.append((i,j[3]))
        list_em.sort(reverse=True, key=myFunc)
        for i in range(both_number):
            both_selected.append(list_em[i])
    # Same context-window emission as above, for the combined picks.
    for i in both_selected:
        if len(lista)>=i[0]-2+8+1:
            sel_val = 8
        else:
            sel_val = len(lista)-1-(i[0]-2+8)
        for j in range(sel_val):
            out_text = out_text + "\n" + str(-2+j) + ", " + str(lista[i[0]-2+j])
        out_text = out_text + "\n---------------------------"
    out_text = out_text + "\n---------------------------"
    # Disabled TAM (tense/aspect/modality) selection pass, kept for reference.
    """print("##Selected based on presence of no. of TAM##")
    print("-------------------------------")
    if len(tam_list)==0:
    print("No TAM list provided hence no selection based on TAM")
    tam_output = detect_tam(entity_out, tam_list)
    no_of_tam = {}
    count = 0
    tam_numbers=[]
    for i in tam_output:
    count = count+1
    if len(tam_output[i]) not in no_of_tam:
    no_of_tam[len(tam_output[i])] = [i]
    tam_numbers.append(len(tam_output[i]))
    else:
    no_of_tam[len(tam_output[i])].append(i)
    tam_numbers.sort(reverse=True)
    if count<tam_number:
    tam_number = count
    tam_output = []
    for i in tam_numbers:
    if tam_number<=0:
    break
    local_lista = no_of_tam[i]
    for i in local_lista:
    tam_output.append(i)
    tam_number = tam_number-1
    for i in tam_output:
    print(lista[i])"""
    return out_text
def generateMessage(text, threshold, min_words, max_detection, max_tam_detection):
    """Gradio callback: run the full selection pipeline on *text*.

    Returns the report string, or "" when processing fails (the UI then
    shows an empty output box instead of crashing).
    """
    try:
        return integrate_all(text, threshold, min_words, max_detection, max_tam_detection)
    except Exception:
        # The original bare `except:` silently swallowed everything, including
        # KeyboardInterrupt/SystemExit. Keep the fail-soft contract (return "")
        # but log the error so failures are diagnosable in the Space logs.
        import traceback
        traceback.print_exc()
        return ""
# Build the Gradio UI: one textbox for the transcript plus four tuning numbers.
# NOTE(review): gr.inputs.*, allow_screenshot and allow_flagging=False are the
# legacy Gradio 2.x API — newer Gradio uses gr.Textbox/gr.Number and
# allow_flagging="never"; confirm the pinned gradio version before upgrading.
iface = gr.Interface(fn=generateMessage, inputs=[gr.inputs.Textbox(label="Input Description", default="Ladies and gentlemen, Elon Musk. Thank you, thank you. Thank you very much. It's an honor to be hosting Saturday Night Live. I mean that sometimes after I say something, I have to say I mean that so people really know that I mean it. That's because I don't always have a lot of intonation or variation in how I speak, which I'm told makes for great comedy. I'm actually making history. Not as the first person with Asperger's to host SNL, or at least the first to admit it. So I won't make a lot of eye contact with the cast tonight, but all right, I'm pretty good at running human in emulation mode. I'd first like to share with you my vision for the future. I believe in a renewable energy future. I believe that humanity must become a multi planetary, space bearing civilization. Those seem like exciting goals, don't they? I think if I just posted that on Twitter, I'd be fine. But I also write things like 69 days after 420 again, haha. I don't know. I thought it was funny. That's why I read Haha at the end. Look, I know I sometimes say or post strange things, but that's just how my brain works. To anyone I'm offended, I just want to say I reinvented electric cars and I'm sending people to Mars in a rocket ship. Did you think I was also going to be a chill, normal dude? Now, a lot of times people are reduced to the dumbest thing they ever did. Like one time I smoked weed on Joe Rogan's podcast, and now all the time I hear Elon Musk. All he ever does is smoke weed on podcasts. Like I go from podcast to podcast lighting up joints. It happened once. It's like reducing OJ. Simpson to murderer. That was one time. Fun fact, OJ. Also hosted the show in 79, anna Gain in 96. Killed both times. One reason I've always loved SNL is because it's genuinely live. A lot of people don't realize that we're actually live right now. 
Which means I could say something truly shocking, like I drive a Prius. SNL is also a great way to learn something new about the host. For example, this is my son's name. It's pronounced cat running across keyboard. Another thing people want to know is what was I like as a kid? The answer is pretty much the same as now, but tomorrow. But we can also ask my mother, who's here tonight. Her name is May, like the month, but with an E at the end. Thanks for spelling my name, Elon. Mom, do you remember when I was twelve and I created my own video game called Blastar about a spaceship that battles aliens? I do. I remember they paid you $500. But you were too young to open a bank account, so I had to open one for you. That's right. Whatever happened to that bank account? That's not important. You turn that video game about space into reality. Unless you consider that our reality might be a video game and we're all just computer simulations being played by a teenager in another planet. That's great, Elon. Well, break your leg tonight. I love you very much. I'll be tomorrow. And I'm excited for my Mother's Day gift. I just hope it's not ghost coin. It is. It sure is. Okay, we've got a great show for you tonight. Miley Cyrus is here, so stick around and we're be right back."),
gr.inputs.Number(label="Threshold", default=0.995),
gr.inputs.Number(label="Min no. of words", default=5),
gr.inputs.Number(label="Max detection from each emotion type", default=3),
gr.inputs.Number(label="Max TAM detection lines", default=4)
], outputs=["textbox"],
title = "Emotion sensitive lines detection",
allow_screenshot = False, allow_flagging = False
)
# SECURITY NOTE(review): basic-auth credentials are hard-coded in source and
# committed to the repo — move them to environment variables / Space secrets.
iface.launch(server_name="0.0.0.0", auth=("vidyo", "pass_qwertyindra"), share=True)