import os

import numpy as np
import gradio as gr
from scipy.special import expit
from transformers import RobertaTokenizer, AutoModelForSequenceClassification

# Set up the model: read the Hugging Face auth token from the environment
# (the model repo evidently requires authenticated access).
authtoken = os.environ.get("TOKEN")
tokenizer = RobertaTokenizer.from_pretrained(
    "guidecare/feelings_and_issues_large_v2", token=authtoken, use_safetensors=True
)
tokenizer.do_lower_case = True
model = AutoModelForSequenceClassification.from_pretrained(
    "guidecare/feelings_and_issues_large_v2", token=authtoken, use_safetensors=True
)
all_label_names = list(model.config.id2label.values())
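
# The label names follow a prefix convention (issue_*, feeling_*, harm_*,
# sentiment_*); predict() below groups the scores by these prefixes.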
def predict(text):
    inputs = tokenizer([text], return_tensors="pt", padding=True)
    logits = model(**inputs).logits.detach().numpy()
    # Multi-label head: score each label independently with a sigmoid (expit)
    # rather than a softmax across labels.
    probs = expit(logits)
    # Convert to plain Python floats (numpy scalars are not JSON-serializable
    # for the Gradio output), rounded to two decimals.
    probs = [float(np.round(i, 2)) for i in probs[0]]
    # Break the flat label list out into issue, feeling, harm, and sentiment groups.
    zipped_list = list(zip(all_label_names, probs))
    print(text, zipped_list)  # lightweight request logging for debugging
    issues = [(i, j) for i, j in zipped_list if i.startswith('issue')]
    feelings = [(i, j) for i, j in zipped_list if i.startswith('feeling')]
    harm = [(i, j) for i, j in zipped_list if i.startswith('harm')]
    sentiment = [(i, j) for i, j in zipped_list if i.startswith('sentiment')]
    # Sort each group by descending probability.
    issues = sorted(issues, key=lambda x: x[1], reverse=True)
    feelings = sorted(feelings, key=lambda x: x[1], reverse=True)
    harm = sorted(harm, key=lambda x: x[1], reverse=True)
    sentiment = sorted(sentiment, key=lambda x: x[1], reverse=True)
    # Concatenate the groups and return a {label: probability} dict, which
    # Gradio's "label" output renders as ranked confidences.
    top = issues + feelings + harm + sentiment
    return dict(top)
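
# Hypothetical call, for illustration only: these label names and scores are
# made up; the real names come from model.config.id2label.
#   predict("This test tomorrow is really freaking me out.")
#   -> {"issue_school": 0.81, "feeling_anxious": 0.92, "harm_none": 0.88, ...}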

iface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="label",
    # examples=["This test tomorrow is really freaking me out."]
)
iface.launch()