import gradio as gr
from transformers import AutoTokenizer
import onnxruntime as ort
import numpy as np

# Load the tokenizer and the quantized ONNX model from the local onnx/ directory
tokenizer = AutoTokenizer.from_pretrained("onnx/")
session = ort.InferenceSession("onnx/model_quantized.onnx")

# Numerically stable softmax over the output logits
def softmax(x):
    e_x = np.exp(x - np.max(x))
    return e_x / e_x.sum()

# Prediction function
def classify_sentiment(text):
    # Tokenize the input text as NumPy arrays for ONNX Runtime
    inputs = tokenizer(text, return_tensors="np")

    # Run inference
    outputs = session.run(None, {
        "input_ids": inputs["input_ids"],
        "attention_mask": inputs["attention_mask"],
    })

    # Convert logits to probabilities and pick the most likely class
    logits = outputs[0][0]
    probs = softmax(logits)
    pred_class = int(np.argmax(probs))

    label_map = {0: "Negative", 1: "Positive"}
    return label_map[pred_class]

# Gradio interface
interface = gr.Interface(
    fn=classify_sentiment,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs="label",
    title="Sentiment Classifier",
    description="Enter a sentence to classify its sentiment",
)

# Launch the app
if __name__ == "__main__":
    interface.launch(share=True)
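
# Quick sanity check without the Gradio UI (hedged example; assumes the onnx/
# directory with the tokenizer files and model_quantized.onnx sits next to this
# script, and that the gradio, transformers, onnxruntime, and numpy packages
# are installed):
#
#     print(classify_sentiment("I really enjoyed this movie!"))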