import streamlit as st
import pandas as pd
import numpy as np
from unidecode import unidecode
import tensorflow as tf
import cloudpickle
from transformers import AlbertTokenizerFast
import os
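
# load_model(): builds everything the app needs at startup -- the TFLite
# interpreter for the ALBERT sentiment model, the pickled text preprocessor
# and label encoder pair, and the tokenizer for the "albert-base-v2" checkpoint.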
def load_model():
    # TFLite interpreter for the exported sentiment-analysis model
    interpreter = tf.lite.Interpreter(model_path=os.path.join("models", "albert_sentiment_analysis.tflite"))
    # Text preprocessor and label encoder were serialized together with cloudpickle
    with open("models/sentiment_preprocessor_labelencoder.bin", "rb") as model_file_obj:
        text_preprocessor, label_encoder = cloudpickle.load(model_file_obj)
    model_checkpoint = "albert-base-v2"
    tokenizer = AlbertTokenizerFast.from_pretrained(model_checkpoint)
    return interpreter, text_preprocessor, label_encoder, tokenizer

interpreter, text_preprocessor, label_encoder, tokenizer = load_model()
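
# inference(): preprocesses the raw review text, tokenizes it for ALBERT,
# runs the TFLite interpreter, and returns a string of the form
# "<label> (<score>)". Falls back to "Can't Predict" when the preprocessor
# reduces the input to an empty message.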
def inference(text):
    tflite_pred = "Can't Predict"
    # Clean the raw text; the preprocessor maps blank input to the placeholder below
    text = text_preprocessor.preprocess(pd.Series(text))[0]
    if text != "this is an empty message":
        tokens = tokenizer(text, max_length=150, padding="max_length", truncation=True, return_tensors="tf")
        # TFLite model inference
        interpreter.allocate_tensors()
        input_details = interpreter.get_input_details()
        output_details = interpreter.get_output_details()[0]
        attention_mask, input_ids = tokens["attention_mask"], tokens["input_ids"]
        interpreter.set_tensor(input_details[0]["index"], attention_mask)
        interpreter.set_tensor(input_details[1]["index"], input_ids)
        interpreter.invoke()
        # Class scores for the single example in the batch
        tflite_pred = interpreter.get_tensor(output_details["index"])[0]
        tflite_pred_argmax = np.argmax(tflite_pred)
        tflite_pred = f"{label_encoder.inverse_transform([tflite_pred_argmax])[0]} ({tflite_pred[tflite_pred_argmax]})"
    return tflite_pred

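
# main(): Streamlit UI. A text area collects the review; on "Submit" the
# prediction is shown with st.success (green) for positive results and
# st.error (red) otherwise.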
def main():
    st.title("Sentiment Analysis App")
    review = st.text_area("Enter Review:", "")
    if st.button("Submit"):
        result = inference(review)
        # Positive predictions are shown in green, everything else in red
        if "positive" in result:
            st.success(result)
        else:
            st.error(result)

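
# Launch locally with: streamlit run app.py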
if __name__ == "__main__":
main()