# app.py — SkimLit abstract-sentence classifier (Hugging Face Space)
# Original upload by danielolusipe, commit c5fd310 (file size ~2.94 kB).
import gradio as gr
import tensorflow as tf
def split_char(text):
    """Return *text* with a single space inserted between every character.

    Example: "abc" -> "a b c". Used to build the character-level input
    for the SkimLit model.
    """
    # str is already an iterable of characters, so join it directly.
    return " ".join(text)
from spacy.lang.en import English
def make_predictions(Input):
    """Classify each sentence of a medical abstract into rhetorical roles.

    Splits *Input* into sentences with spaCy's sentencizer, builds the
    three feature inputs the SkimLit tribrid model expects (one-hot line
    number, one-hot total-lines, raw sentence text plus character-split
    text), runs the model, and returns a newline-separated
    "LABEL: sentence" summary.

    Parameters
    ----------
    Input : str
        The abstract text to classify.

    Returns
    -------
    str
        One "CLASS: sentence" line per detected sentence; "" for input
        with no sentences.
    """
    class_names = ['BACKGROUND', 'CONCLUSIONS', 'METHODS', 'OBJECTIVE', 'RESULTS']

    # Build the sentence splitter once and cache it on the function:
    # recreating the spaCy pipeline on every call is wasted work.
    if not hasattr(make_predictions, "_nlp"):
        nlp = English()
        nlp.add_pipe('sentencizer')
        make_predictions._nlp = nlp
    doc = make_predictions._nlp(Input)
    abstract_sentences = [sent.text for sent in doc.sents]

    # Empty-input guard: tf.one_hot on an empty list and model.predict
    # on empty tensors would otherwise fail.
    if not abstract_sentences:
        return ""

    n_sentences = len(abstract_sentences)
    # Positional features per sentence: its index and the index of the
    # last line. One-hot depths (15 / 20) must match the values the
    # model was trained with — do not change them independently.
    abstract_line_number_one_hot = tf.one_hot(list(range(n_sentences)), depth=15)
    abstract_total_lines_one_hot = tf.one_hot([n_sentences - 1] * n_sentences, depth=20)
    # Character-level representation: each sentence with chars space-separated.
    abstract_char = [split_char(sentence) for sentence in abstract_sentences]

    # Load the model once and cache it on the function: reloading the
    # SavedModel from disk on every prediction call dominates runtime.
    # NOTE(review): the hard-coded Drive path only exists in the original
    # Colab environment — confirm it is valid where this app is deployed.
    if not hasattr(make_predictions, "_model"):
        make_predictions._model = tf.keras.models.load_model(
            "/content/drive/MyDrive/skimlit_models/Universal_sentence_encoder_Tribrid_embedding_model")

    # The tribrid model takes (line-number one-hot, total-lines one-hot,
    # sentence strings, character-split strings) as its four inputs.
    abstract_pred_probs = make_predictions._model.predict(
        x=(abstract_line_number_one_hot,
           abstract_total_lines_one_hot,
           tf.constant(abstract_sentences),
           tf.constant(abstract_char)))

    # Highest-probability class per sentence -> human-readable label.
    abstract_preds = tf.argmax(abstract_pred_probs, axis=1)
    predicted_classes = [class_names[i] for i in abstract_preds]

    # Assemble the "LABEL: sentence" summary with a single join instead
    # of repeated string concatenation.
    return "".join(f"{predicted_classes[i]}: {line}\n"
                   for i, line in enumerate(abstract_sentences))
# Wire the classifier into a minimal Gradio UI: one multiline textbox in,
# plain text out. The variable must stay named `demo` — Hugging Face
# Spaces looks it up by that name.
demo = gr.Interface(
    fn=make_predictions,
    inputs=gr.Textbox(lines=2, placeholder="Enter Abstract Here..."),
    outputs="text",
)
demo.launch(debug=True, inline=True)