import re
import os

import tensorflow as tf
import keras
import keras_nlp

# Constants
MAX_ENCODER_SEQUENCE_LENGTH = 512
MAX_DECODER_SEQUENCE_LENGTH_FOR_SUMMARY = 128
MAX_DECODER_SEQUENCE_LENGTH_FOR_PARAPHRASE = 512

# Model files
MODEL_PATH = r"ModelFiles"

# Fine-tuned weight files for each task
WEIGHT_PATH_WO_G = r"WithoutGrammarSummary/new_model.weights.h5"
WEIGHT_PATH_W_G = r"GrammarSummary/new_model.weights.h5"
WEIGHT_PATH_PARAPHRASE = r"Paraphrase/new_model.weights.h5"


def cleanText(text):
    """Lowercase the input and strip every character that is not alphanumeric or whitespace."""
    text = str(text)
    text = re.sub(r'[^a-zA-Z0-9\s]', '', text)
    text = text.lower()
    return text


# Preprocessors: summarization uses a shorter decoder sequence than paraphrasing
preprocessorForSummary = keras_nlp.models.BartSeq2SeqLMPreprocessor.from_preset(
    MODEL_PATH,
    encoder_sequence_length=MAX_ENCODER_SEQUENCE_LENGTH,
    decoder_sequence_length=MAX_DECODER_SEQUENCE_LENGTH_FOR_SUMMARY,
)
preprocessorForParaphrase = keras_nlp.models.BartSeq2SeqLMPreprocessor.from_preset(
    MODEL_PATH,
    encoder_sequence_length=MAX_ENCODER_SEQUENCE_LENGTH,
    decoder_sequence_length=MAX_DECODER_SEQUENCE_LENGTH_FOR_PARAPHRASE,
)

# Load models
# "WithoutGrammarSummary" model
modelWOG = keras_nlp.models.BartSeq2SeqLM.from_preset(MODEL_PATH, preprocessor=preprocessorForSummary)
modelWOG.load_weights(WEIGHT_PATH_WO_G)

# "WithGrammarSummary" model
modelWG = keras_nlp.models.BartSeq2SeqLM.from_preset(MODEL_PATH, preprocessor=preprocessorForSummary)
modelWG.load_weights(WEIGHT_PATH_W_G)

# "Paraphrase" model
modelParaphrase = keras_nlp.models.BartSeq2SeqLM.from_preset(MODEL_PATH, preprocessor=preprocessorForParaphrase)
modelParaphrase.load_weights(WEIGHT_PATH_PARAPHRASE)


def generateText(input_text, model, max_length, wo_summary):
    """Generate text with the given model; inputs for the without-grammar summary model are cleaned first."""
    if wo_summary:
        input_text = cleanText(input_text)
    output = model.generate(input_text, max_length=max_length)
    return output
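
# Example usage (illustrative sketch only, not part of the original script): shows how
# the three loaded models might be passed to generateText. The __main__ guard and the
# sample input string are assumptions added here for demonstration.
if __name__ == "__main__":
    article = "Your article text goes here."

    # Summary from the model fine-tuned on cleaned (grammar-stripped) text
    print(generateText(article, modelWOG, MAX_DECODER_SEQUENCE_LENGTH_FOR_SUMMARY, wo_summary=True))

    # Summary from the model fine-tuned on grammatical text
    print(generateText(article, modelWG, MAX_DECODER_SEQUENCE_LENGTH_FOR_SUMMARY, wo_summary=False))

    # Paraphrase of the input
    print(generateText(article, modelParaphrase, MAX_DECODER_SEQUENCE_LENGTH_FOR_PARAPHRASE, wo_summary=False))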