from __future__ import absolute_import, division, print_function, unicode_literals
import os
import gradio as gr
from fastai.text.all import *
from transformers import *
from blurr.data.all import *
from blurr.modeling.all import *
import spacy
from spacy_readability import Readability
# from save_data import save_data_and_sendmail
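# spaCy pipeline with the spacy-readability component, used to compute a
# Flesch-Kincaid reading-ease score for each generated summary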
readability_nlp = spacy.load('en_core_web_sm')
read = Readability()
cwd = os.getcwd()
readability_nlp.add_pipe(read, last=True)
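# Fine-tuned BART summarization model loaded from a local checkpoint directory;
# the tokenizer comes from the base facebook/bart-large-cnn checkpoint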
bart_ext_model_path = os.path.join(cwd, 'bart_extractive_model')
bart_extractive_model = BartForConditionalGeneration.from_pretrained(bart_ext_model_path)
bart_extractive_tokenizer = BartTokenizer.from_pretrained('facebook/bart-large-cnn')
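# Fine-tuned T5 summarization model loaded from a local checkpoint directory;
# the tokenizer comes from the mrm8488/t5-base-finetuned-summarize-news checkpoint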
t5_model_path = os.path.join(cwd, 't5_model')
t5_model = AutoModelWithLMHead.from_pretrained(t5_model_path)
t5_tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-summarize-news")
def generate_text_summarization(sum_type, article):
    if article.strip():
        print("text input:", article)

        if sum_type == 'BART Extractive Text Summarization':
            # Tokenize the article (truncated to max_length) and generate a summary with beam search
            inputs = bart_extractive_tokenizer([article], max_length=1024, truncation=True, return_tensors='pt')
            summary_ids = bart_extractive_model.generate(inputs['input_ids'], num_beams=4, min_length=60, max_length=300, early_stopping=True)
            summary = [bart_extractive_tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids]
            summary = summary[0]
            print(summary)

            # Score the summary's readability (Flesch-Kincaid reading ease)
            doc = readability_nlp(summary)
            summary_score = round(doc._.flesch_kincaid_reading_ease, 2)
            summarized_data = {
                "summary": summary,
                "score": summary_score
            }

        if sum_type == 'T5 Abstractive Text Summarization':
            # Tokenize the article (truncated to max_length) and generate a summary with beam search
            inputs = t5_tokenizer.encode(article, return_tensors="pt", max_length=2048, truncation=True)
            summary_ids = t5_model.generate(inputs,
                                            num_beams=2,
                                            no_repeat_ngram_size=2,
                                            min_length=100,
                                            max_length=300,
                                            early_stopping=True)
            summary = t5_tokenizer.decode(summary_ids[0], skip_special_tokens=True)
            print(summary)

            # Score the summary's readability (Flesch-Kincaid reading ease)
            doc = readability_nlp(summary)
            summary_score = round(doc._.flesch_kincaid_reading_ease, 2)
            summarized_data = {
                "summary": summary,
                "score": summary_score
            }

        # save_data_and_sendmail(article, sum_type, summary)
        return summary
    else:
        raise gr.Error("Please enter text in the input box!")
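# Gradio UI: radio selector for the summarization model, plus input/output textboxes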
input_text = gr.Textbox(lines=5, label="Paragraph")
input_radio = gr.Radio(['BART Extractive Text Summarization', 'T5 Abstractive Text Summarization'], label='Select summarization type', value='BART Extractive Text Summarization')
output_text = gr.Textbox(lines=7, label="Summarized text")
demo = gr.Interface(
    generate_text_summarization,
    [input_radio, input_text],
    output_text,
    title="Text Summarization",
    css=".gradio-container {background-color: lightgray}",
    article="""<p style='text-align: center;'>Developed by: <a href="https://www.pragnakalp.com" target="_blank">Pragnakalp Techlabs</a></p>"""
)
demo.launch()