# bart-generation / app.py — Gradio demo that summarizes text with facebook/bart-large-cnn
import gradio as gr
from transformers import BartTokenizer, BartForConditionalGeneration

# Load the pretrained BART summarization model and its tokenizer once at startup.
model_name = 'facebook/bart-large-cnn'
tokenizer = BartTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
def summarize(inp):
    # Collapse newlines into spaces so sentences are not fused together.
    inp = inp.replace('\n', ' ')
    # Tokenize, truncating to BART's 1024-token input limit.
    input_ids = tokenizer.encode(inp, return_tensors='pt', max_length=1024, truncation=True)
    # Generate a summary with beam search, capped at 150 tokens.
    summary_ids = model.generate(input_ids, num_beams=4, max_length=150, early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary
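
# A minimal sketch of calling summarize() directly, without the Gradio UI.
# The sample text below is illustrative (not part of the original app); the
# call is kept commented out so the Space still only launches the interface.
# if __name__ == '__main__':
#     sample = ("The Eiffel Tower is 324 metres tall, about the same height "
#               "as an 81-storey building, and is the tallest structure in Paris.")
#     print(summarize(sample))
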
gr.Interface(fn=summarize, inputs=gr.Textbox(lines=7, label="Input Text"), outputs="text").launch(inline=False)