```py
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("sobamchan/bart-large-scitldr")
model = AutoModelForSeq2SeqLM.from_pretrained("sobamchan/bart-large-scitldr")

text = "Abstract of a paper."

# Tokenize the abstract, generate a TLDR-style summary, and decode it back to text.
batch = tokenizer(text, return_tensors="pt")
generated_ids = model.generate(batch["input_ids"])
print(tokenizer.batch_decode(generated_ids, skip_special_tokens=True))
```