from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

# Load the pretrained BART summarization model and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("facebook/bart-large-cnn")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/bart-large-cnn")


def predict(pdf_path):
    # Stub: extract the text from the uploaded PDF and return its summary.
    # A possible implementation is sketched below the script.
    pass


description = "Upload a PDF file."
title = "Text Summarization from a PDF"

# Web UI: a file-upload input and a text output. The old gr.inputs / gr.outputs
# namespaces, the Carousel output, and the allow_screenshot flag were removed
# in current Gradio, so the plain File and Textbox components are used instead.
iface = gr.Interface(
    fn=predict,
    inputs=gr.File(label="PDF file", type="filepath"),
    outputs=gr.Textbox(label="Summary"),
    description=description,
    title=title,
)

iface.queue()  # replaces the deprecated enable_queue launch flag
iface.launch(share=True, show_error=True)
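

# A minimal sketch of what predict() could look like, assuming the third-party
# pypdf package is installed for PDF text extraction; the package choice and
# the generation settings below are illustrative, not part of the original
# code. To use it, replace the stub above with this definition before
# iface.launch() runs. BART accepts at most 1024 input tokens, so longer
# documents are truncated here; chunking the text and summarizing each chunk
# would be needed for full coverage.
def predict(pdf_path):
    from pypdf import PdfReader  # assumed dependency: pip install pypdf

    # Pull the raw text out of every page of the uploaded PDF.
    reader = PdfReader(pdf_path)
    text = "\n".join(page.extract_text() or "" for page in reader.pages)

    # Tokenize (truncating to the model's 1024-token limit) and summarize
    # with beam search.
    inputs = tokenizer(text, return_tensors="pt", max_length=1024, truncation=True)
    summary_ids = model.generate(
        inputs["input_ids"], num_beams=4, min_length=40, max_length=150
    )
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)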