import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

# Load the Pegasus model fine-tuned for financial summarization
tokenizer = AutoTokenizer.from_pretrained("human-centered-summarization/financial-summarization-pegasus")
model = AutoModelForSeq2SeqLM.from_pretrained("human-centered-summarization/financial-summarization-pegasus")
pipe = pipeline(task="text2text-generation", model=model, tokenizer=tokenizer)

# Read the example articles shown in the demo
with open('text1.txt') as f:
    text1 = f.read()
with open('text2.txt') as f:
    text2 = f.read()
with open('text3.txt') as f:
    text3 = f.read()

# Build the Gradio interface directly from the pipeline and launch it
gr.Interface.from_pipeline(
    pipe,
    title="Financial Summarization",
    description=(
        "Financial summarization using google/pegasus-xsum fine-tuned on a financial news dataset. "
        "The model can be found at https://huggingface.co/human-centered-summarization/financial-summarization-pegasus. "
        "Examples are news articles from business.inquirer.net."
    ),
    examples=[text1, text2, text3],
).launch()