import gradio as gr

title = "T5"
description = "Gradio Demo for T5. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."
# Link to the T5 paper, rendered below the demo.
article = ("<p style='text-align: center'>"
           "<a href='https://arxiv.org/abs/1910.10683' target='_blank'>"
           "Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer</a></p>")

examples = [
    ["My name is Sarah and I live in London", "t5-base"],
]

# Load hosted inference interfaces for each T5 checkpoint from the Hugging Face Hub.
io1 = gr.Interface.load("huggingface/t5-base")
io2 = gr.Interface.load("huggingface/t5-small")
io3 = gr.Interface.load("huggingface/t5-large")
io4 = gr.Interface.load("huggingface/t5-3b")


def inference(text, model):
    # Route the input text to the interface matching the selected checkpoint.
    if model == "t5-base":
        outtext = io1(text)
    elif model == "t5-small":
        outtext = io2(text)
    elif model == "t5-large":
        outtext = io3(text)
    else:
        outtext = io4(text)
    return outtext


gr.Interface(
    inference,
    [
        gr.inputs.Textbox(label="Input"),
        gr.inputs.Dropdown(
            choices=["t5-base", "t5-small", "t5-large", "t5-3b"],
            type="value",
            default="t5-base",
            label="model",
        ),
    ],
    gr.outputs.Textbox(label="Output"),
    examples=examples,
    article=article,
    title=title,
    description=description,
).launch(enable_queue=True)