"""Gradio demo for ELECTRA extractive question answering.

Routes a (context, question) pair to one of two hosted Hugging Face
SQuAD2 models and displays the extracted answer.
"""
import gradio as gr

title = "ELECTRA"
description = (
    "Gradio Demo for ELECTRA. To use it, simply add your text, or click "
    "one of the examples to load them. Read more at the links below."
)
# NOTE(review): the original `article` literal was split across physical
# lines (a syntax error for a plain string). Reconstructed as a single
# string; it likely was an HTML link in the original — confirm the markup.
article = "ELECTRA: Pre-training Text Encoders as Discriminators Rather Than Generators"

# Each example row matches the input order: [context, model, question].
examples = [
    [
        "My name is Sarah and I live in London",
        "electra_large_discriminator_squad2_512",
        "Where do I live?",
    ]
]

# Load the two hosted model endpoints once at startup (network call).
io1 = gr.Interface.load("huggingface/ahotrod/electra_large_discriminator_squad2_512")
io2 = gr.Interface.load("huggingface/deepset/electra-base-squad2")


def inference(context, model, question):
    """Answer `question` from `context` using the selected model.

    Args:
        context: Passage of text to extract the answer from.
        model: Dropdown value; "electra_large_discriminator_squad2_512"
            selects the large ELECTRA model, anything else falls back to
            electra-base-squad2.
        question: The question to answer.

    Returns:
        The answer string produced by the chosen hosted interface.
    """
    if model == "electra_large_discriminator_squad2_512":
        return io1(context, question)
    return io2(context, question)


gr.Interface(
    inference,
    [
        gr.inputs.Textbox(label="Context", lines=10),
        gr.inputs.Dropdown(
            choices=[
                "electra_large_discriminator_squad2_512",
                "electra-base-squad2",
            ],
            type="value",
            default="electra_large_discriminator_squad2_512",
            label="model",
        ),
        gr.inputs.Textbox(label="Question Answering"),
    ],
    [gr.outputs.Textbox(label="Output")],
    examples=examples,
    article=article,
    title=title,
    description=description,
).launch(enable_queue=True)