import gradio as gr

title = "MarianMT"

description = "Gradio Demo for MarianMT. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."

article = "Marian: Fast Neural Machine Translation in C++"

examples = [
    ['我叫沃尔夫冈,我住在柏林。', "opus-mt-zh-en"]
]

io1 = gr.Interface.load("huggingface/Helsinki-NLP/opus-mt-zh-en")
io2 = gr.Interface.load("huggingface/Helsinki-NLP/opus-mt-en-de")

def inference(text, model):
    if model == "opus-mt-zh-en":
        outtext = io1(text)
    else:
        outtext = io2(text)
    return outtext

gr.Interface(
    inference,
    [
        gr.inputs.Textbox(label="Input"),
        gr.inputs.Dropdown(choices=["opus-mt-zh-en", "opus-mt-en-de"], type="value", default="opus-mt-zh-en", label="model"),
    ],
    [gr.outputs.Textbox(label="Output")],
    examples=examples,
    article=article,
    title=title,
    description=description,
).launch(enable_queue=True)