Create app.py
app.py
ADDED
@@ -0,0 +1,50 @@
+import torch
+import gradio as gr
+
+# Load an En-De Transformer model trained on WMT'19 data:
+en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-de.single_model', tokenizer='moses', bpe='fastbpe')
+# Load an En-Fr Transformer model trained on WMT'14 data:
+en2fr = torch.hub.load('pytorch/fairseq', 'transformer.wmt14.en-fr', tokenizer='moses', bpe='subword_nmt')
+
+def translate(text, lang):
+    if lang == "French":
+        # Manually tokenize:
+        en_toks = en2fr.tokenize(text)
+
+        # Manually apply BPE:
+        en_bpe = en2fr.apply_bpe(en_toks)
+
+        # Manually binarize:
+        en_bin = en2fr.binarize(en_bpe)
+
+        # Generate five translations with top-k sampling:
+        fr_bin = en2fr.generate(en_bin, beam=5, sampling=True, sampling_topk=20)
+
+        # Convert one of the samples to a string and detokenize:
+        fr_sample = fr_bin[0]['tokens']
+        fr_bpe = en2fr.string(fr_sample)
+        fr_toks = en2fr.remove_bpe(fr_bpe)
+        fr = en2fr.detokenize(fr_toks)
+        return fr
+    else:
+        # Translate from En-De:
+        de = en2de.translate(text)
+        return de
+
+inputs = [
+    gr.inputs.Textbox(lines=5, label="Input Text in English"),
+    gr.inputs.Radio(choices=["French", "German"], type="value", label="Output Language")
+]
+
+outputs = gr.outputs.Textbox(label="Output Text")
+
+title = "Transformer (NMT)"
+description = "Gradio demo for Transformer (NMT). To use it, simply add your text, or click one of the examples to load them. Read more at the links below."
+article = """<p style='text-align: center'><a href='https://arxiv.org/abs/1806.00187'>Scaling Neural Machine Translation</a> | <a href='https://github.com/pytorch/fairseq/'>Github Repo</a></p>"""
+
+examples = [
+    ["Hello world!"],
+    ["PyTorch Hub is a pre-trained model repository designed to facilitate research reproducibility."]
+]
+
+gr.Interface(translate, inputs, outputs, title=title, description=description, article=article, examples=examples, analytics_enabled=False).launch()
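As a quick sanity check outside the Gradio UI, the same fairseq hub model used by app.py can be exercised directly. This is a minimal sketch, not part of the commit, and it assumes the torch.hub download succeeds and that fastBPE, subword_nmt, and sacremoses are installed so the tokenizer/BPE pipelines can be built:

```python
# Hypothetical smoke test, separate from app.py: load the En-De hub model
# and translate a sample sentence without launching the web interface.
import torch

en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-de.single_model',
                       tokenizer='moses', bpe='fastbpe')

# translate() runs the full tokenize -> BPE -> generate -> detokenize pipeline.
print(en2de.translate("Hello world!"))  # expected: a German translation such as "Hallo Welt!"
```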