Update app.py
app.py CHANGED
@@ -1,39 +1,7 @@
-import torch
 import gradio as gr
 
-# Load an En-De Transformer model trained on WMT'19 data:
-en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-de.single_model', tokenizer='moses', bpe='fastbpe')
-# Load an En-Fr Transformer model trained on WMT'14 data:
-en2fr = torch.hub.load('pytorch/fairseq', 'transformer.wmt14.en-fr', tokenizer='moses', bpe='subword_nmt')
-
-def translate(text, lang):
-    if lang == "French":
-        # Manually tokenize:
-        en_toks = en2fr.tokenize(text)
-
-        # Manually apply BPE:
-        en_bpe = en2fr.apply_bpe(en_toks)
-
-        # Manually binarize:
-        en_bin = en2fr.binarize(en_bpe)
-
-        # Generate five translations with top-k sampling:
-        fr_bin = en2fr.generate(en_bin, beam=5, sampling=True, sampling_topk=20)
-
-        # Convert one of the samples to a string and detokenize
-        fr_sample = fr_bin[0]['tokens']
-        fr_bpe = en2fr.string(fr_sample)
-        fr_toks = en2fr.remove_bpe(fr_bpe)
-        fr = en2fr.detokenize(fr_toks)
-        return fr
-    else:
-        # Translate from En-De
-        de = en2de.translate(text)
-        return de
-
 inputs = [
     gr.inputs.Textbox(lines=5, label="Input Text in English"),
-    gr.inputs.Radio(choices=["French", "German"], type="value", label="Output Language")
 ]
 
 outputs = gr.outputs.Textbox(label="Output Text")
@@ -47,4 +15,4 @@ examples = [
     ["PyTorch Hub is a pre-trained model repository designed to facilitate research reproducibility."]
 ]
 
-gr.Interface(
+gr.Interface.load("huggingface/facebook/wmt19-en-de", inputs, outputs, title=title, description=description, article=article, examples=examples, analytics_enabled=False).launch()
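
Note: this commit drops the fairseq torch.hub pipeline (manual tokenize, BPE, binarize, generate, detokenize) and instead loads facebook/wmt19-en-de through gr.Interface.load, which is backed by the Hugging Face Hub rather than a local model download. As a rough sketch, the resulting app.py might look like the following; the title, description, and article values below are placeholders (they are defined outside the hunks shown in this diff), and inputs/outputs are passed as keyword arguments here instead of positionally.

import gradio as gr

# Placeholder metadata: the real title/description/article strings live in
# app.py outside the hunks shown above.
title = "English to German Translation"
description = "Translate English text to German with facebook/wmt19-en-de."
article = ""

inputs = [
    gr.inputs.Textbox(lines=5, label="Input Text in English"),
]
outputs = gr.outputs.Textbox(label="Output Text")

examples = [
    ["PyTorch Hub is a pre-trained model repository designed to facilitate research reproducibility."]
]

# Load the hosted model from the Hugging Face Hub; the components and metadata
# passed here should override the defaults inferred from the model card.
gr.Interface.load(
    "huggingface/facebook/wmt19-en-de",
    inputs=inputs,
    outputs=outputs,
    title=title,
    description=description,
    article=article,
    examples=examples,
    analytics_enabled=False,
).launch()

Because the model is served from the Hub, the Space no longer needs torch, fairseq, moses tokenization, or fastbpe/subword_nmt at build time, which is presumably the motivation for the change.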