Initial commit
Browse files

- .gitattributes +1 -0
- README.md +378 -0
- benchmark_results.txt +28 -0
- benchmark_translations.zip +3 -0
- config.json +45 -0
- pytorch_model.bin +3 -0
- source.spm +3 -0
- special_tokens_map.json +1 -0
- target.spm +3 -0
- tokenizer_config.json +1 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -26,3 +26,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.spm filter=lfs diff=lfs merge=lfs -text

README.md
ADDED
@@ -0,0 +1,378 @@
---
language:
- cs
- dsb
- en
- hsb
- pl
- zlw

tags:
- translation

license: cc-by-4.0
model-index:
- name: opus-mt-tc-big-zlw-en
  results:
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: ces eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 41.2
  - task:
      name: Translation pol-eng
      type: translation
      args: pol-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: pol eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 29.6
  - task:
      name: Translation slk-eng
      type: translation
      args: slk-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slk eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 40.0
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: multi30k_test_2016_flickr
      type: multi30k-2016_flickr
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 37.6
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: multi30k_test_2018_flickr
      type: multi30k-2018_flickr
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 37.4
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: news-test2008
      type: news-test2008
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 26.3
  - task:
      name: Translation pol-eng
      type: translation
      args: pol-eng
    dataset:
      name: newsdev2020
      type: newsdev2020
      args: pol-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 32.7
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 57.4
  - task:
      name: Translation pol-eng
      type: translation
      args: pol-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: pol-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 55.7
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2009
      type: wmt-2009-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 29.5
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2010
      type: wmt-2010-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 30.7
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2011
      type: wmt-2011-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 30.9
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2012
      type: wmt-2012-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 29.4
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2013
      type: wmt-2013-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 32.8
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2014
      type: wmt-2014-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 38.7
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2015
      type: wmt-2015-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 33.4
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2016
      type: wmt-2016-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 37.1
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2017
      type: wmt-2017-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 32.5
  - task:
      name: Translation ces-eng
      type: translation
      args: ces-eng
    dataset:
      name: newstest2018
      type: wmt-2018-news
      args: ces-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 33.1
  - task:
      name: Translation pol-eng
      type: translation
      args: pol-eng
    dataset:
      name: newstest2020
      type: wmt-2020-news
      args: pol-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 32.6
---

# opus-mt-tc-big-zlw-en

Neural machine translation model for translating from West Slavic languages (zlw) to English (en).

This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained using the amazing framework of [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to PyTorch using the transformers library by Hugging Face. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).

* Publications: [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (please cite them if you use this model)

```
@inproceedings{tiedemann-thottingal-2020-opus,
    title = "{OPUS}-{MT} {--} Building open translation services for the World",
    author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
    booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
    month = nov,
    year = "2020",
    address = "Lisboa, Portugal",
    publisher = "European Association for Machine Translation",
    url = "https://aclanthology.org/2020.eamt-1.61",
    pages = "479--480",
}

@inproceedings{tiedemann-2020-tatoeba,
    title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
    author = {Tiedemann, J{\"o}rg},
    booktitle = "Proceedings of the Fifth Conference on Machine Translation",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.wmt-1.139",
    pages = "1174--1182",
}
```

## Model info

* Release: 2022-03-17
* source language(s): ces dsb hsb pol
* target language(s): eng
* model: transformer-big
* data: opusTCv20210807+bt ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
* tokenization: SentencePiece (spm32k,spm32k); see the sketch below for inspecting the bundled SentencePiece models
* original model: [opusTCv20210807+bt_transformer-big_2022-03-17.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/zlw-eng/opusTCv20210807+bt_transformer-big_2022-03-17.zip)
* more information about released models: [OPUS-MT zlw-eng README](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/zlw-eng/README.md)

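The SentencePiece models listed above (source.spm and target.spm) can be loaded directly for inspection. A minimal sketch, assuming the `sentencepiece` package is installed and source.spm has been downloaded locally; the MarianTokenizer shown in the Usage section handles these files automatically, so this is only for inspection:

```python
# Minimal sketch (assumption: `sentencepiece` is installed and source.spm has
# been downloaded from this repository into the working directory).
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="source.spm")
print(sp.get_piece_size())  # size of the source-side subword vocabulary (spm32k)
print(sp.encode("Myślisz, że Tom planuje to zrobić?", out_type=str))  # subword pieces
```
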
## Usage

A short example code:

```python
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    "Aoi'ego hobby to tańczenie.",
    "Myślisz, że Tom planuje to zrobić?"
]

model_name = "pytorch-models/opus-mt-tc-big-zlw-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))

for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))

# expected output:
#     Aoi's hobby is dancing.
#     You think Tom's planning on doing that?
```

You can also use OPUS-MT models with the transformers pipelines, for example:

```python
from transformers import pipeline
pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-big-zlw-en")
print(pipe("Aoi'ego hobby to tańczenie."))

# expected output: Aoi's hobby is dancing.
```

## Benchmarks

* test set translations: [opusTCv20210807+bt_transformer-big_2022-03-17.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zlw-eng/opusTCv20210807+bt_transformer-big_2022-03-17.test.txt)
* test set scores: [opusTCv20210807+bt_transformer-big_2022-03-17.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zlw-eng/opusTCv20210807+bt_transformer-big_2022-03-17.eval.txt)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)

| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|------|-------|--------|
| ces-eng | tatoeba-test-v2021-08-07 | 0.71861 | 57.4 | 13824 | 105010 |
| pol-eng | tatoeba-test-v2021-08-07 | 0.70544 | 55.7 | 10099 | 75766 |
| ces-eng | flores101-devtest | 0.66444 | 41.2 | 1012 | 24721 |
| pol-eng | flores101-devtest | 0.58301 | 29.6 | 1012 | 24721 |
| slk-eng | flores101-devtest | 0.66103 | 40.0 | 1012 | 24721 |
| ces-eng | multi30k_test_2016_flickr | 0.61482 | 37.6 | 1000 | 12955 |
| ces-eng | multi30k_test_2018_flickr | 0.61405 | 37.4 | 1071 | 14689 |
| pol-eng | newsdev2020 | 0.60478 | 32.7 | 2000 | 46654 |
| ces-eng | newssyscomb2009 | 0.56495 | 30.2 | 502 | 11818 |
| ces-eng | news-test2008 | 0.54300 | 26.3 | 2051 | 49380 |
| ces-eng | newstest2009 | 0.56309 | 29.5 | 2525 | 65399 |
| ces-eng | newstest2010 | 0.57778 | 30.7 | 2489 | 61711 |
| ces-eng | newstest2011 | 0.57336 | 30.9 | 3003 | 74681 |
| ces-eng | newstest2012 | 0.56761 | 29.4 | 3003 | 72812 |
| ces-eng | newstest2013 | 0.58809 | 32.8 | 3000 | 64505 |
| ces-eng | newstest2014 | 0.64401 | 38.7 | 3003 | 68065 |
| ces-eng | newstest2015 | 0.58607 | 33.4 | 2656 | 53569 |
| ces-eng | newstest2016 | 0.61780 | 37.1 | 2999 | 64670 |
| ces-eng | newstest2017 | 0.58259 | 32.5 | 3005 | 61721 |
| ces-eng | newstest2018 | 0.58677 | 33.1 | 2983 | 63495 |
| pol-eng | newstest2020 | 0.60047 | 32.6 | 1001 | 21755 |

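The chr-F and BLEU values above can, in principle, be recomputed with sacrebleu from the released test set translations. A minimal sketch, assuming `sacrebleu` is installed and hypothesis/reference sentences are available as plain-text files (the file names below are placeholders, not files shipped in this repository):

```python
# Minimal sketch (assumption: `sacrebleu` is installed; hyp.txt / ref.txt are
# placeholder names for one-sentence-per-line hypothesis and reference files).
import sacrebleu

with open("hyp.txt", encoding="utf-8") as f:
    hyps = [line.rstrip("\n") for line in f]
with open("ref.txt", encoding="utf-8") as f:
    refs = [line.rstrip("\n") for line in f]

print(sacrebleu.corpus_bleu(hyps, [refs]).score)  # BLEU, as in the table above
print(sacrebleu.corpus_chrf(hyps, [refs]).score)  # chr-F
```
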
## Acknowledgements

The work is supported by the [European Language Grid](https://www.european-language-grid.eu/) as [pilot project 2866](https://live.european-language-grid.eu/catalogue/#/resource/projects/2866), by the [FoTran project](https://www.helsinki.fi/en/researchgroups/natural-language-understanding-with-cross-lingual-grounding), funded by the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 771113), and the [MeMAD project](https://memad.eu/), funded by the European Union’s Horizon 2020 Research and Innovation Programme under grant agreement No 780069. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland.

## Model conversion info

* transformers version: 4.16.2
* OPUS-MT git hash: 3405783
* port time: Wed Apr 13 20:19:48 EEST 2022
* port machine: LM0-400-22516.local

benchmark_results.txt
ADDED
@@ -0,0 +1,28 @@
ces-eng flores101-dev 0.65704 39.7 997 23555
pol-eng flores101-dev 0.58424 29.8 997 23555
slk-eng flores101-dev 0.66379 40.1 997 23555
ces-eng flores101-devtest 0.66444 41.2 1012 24721
pol-eng flores101-devtest 0.58301 29.6 1012 24721
slk-eng flores101-devtest 0.66103 40.0 1012 24721
ces-eng multi30k_test_2016_flickr 0.61482 37.6 1000 12955
ces-eng multi30k_test_2018_flickr 0.61405 37.4 1071 14689
pol-eng newsdev2020 0.60478 32.7 2000 46654
ces-eng newssyscomb2009 0.56495 30.2 502 11818
ces-eng news-test2008 0.54300 26.3 2051 49380
ces-eng newstest2009 0.56309 29.5 2525 65399
ces-eng newstest2010 0.57778 30.7 2489 61711
ces-eng newstest2011 0.57336 30.9 3003 74681
ces-eng newstest2012 0.56761 29.4 3003 72812
ces-eng newstest2013 0.58809 32.8 3000 64505
ces-eng newstest2014 0.64401 38.7 3003 68065
ces-eng newstest2015 0.58607 33.4 2656 53569
ces-eng newstest2016 0.61780 37.1 2999 64670
ces-eng newstest2017 0.58259 32.5 3005 61721
ces-eng newstest2018 0.58677 33.1 2983 63495
pol-eng newstest2020 0.60047 32.6 1001 21755
ces-eng tatoeba-test-v2020-07-28 0.72722 58.5 10000 75376
pol-eng tatoeba-test-v2020-07-28 0.70515 55.7 10000 75002
ces-eng tatoeba-test-v2021-03-30 0.72179 57.7 12076 91333
pol-eng tatoeba-test-v2021-03-30 0.70528 55.7 10056 75479
ces-eng tatoeba-test-v2021-08-07 0.71861 57.4 13824 105010
pol-eng tatoeba-test-v2021-08-07 0.70544 55.7 10099 75766

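A minimal sketch, assuming a local copy of this file, for reading these rows; the whitespace-separated columns are language pair, test set, chr-F, BLEU, number of sentences and number of words:

```python
# Minimal sketch: parse benchmark_results.txt (columns: langpair, testset,
# chr-F, BLEU, #sent, #words) and report the highest-BLEU entry.
rows = []
with open("benchmark_results.txt", encoding="utf-8") as f:
    for line in f:
        langpair, testset, chrf, bleu, n_sent, n_words = line.split()
        rows.append((langpair, testset, float(chrf), float(bleu), int(n_sent), int(n_words)))

print(max(rows, key=lambda r: r[3]))  # best BLEU: the ces-eng tatoeba-test-v2020-07-28 entry
```
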
benchmark_translations.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ad4d5097406a80af1e7a9f3565017934d47c324754f2c9a7da0ea6f71879ce98
size 7975989

config.json
ADDED
@@ -0,0 +1,45 @@
{
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
  "bad_words_ids": [
    [
      58101
    ]
  ],
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_start_token_id": 58101,
  "decoder_vocab_size": 58102,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 42640,
  "forced_eos_token_id": 42640,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": 512,
  "max_position_embeddings": 1024,
  "model_type": "marian",
  "normalize_embedding": false,
  "num_beams": 4,
  "num_hidden_layers": 6,
  "pad_token_id": 58101,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float16",
  "transformers_version": "4.18.0.dev0",
  "use_cache": true,
  "vocab_size": 58102
}

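When the model is loaded with transformers, the hyperparameters in this file surface as attributes of the model configuration. A minimal sketch, assuming the `transformers` package is installed:

```python
# Minimal sketch (assumption: `transformers` is installed): the values from
# config.json are exposed on the loaded configuration object.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("Helsinki-NLP/opus-mt-tc-big-zlw-en")
print(config.model_type)                                              # "marian"
print(config.d_model, config.encoder_layers, config.decoder_layers)   # 1024 6 6
print(config.num_beams, config.max_length)                            # default generation settings: 4 512
```
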
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:19eb1ca44900a7a5d90e18109a9b704d86d48f5e6bafa7379ee1d25e2fe06c14
size 590908419

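Large binaries in this repository are stored with Git LFS, so the committed file is only a pointer recording the sha256 oid and size shown above. A minimal sketch, assuming a fully downloaded copy of pytorch_model.bin, for checking it against the pointer:

```python
# Minimal sketch: verify a downloaded pytorch_model.bin against the Git LFS
# pointer above (sha256 oid and file size).
import hashlib
import os

expected_oid = "19eb1ca44900a7a5d90e18109a9b704d86d48f5e6bafa7379ee1d25e2fe06c14"
expected_size = 590908419

h = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected_oid and os.path.getsize("pytorch_model.bin") == expected_size)
```
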
source.spm
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0f6c4f92c0339fc552f89bf13f0131bf7f219a02bcfd160492d9e5f23829e80a
size 820935

special_tokens_map.json
ADDED
@@ -0,0 +1 @@
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}

target.spm
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:578a275c6f4a88a733d01da296827a167eaa46c289c113b51446ab1470ec1fa1
size 796568

tokenizer_config.json
ADDED
@@ -0,0 +1 @@
{"source_lang": "zlw", "target_lang": "en", "unk_token": "<unk>", "eos_token": "</s>", "pad_token": "<pad>", "model_max_length": 512, "sp_model_kwargs": {}, "separate_vocabs": false, "special_tokens_map_file": null, "name_or_path": "marian-models/opusTCv20210807+bt_transformer-big_2022-03-17/zlw-en", "tokenizer_class": "MarianTokenizer"}

vocab.json
ADDED
The diff for this file is too large to render.
See raw diff