Initial commit

- .gitattributes +1 -0
- README.md +391 -0
- benchmark_results.txt +52 -0
- benchmark_translations.zip +3 -0
- config.json +45 -0
- pytorch_model.bin +3 -0
- source.spm +3 -0
- special_tokens_map.json +1 -0
- target.spm +3 -0
- tokenizer_config.json +1 -0
- vocab.json +0 -0

.gitattributes CHANGED

@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.spm filter=lfs diff=lfs merge=lfs -text

README.md ADDED

@@ -0,0 +1,391 @@
---
language:
- be
- bg
- hr
- ru
- sh
- sl
- sr_Cyrl
- sr_Latn
- uk
- zle
- zls

tags:
- translation

license: cc-by-4.0
model-index:
- name: opus-mt-tc-big-zle-zls
  results:
  - task:
      name: Translation rus-bul
      type: translation
      args: rus-bul
    dataset:
      name: flores101-devtest
      type: flores_101
      args: rus bul devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 28.9
  - task:
      name: Translation rus-hrv
      type: translation
      args: rus-hrv
    dataset:
      name: flores101-devtest
      type: flores_101
      args: rus hrv devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 23.2
  - task:
      name: Translation rus-mkd
      type: translation
      args: rus-mkd
    dataset:
      name: flores101-devtest
      type: flores_101
      args: rus mkd devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.3
  - task:
      name: Translation rus-slv
      type: translation
      args: rus-slv
    dataset:
      name: flores101-devtest
      type: flores_101
      args: rus slv devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 23.1
  - task:
      name: Translation rus-srp_Cyrl
      type: translation
      args: rus-srp_Cyrl
    dataset:
      name: flores101-devtest
      type: flores_101
      args: rus srp_Cyrl devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.1
  - task:
      name: Translation ukr-bul
      type: translation
      args: ukr-bul
    dataset:
      name: flores101-devtest
      type: flores_101
      args: ukr bul devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 30.8
  - task:
      name: Translation ukr-hrv
      type: translation
      args: ukr-hrv
    dataset:
      name: flores101-devtest
      type: flores_101
      args: ukr hrv devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.6
  - task:
      name: Translation ukr-mkd
      type: translation
      args: ukr-mkd
    dataset:
      name: flores101-devtest
      type: flores_101
      args: ukr mkd devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 26.2
  - task:
      name: Translation ukr-slv
      type: translation
      args: ukr-slv
    dataset:
      name: flores101-devtest
      type: flores_101
      args: ukr slv devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.2
  - task:
      name: Translation ukr-srp_Cyrl
      type: translation
      args: ukr-srp_Cyrl
    dataset:
      name: flores101-devtest
      type: flores_101
      args: ukr srp_Cyrl devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 26.2
  - task:
      name: Translation rus-bul
      type: translation
      args: rus-bul
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: rus-bul
    metrics:
    - name: BLEU
      type: bleu
      value: 53.7
  - task:
      name: Translation rus-hbs
      type: translation
      args: rus-hbs
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: rus-hbs
    metrics:
    - name: BLEU
      type: bleu
      value: 49.4
  - task:
      name: Translation rus-slv
      type: translation
      args: rus-slv
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: rus-slv
    metrics:
    - name: BLEU
      type: bleu
      value: 21.5
  - task:
      name: Translation rus-srp_Cyrl
      type: translation
      args: rus-srp_Cyrl
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: rus-srp_Cyrl
    metrics:
    - name: BLEU
      type: bleu
      value: 46.1
  - task:
      name: Translation rus-srp_Latn
      type: translation
      args: rus-srp_Latn
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: rus-srp_Latn
    metrics:
    - name: BLEU
      type: bleu
      value: 51.7
  - task:
      name: Translation ukr-bul
      type: translation
      args: ukr-bul
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: ukr-bul
    metrics:
    - name: BLEU
      type: bleu
      value: 61.3
  - task:
      name: Translation ukr-hbs
      type: translation
      args: ukr-hbs
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: ukr-hbs
    metrics:
    - name: BLEU
      type: bleu
      value: 52.1
  - task:
      name: Translation ukr-hrv
      type: translation
      args: ukr-hrv
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: ukr-hrv
    metrics:
    - name: BLEU
      type: bleu
      value: 50.1
  - task:
      name: Translation ukr-srp_Cyrl
      type: translation
      args: ukr-srp_Cyrl
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: ukr-srp_Cyrl
    metrics:
    - name: BLEU
      type: bleu
      value: 54.7
  - task:
      name: Translation ukr-srp_Latn
      type: translation
      args: ukr-srp_Latn
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: ukr-srp_Latn
    metrics:
    - name: BLEU
      type: bleu
      value: 53.4
---

# opus-mt-tc-big-zle-zls

Neural machine translation model for translating from East Slavic languages (zle) to South Slavic languages (zls).

This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained using the framework of [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to PyTorch using the transformers library by Hugging Face. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).

* Publications: [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (please cite if you use this model)

```
@inproceedings{tiedemann-thottingal-2020-opus,
    title = "{OPUS}-{MT} {--} Building open translation services for the World",
    author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
    booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
    month = nov,
    year = "2020",
    address = "Lisboa, Portugal",
    publisher = "European Association for Machine Translation",
    url = "https://aclanthology.org/2020.eamt-1.61",
    pages = "479--480",
}

@inproceedings{tiedemann-2020-tatoeba,
    title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
    author = {Tiedemann, J{\"o}rg},
    booktitle = "Proceedings of the Fifth Conference on Machine Translation",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.wmt-1.139",
    pages = "1174--1182",
}
```

## Model info

* Release: 2022-03-23
* source language(s): bel rus ukr
* target language(s): bul hbs hrv slv srp_Cyrl srp_Latn
* valid target language labels: >>bul<< >>hbs<< >>hrv<< >>slv<< >>srp_Cyrl<< >>srp_Latn<<
* model: transformer-big
* data: opusTCv20210807+bt ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
* tokenization: SentencePiece (spm32k,spm32k)
* original model: [opusTCv20210807+bt_transformer-big_2022-03-23.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/zle-zls/opusTCv20210807+bt_transformer-big_2022-03-23.zip)
* more information about released models: [OPUS-MT zle-zls README](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/zle-zls/README.md)
* more information about the model: [MarianMT](https://huggingface.co/docs/transformers/model_doc/marian)

This is a multilingual translation model with multiple target languages. A sentence-initial language token is required, in the form `>>id<<` (where `id` is a valid target language ID), e.g. `>>bul<<`.

## Usage

A short code example:

```python
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    ">>bul<< Новы каранавірус вельмі заразны.",
    ">>srp_Latn<< Моє ім'я — Саллі."
]

model_name = "pytorch-models/opus-mt-tc-big-zle-zls"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))

for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))

# expected output:
# Короната е силно заразна.
# Zovem se Sali.
```

You can also use OPUS-MT models with the transformers pipelines, for example:

```python
from transformers import pipeline
pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-big-zle-zls")
print(pipe(">>bul<< Новы каранавірус вельмі заразны."))

# expected output: Короната е силно заразна.
```

## Benchmarks

* test set translations: [opusTCv20210807+bt_transformer-big_2022-03-23.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zle-zls/opusTCv20210807+bt_transformer-big_2022-03-23.test.txt)
* test set scores: [opusTCv20210807+bt_transformer-big_2022-03-23.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zle-zls/opusTCv20210807+bt_transformer-big_2022-03-23.eval.txt)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)

| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|-------|-------|--------|
| rus-bul | tatoeba-test-v2021-08-07 | 0.71515 | 53.7 | 1247 | 8272 |
| rus-hbs | tatoeba-test-v2021-08-07 | 0.69192 | 49.4 | 2500 | 14736 |
| rus-slv | tatoeba-test-v2021-08-07 | 0.38051 | 21.5 | 657 | 3969 |
| rus-srp_Cyrl | tatoeba-test-v2021-08-07 | 0.66622 | 46.1 | 881 | 5407 |
| rus-srp_Latn | tatoeba-test-v2021-08-07 | 0.70990 | 51.7 | 1483 | 8552 |
| ukr-bul | tatoeba-test-v2021-08-07 | 0.77283 | 61.3 | 1020 | 5181 |
| ukr-hbs | tatoeba-test-v2021-08-07 | 0.69401 | 52.1 | 942 | 5130 |
| ukr-hrv | tatoeba-test-v2021-08-07 | 0.67202 | 50.1 | 389 | 2302 |
| ukr-srp_Cyrl | tatoeba-test-v2021-08-07 | 0.70064 | 54.7 | 205 | 1112 |
| ukr-srp_Latn | tatoeba-test-v2021-08-07 | 0.72405 | 53.4 | 348 | 1716 |
| bel-bul | flores101-devtest | 0.49528 | 16.1 | 1012 | 24700 |
| bel-hrv | flores101-devtest | 0.46308 | 12.4 | 1012 | 22423 |
| bel-mkd | flores101-devtest | 0.48608 | 13.5 | 1012 | 24314 |
| bel-slv | flores101-devtest | 0.44452 | 12.2 | 1012 | 23425 |
| bel-srp_Cyrl | flores101-devtest | 0.44424 | 12.6 | 1012 | 23456 |
| rus-bul | flores101-devtest | 0.58653 | 28.9 | 1012 | 24700 |
| rus-hrv | flores101-devtest | 0.53494 | 23.2 | 1012 | 22423 |
| rus-mkd | flores101-devtest | 0.55184 | 24.3 | 1012 | 24314 |
| rus-slv | flores101-devtest | 0.52201 | 23.1 | 1012 | 23425 |
| rus-srp_Cyrl | flores101-devtest | 0.53038 | 24.1 | 1012 | 23456 |
| ukr-bul | flores101-devtest | 0.59625 | 30.8 | 1012 | 24700 |
| ukr-hrv | flores101-devtest | 0.54530 | 24.6 | 1012 | 22423 |
| ukr-mkd | flores101-devtest | 0.56822 | 26.2 | 1012 | 24314 |
| ukr-slv | flores101-devtest | 0.53092 | 24.2 | 1012 | 23425 |
| ukr-srp_Cyrl | flores101-devtest | 0.54618 | 26.2 | 1012 | 23456 |
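
chr-F and BLEU in this table are the standard sacrebleu metrics. A minimal sketch, assuming you have hypothesis and reference sentence lists for one language pair, of how such scores are computed with the sacrebleu package (the toy data below is illustrative only):

```python
import sacrebleu

# One hypothesis per source sentence; sacrebleu expects a list of
# reference streams, each holding one reference per hypothesis.
hypotheses = ["Короната е силно заразна."]
references = [["Короната е силно заразна."]]

print(sacrebleu.corpus_bleu(hypotheses, references).score)  # BLEU
print(sacrebleu.corpus_chrf(hypotheses, references).score)  # chr-F
```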

## Acknowledgements

The work is supported by the [European Language Grid](https://www.european-language-grid.eu/) as [pilot project 2866](https://live.european-language-grid.eu/catalogue/#/resource/projects/2866), by the [FoTran project](https://www.helsinki.fi/en/researchgroups/natural-language-understanding-with-cross-lingual-grounding), funded by the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 771113), and the [MeMAD project](https://memad.eu/), funded by the European Union’s Horizon 2020 Research and Innovation Programme under grant agreement No 780069. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland.

## Model conversion info

* transformers version: 4.16.2
* OPUS-MT git hash: 1bdabf7
* port time: Thu Mar 24 00:46:26 EET 2022
* port machine: LM0-400-22516.local

benchmark_results.txt ADDED

@@ -0,0 +1,52 @@
bel-bul flores101-dev 0.49247 15.7 997 23520
bel-hrv flores101-dev 0.46000 11.9 997 21567
bel-mkd flores101-dev 0.47774 12.8 997 23159
bel-slv flores101-dev 0.44985 12.5 997 22448
bel-srp_Cyrl flores101-dev 0.44253 11.8 997 22384
rus-bul flores101-dev 0.57899 27.7 997 23520
rus-hrv flores101-dev 0.53388 23.0 997 21567
rus-mkd flores101-dev 0.54919 23.6 997 23159
rus-slv flores101-dev 0.53010 24.3 997 22448
rus-srp_Cyrl flores101-dev 0.53167 24.1 997 22384
bel-bul flores101-devtest 0.49528 16.1 1012 24700
bel-hrv flores101-devtest 0.46308 12.4 1012 22423
bel-mkd flores101-devtest 0.48608 13.5 1012 24314
bel-slv flores101-devtest 0.44452 12.2 1012 23425
bel-srp_Cyrl flores101-devtest 0.44424 12.6 1012 23456
rus-bul flores101-devtest 0.58653 28.9 1012 24700
rus-hrv flores101-devtest 0.53494 23.2 1012 22423
rus-mkd flores101-devtest 0.55184 24.3 1012 24314
rus-slv flores101-devtest 0.52201 23.1 1012 23425
rus-srp_Cyrl flores101-devtest 0.53038 24.1 1012 23456
ukr-bul flores101-devtest 0.59625 30.8 1012 24700
ukr-hrv flores101-devtest 0.54530 24.6 1012 22423
ukr-mkd flores101-devtest 0.56822 26.2 1012 24314
ukr-slv flores101-devtest 0.53092 24.2 1012 23425
ukr-srp_Cyrl flores101-devtest 0.54618 26.2 1012 23456
ukr-bul flores101-dev 0.59416 30.6 997 23520
ukr-hrv flores101-dev 0.53975 24.3 997 21567
ukr-mkd flores101-dev 0.55488 24.9 997 23159
ukr-slv flores101-dev 0.53045 24.5 997 22448
ukr-srp_Cyrl flores101-dev 0.54306 25.8 997 22384
rus-slv tatoeba-test-v2020-07-28 0.50977 34.4 378 2135
ukr-hbs tatoeba-test-v2020-07-28 0.69419 52.1 941 5128
ukr-slv tatoeba-test-v2020-07-28 0.29535 14.7 848 3823
ukr-srp_Cyrl tatoeba-test-v2020-07-28 0.70152 54.7 204 1110
rus-slv tatoeba-test-v2021-03-30 0.50384 33.9 447 2547
ukr-bul tatoeba-test-v2021-03-30 0.77339 61.4 1022 5192
ukr-hbs tatoeba-test-v2021-03-30 0.69451 52.1 953 5194
ukr-hrv tatoeba-test-v2021-03-30 0.67148 49.9 393 2330
ukr-slv tatoeba-test-v2021-03-30 0.29159 14.2 916 4141
ukr-srp_Cyrl tatoeba-test-v2021-03-30 0.70373 54.9 209 1131
ukr-srp_Latn tatoeba-test-v2021-03-30 0.72447 53.5 351 1733
rus-bul tatoeba-test-v2021-08-07 0.71515 53.7 1247 8272
rus-hbs tatoeba-test-v2021-08-07 0.69192 49.4 2500 14736
rus-slv tatoeba-test-v2021-08-07 0.38051 21.5 657 3969
rus-srp_Cyrl tatoeba-test-v2021-08-07 0.66622 46.1 881 5407
rus-srp_Latn tatoeba-test-v2021-08-07 0.70990 51.7 1483 8552
ukr-bul tatoeba-test-v2021-08-07 0.77283 61.3 1020 5181
ukr-hbs tatoeba-test-v2021-08-07 0.69401 52.1 942 5130
ukr-hrv tatoeba-test-v2021-08-07 0.67202 50.1 389 2302
ukr-slv tatoeba-test-v2021-08-07 0.28906 14.6 915 4267
ukr-srp_Cyrl tatoeba-test-v2021-08-07 0.70064 54.7 205 1112
ukr-srp_Latn tatoeba-test-v2021-08-07 0.72405 53.4 348 1716
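
The file is a flat whitespace-separated table (language pair, test set, chr-F, BLEU, sentence count, word count). A minimal sketch, assuming pandas is available and the file is local, for loading and querying it; the column names are illustrative, not part of the file:

```python
import pandas as pd

# Column layout of the benchmark file shown above.
cols = ["langpair", "testset", "chrF", "BLEU", "n_sent", "n_words"]
df = pd.read_csv("benchmark_results.txt", sep=r"\s+", names=cols)

# Example query: all tatoeba-test-v2021-08-07 rows, sorted by BLEU.
tatoeba = df[df["testset"] == "tatoeba-test-v2021-08-07"]
print(tatoeba.sort_values("BLEU", ascending=False))
```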

benchmark_translations.zip ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3a45800559823cfaa0dd41a5deb8dd055b6144f5688b38f81de51b66c5557d22
size 6486635
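
This file, like the other large binaries in the commit, is stored as a Git LFS pointer: the three lines above record the pointer spec version, the SHA-256 of the real content, and its byte size. A minimal sketch, assuming the actual file has been downloaded, for checking it against the pointer fields; the helper function here is written for illustration:

```python
import hashlib
from pathlib import Path

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> None:
    """Compare a downloaded file against the oid/size fields of its LFS pointer."""
    data = Path(path).read_bytes()
    assert len(data) == expected_size, "size mismatch"
    assert hashlib.sha256(data).hexdigest() == expected_oid, "sha256 mismatch"

verify_lfs_object(
    "benchmark_translations.zip",
    expected_oid="3a45800559823cfaa0dd41a5deb8dd055b6144f5688b38f81de51b66c5557d22",
    expected_size=6486635,
)
```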

config.json ADDED

@@ -0,0 +1,45 @@
{
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
  "bad_words_ids": [
    [
      57829
    ]
  ],
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_start_token_id": 57829,
  "decoder_vocab_size": 57830,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 11317,
  "forced_eos_token_id": 11317,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": 512,
  "max_position_embeddings": 1024,
  "model_type": "marian",
  "normalize_embedding": false,
  "num_beams": 4,
  "num_hidden_layers": 6,
  "pad_token_id": 57829,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float16",
  "transformers_version": "4.18.0.dev0",
  "use_cache": true,
  "vocab_size": 57830
}
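
The config describes a transformer-big Marian model: 6 encoder and 6 decoder layers, d_model 1024, 16 attention heads, 4096-dim feed-forward blocks, and a shared 57830-token vocabulary. A minimal sketch, assuming the transformers library, for inspecting these values programmatically:

```python
from transformers import AutoConfig

# Works with the Hub ID or a local clone of this repository.
config = AutoConfig.from_pretrained("Helsinki-NLP/opus-mt-tc-big-zle-zls")

print(config.model_type)                               # marian
print(config.encoder_layers, config.decoder_layers)    # 6 6
print(config.d_model, config.encoder_attention_heads)  # 1024 16
print(config.vocab_size)                               # 57830
```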

pytorch_model.bin ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3474e85e43ceb3c5f52d2aceeed6979bfd38340200073e31c724a713730ab45e
size 589793795

source.spm ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:65b06b918b96b013d576aec6060f5ff70b65c38b986da107c06f3006638e07fc
size 1008545

special_tokens_map.json ADDED

@@ -0,0 +1 @@
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}

target.spm ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ae10d5dc2d9be8bf00def389730a37432fbc27c34b5404ee5fd547623778ef8e
size 902152

tokenizer_config.json ADDED

@@ -0,0 +1 @@
{"source_lang": "zle", "target_lang": "zls", "unk_token": "<unk>", "eos_token": "</s>", "pad_token": "<pad>", "model_max_length": 512, "sp_model_kwargs": {}, "separate_vocabs": false, "special_tokens_map_file": null, "name_or_path": "marian-models/opusTCv20210807+bt_transformer-big_2022-03-23/zle-zls", "tokenizer_class": "MarianTokenizer"}
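
The tokenizer config wires source.spm and target.spm into a single MarianTokenizer with a shared vocabulary ("separate_vocabs": false) and a 512-token limit. A minimal sketch, assuming the transformers library, for loading it and listing the sentence-initial `>>id<<` labels it recognises:

```python
from transformers import MarianTokenizer

tokenizer = MarianTokenizer.from_pretrained("Helsinki-NLP/opus-mt-tc-big-zle-zls")

# Vocabulary entries of the form >>id<< act as target-language selectors;
# these should match the valid target language labels listed in the README.
print(tokenizer.supported_language_codes)
```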

vocab.json ADDED
The diff for this file is too large to render.