tiedeman committed
Commit 4f62b61
1 Parent(s): eac2dd1

update in model card

Files changed (1)
  1. README.md +122 -122
README.md CHANGED
@@ -26,8 +26,8 @@ model-index:
       args: afr-deu
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: afr-deu
+      type: flores_101
+      args: afr deu devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -38,8 +38,8 @@ model-index:
       args: afr-eng
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: afr-eng
+      type: flores_101
+      args: afr eng devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -50,8 +50,8 @@ model-index:
       args: deu-afr
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: deu-afr
+      type: flores_101
+      args: deu afr devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -62,8 +62,8 @@ model-index:
       args: deu-eng
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: deu-eng
+      type: flores_101
+      args: deu eng devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -74,8 +74,8 @@ model-index:
       args: eng-afr
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: eng-afr
+      type: flores_101
+      args: eng afr devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -86,8 +86,8 @@ model-index:
       args: eng-deu
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: eng-deu
+      type: flores_101
+      args: eng deu devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -98,8 +98,8 @@ model-index:
       args: eng-nld
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: eng-nld
+      type: flores_101
+      args: eng nld devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -110,8 +110,8 @@ model-index:
       args: nld-eng
     dataset:
       name: flores101-devtest
-      type: flores101
-      args: nld-eng
+      type: flores_101
+      args: nld eng devtest
     metrics:
     - name: BLEU
       type: bleu
@@ -123,7 +123,7 @@ model-index:
     dataset:
      name: multi30k_test_2016_flickr
      type: multi30k-2016_flickr
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -135,7 +135,7 @@ model-index:
     dataset:
      name: multi30k_test_2016_flickr
      type: multi30k-2016_flickr
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -147,7 +147,7 @@ model-index:
     dataset:
      name: multi30k_test_2017_flickr
      type: multi30k-2017_flickr
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -159,7 +159,7 @@ model-index:
     dataset:
      name: multi30k_test_2017_flickr
      type: multi30k-2017_flickr
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -171,7 +171,7 @@ model-index:
     dataset:
      name: multi30k_test_2017_mscoco
      type: multi30k-2017_mscoco
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -183,7 +183,7 @@ model-index:
     dataset:
      name: multi30k_test_2017_mscoco
      type: multi30k-2017_mscoco
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -195,7 +195,7 @@ model-index:
     dataset:
      name: multi30k_test_2018_flickr
      type: multi30k-2018_flickr
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -207,7 +207,7 @@ model-index:
     dataset:
      name: multi30k_test_2018_flickr
      type: multi30k-2018_flickr
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -219,7 +219,7 @@ model-index:
     dataset:
      name: news-test2008
      type: news-test2008
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -229,9 +229,9 @@ model-index:
       type: translation
       args: afr-deu
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: afr-deu v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: afr-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -241,9 +241,9 @@ model-index:
       type: translation
       args: afr-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: afr-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: afr-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -253,9 +253,9 @@ model-index:
       type: translation
       args: afr-nld
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: afr-nld v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: afr-nld
     metrics:
     - name: BLEU
       type: bleu
@@ -265,9 +265,9 @@ model-index:
       type: translation
       args: deu-afr
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: deu-afr v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: deu-afr
     metrics:
     - name: BLEU
       type: bleu
@@ -277,9 +277,9 @@ model-index:
       type: translation
       args: deu-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: deu-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -289,9 +289,9 @@ model-index:
       type: translation
       args: deu-nld
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: deu-nld v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: deu-nld
     metrics:
     - name: BLEU
       type: bleu
@@ -301,9 +301,9 @@ model-index:
       type: translation
       args: eng-afr
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: eng-afr v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: eng-afr
     metrics:
     - name: BLEU
       type: bleu
@@ -313,9 +313,9 @@ model-index:
       type: translation
       args: eng-deu
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: eng-deu v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -325,9 +325,9 @@ model-index:
       type: translation
       args: eng-nld
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: eng-nld v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: eng-nld
     metrics:
     - name: BLEU
       type: bleu
@@ -337,9 +337,9 @@ model-index:
       type: translation
       args: fry-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: fry-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: fry-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -349,9 +349,9 @@ model-index:
       type: translation
       args: fry-nld
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: fry-nld v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: fry-nld
     metrics:
     - name: BLEU
       type: bleu
@@ -361,9 +361,9 @@ model-index:
       type: translation
       args: hrx-deu
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: hrx-deu v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: hrx-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -373,9 +373,9 @@ model-index:
       type: translation
       args: hrx-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: hrx-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: hrx-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -385,9 +385,9 @@ model-index:
       type: translation
       args: ltz-deu
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: ltz-deu v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: ltz-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -397,9 +397,9 @@ model-index:
       type: translation
       args: ltz-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: ltz-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: ltz-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -409,9 +409,9 @@ model-index:
       type: translation
       args: ltz-nld
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: ltz-nld v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: ltz-nld
     metrics:
     - name: BLEU
       type: bleu
@@ -421,9 +421,9 @@ model-index:
       type: translation
       args: nds-deu
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nds-deu v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nds-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -433,9 +433,9 @@ model-index:
       type: translation
       args: nds-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nds-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nds-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -445,9 +445,9 @@ model-index:
       type: translation
       args: nds-nld
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nds-nld v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nds-nld
     metrics:
     - name: BLEU
       type: bleu
@@ -457,9 +457,9 @@ model-index:
       type: translation
       args: nld-afr
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nld-afr v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nld-afr
     metrics:
     - name: BLEU
       type: bleu
@@ -469,9 +469,9 @@ model-index:
       type: translation
       args: nld-deu
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nld-deu v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nld-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -481,9 +481,9 @@ model-index:
       type: translation
       args: nld-eng
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nld-eng v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nld-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -493,9 +493,9 @@ model-index:
       type: translation
       args: nld-fry
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nld-fry v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nld-fry
     metrics:
     - name: BLEU
       type: bleu
@@ -505,9 +505,9 @@ model-index:
       type: translation
       args: nld-nds
     dataset:
-      name: tatoeba
-      type: tatoeba
-      args: nld-nds v2021-08-07
+      name: tatoeba-test-v2021-08-07
+      type: tatoeba_mt
+      args: nld-nds
     metrics:
     - name: BLEU
       type: bleu
@@ -519,7 +519,7 @@ model-index:
     dataset:
      name: newstest2009
      type: wmt-2009-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -531,7 +531,7 @@ model-index:
     dataset:
      name: newstest2010
      type: wmt-2010-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -543,7 +543,7 @@ model-index:
     dataset:
      name: newstest2010
      type: wmt-2010-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -555,7 +555,7 @@ model-index:
     dataset:
      name: newstest2011
      type: wmt-2011-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -567,7 +567,7 @@ model-index:
     dataset:
      name: newstest2012
      type: wmt-2012-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -579,7 +579,7 @@ model-index:
     dataset:
      name: newstest2013
      type: wmt-2013-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -591,7 +591,7 @@ model-index:
     dataset:
      name: newstest2013
      type: wmt-2013-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -603,7 +603,7 @@ model-index:
     dataset:
      name: newstest2014-deen
      type: wmt-2014-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -615,7 +615,7 @@ model-index:
     dataset:
      name: newstest2014-deen
      type: wmt-2014-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -627,7 +627,7 @@ model-index:
     dataset:
      name: newstest2015-deen
      type: wmt-2015-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -639,7 +639,7 @@ model-index:
     dataset:
      name: newstest2015-ende
      type: wmt-2015-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -651,7 +651,7 @@ model-index:
     dataset:
      name: newstest2016-deen
      type: wmt-2016-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -663,7 +663,7 @@ model-index:
     dataset:
      name: newstest2016-ende
      type: wmt-2016-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -675,7 +675,7 @@ model-index:
     dataset:
      name: newstest2017-deen
      type: wmt-2017-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -687,7 +687,7 @@ model-index:
     dataset:
      name: newstest2017-ende
      type: wmt-2017-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -699,7 +699,7 @@ model-index:
     dataset:
      name: newstest2018-deen
      type: wmt-2018-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -711,7 +711,7 @@ model-index:
     dataset:
      name: newstest2018-ende
      type: wmt-2018-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -723,7 +723,7 @@ model-index:
     dataset:
      name: newstest2019-deen
      type: wmt-2019-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -735,7 +735,7 @@ model-index:
     dataset:
      name: newstest2019-ende
      type: wmt-2019-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -747,7 +747,7 @@ model-index:
     dataset:
      name: newstest2020-deen
      type: wmt-2020-news
-      args: deu-eng
+      args: deu-eng
     metrics:
     - name: BLEU
       type: bleu
@@ -759,7 +759,7 @@ model-index:
     dataset:
      name: newstest2020-ende
      type: wmt-2020-news
-      args: eng-deu
+      args: eng-deu
     metrics:
     - name: BLEU
       type: bleu
@@ -826,7 +826,7 @@ src_text = [
     ">>afr<< I love your son."
 ]
 
-model_name = "pytorch-models/opus-mt-tc-base-en-fi"
+model_name = "pytorch-models/opus-mt-tc-base-gmw-gmw"
 tokenizer = MarianTokenizer.from_pretrained(model_name)
 model = MarianMTModel.from_pretrained(model_name)
 translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))
@@ -853,7 +853,7 @@ print(pipe(>>nld<< You need help.))
 
 * test set translations: [opus-2021-02-23.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/gmw-gmw/opus-2021-02-23.test.txt)
 * test set scores: [opus-2021-02-23.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/gmw-gmw/opus-2021-02-23.eval.txt)
-* benchmark results: [benchmarks.tsv](benchmarks.tsv)
+* benchmark results: [benchmark_results.txt](benchmark_results.txt)
 * benchmark output: [benchmark_translations.zip](benchmark_translations.zip)
 
 | langpair | testset | chr-F | BLEU | #sent | #words |
@@ -947,6 +947,6 @@ The work is supported by the [European Language Grid](https://www.european-langu
 ## Model conversion info
 
 * transformers version: 4.12.3
-* OPUS-MT git hash: 64dc362
-* port time: Fri Feb 11 00:49:23 EET 2022
+* OPUS-MT git hash: e56a06b
+* port time: Sun Feb 13 14:42:10 EET 2022
 * port machine: LM0-400-22516.local
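
For reference, a minimal usage sketch built around the corrected snippet in the `src_text` hunk above. It is not part of the model card itself; the checkpoint path mirrors the fixed `model_name` from the diff, and loading it from a Hugging Face Hub ID instead of the local `pytorch-models` directory is an assumption you may need to adjust.

```python
# Minimal sketch (assumption: the converted checkpoint is available at the path
# below, as in the corrected model card snippet; otherwise substitute the Hub ID
# of the published opus-mt-tc-base-gmw-gmw model).
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    ">>nld<< You need help.",      # target language is selected with the >>lang<< token
    ">>afr<< I love your son.",
]

model_name = "pytorch-models/opus-mt-tc-base-gmw-gmw"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))
for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))
```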