Gabriele Sarti committed
Commit 6c5075e
1 Parent(s): 2ff9a04

Initial commit

README.md CHANGED
@@ -1,3 +1,59 @@
  ---
- license: cc-by-nc-sa-4.0
+ language:
+ - en
+ - de
+ tags:
+ - translation
+ - opus-mt-tc
+ license: cc-by-4.0
+ model-index:
+ - name: opus-mt-tc-base-en-de
+   results:
+   - task:
+       name: Translation eng-deu
+       type: translation
+       args: eng-deu
+     dataset:
+       name: tatoeba-test-v2021-08-07
+       type: tatoeba_mt
+       args: eng-deu
+     metrics:
+     - name: BLEU
+       type: bleu
+       value: 43.7
  ---
+
+ # Opus Tatoeba English-German
+
+ *This model was obtained by running the script [convert_marian_to_pytorch.py](https://github.com/huggingface/transformers/blob/master/src/transformers/models/marian/convert_marian_to_pytorch.py). The original models were trained by [Jörg Tiedemann](https://blogs.helsinki.fi/tiedeman/) using the [MarianNMT](https://marian-nmt.github.io/) library. See all available `MarianMTModel` models on the profile of the [Helsinki NLP](https://huggingface.co/Helsinki-NLP) group.*
+
+ * dataset: opusTCv20210807+bt
+ * model: transformer-big
+ * source language(s): eng
+ * target language(s): deu
+ * raw source language(s): eng
+ * raw target language(s): deu
+ * pre-processing: normalization + SentencePiece (spm32k,spm32k)
+ * download: [opusTCv20210807+bt-2021-12-08.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/eng-deu/opusTCv20210807+bt-2021-12-08.zip)
+ * test set translations: [opusTCv20210807+bt-2021-12-08.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/eng-deu/opusTCv20210807+bt-2021-12-08.test.txt)
+ * test set scores: [opusTCv20210807+bt-2021-12-08.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/eng-deu/opusTCv20210807+bt-2021-12-08.eval.txt)
+
+ ## Benchmarks
+
+ | testset | BLEU | chr-F | #sent | #words | BP |
+ |---------|------|-------|-------|--------|----|
+ | newssyscomb2009.eng-deu | 24.3 | 0.5462 | 502 | 11271 | 0.993 |
+ | news-test2008.eng-deu | 24.7 | 0.5412 | 2051 | 47427 | 1.000 |
+ | newstest2009.eng-deu | 23.6 | 0.5385 | 2525 | 62816 | 0.999 |
+ | newstest2010.eng-deu | 26.9 | 0.5589 | 2489 | 61511 | 0.966 |
+ | newstest2011.eng-deu | 24.1 | 0.5364 | 3003 | 72981 | 0.990 |
+ | newstest2012.eng-deu | 24.6 | 0.5375 | 3003 | 72886 | 0.972 |
+ | newstest2013.eng-deu | 28.3 | 0.5636 | 3000 | 63737 | 0.988 |
+ | newstest2014-deen.eng-deu | 30.9 | 0.6084 | 3003 | 62964 | 1.000 |
+ | newstest2015-ende.eng-deu | 33.2 | 0.6106 | 2169 | 44260 | 1.000 |
+ | newstest2016-ende.eng-deu | 39.8 | 0.6595 | 2999 | 62670 | 0.993 |
+ | newstest2017-ende.eng-deu | 32.0 | 0.6047 | 3004 | 61291 | 1.000 |
+ | newstest2018-ende.eng-deu | 48.8 | 0.7146 | 2998 | 64276 | 1.000 |
+ | newstest2019-ende.eng-deu | 45.0 | 0.6821 | 1997 | 48969 | 0.995 |
+ | Tatoeba-test-v2021-08-07.eng-deu | 43.7 | 0.6442 | 10000 | 85728 | 1.000 |
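The card above does not ship a usage snippet; a minimal sketch with the `transformers` Marian classes, assuming a local checkout of this repository (the path below is a placeholder), would look like this:

```python
# Minimal usage sketch. MODEL_PATH is a placeholder: point it at a local
# checkout of this repository or at the corresponding model id on the Hub.
from transformers import MarianMTModel, MarianTokenizer

MODEL_PATH = "."  # local checkout of this repository

tokenizer = MarianTokenizer.from_pretrained(MODEL_PATH)
model = MarianMTModel.from_pretrained(MODEL_PATH)

src_text = ["I can't wait to see you again."]
batch = tokenizer(src_text, return_tensors="pt", padding=True)

# generate() picks up the defaults stored in config.json (beam search, num_beams=6).
translated = model.generate(**batch)
print(tokenizer.batch_decode(translated, skip_special_tokens=True))
```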
config.json ADDED
@@ -0,0 +1,45 @@
+ {
+   "activation_dropout": 0.0,
+   "activation_function": "relu",
+   "architectures": [
+     "MarianMTModel"
+   ],
+   "attention_dropout": 0.0,
+   "bad_words_ids": [
+     [
+       65000
+     ]
+   ],
+   "bos_token_id": 0,
+   "classifier_dropout": 0.0,
+   "d_model": 1024,
+   "decoder_attention_heads": 16,
+   "decoder_ffn_dim": 4096,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 6,
+   "decoder_start_token_id": 65000,
+   "decoder_vocab_size": 65001,
+   "dropout": 0.1,
+   "encoder_attention_heads": 16,
+   "encoder_ffn_dim": 4096,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 6,
+   "eos_token_id": 0,
+   "forced_eos_token_id": 0,
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "max_length": 512,
+   "max_position_embeddings": 1024,
+   "model_type": "marian",
+   "normalize_embedding": false,
+   "num_beams": 6,
+   "num_hidden_layers": 6,
+   "pad_token_id": 65000,
+   "scale_embedding": true,
+   "share_encoder_decoder_embeddings": true,
+   "static_position_embeddings": true,
+   "torch_dtype": "float16",
+   "transformers_version": "4.21.0.dev0",
+   "use_cache": true,
+   "vocab_size": 65001
+ }
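config.json carries both the transformer-big geometry and the generation defaults that `generate()` falls back to (beam search with `num_beams: 6`, `max_length: 512`, forced EOS). A small sketch for inspecting them, again with a placeholder path to a local checkout of this repository:

```python
# Sketch: read the architecture and generation defaults from config.json.
# MODEL_PATH is a placeholder for a local checkout of this repository.
from transformers import MarianConfig

MODEL_PATH = "."

config = MarianConfig.from_pretrained(MODEL_PATH)
print(config.model_type)                       # "marian"
print(config.d_model, config.encoder_layers)   # 1024, 6 -- transformer-big geometry
print(config.num_beams, config.max_length)     # 6, 512 -- defaults used by generate()
print(config.decoder_start_token_id)           # 65000, the <pad> id, also used to start decoding
```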
opusTCv20210807+bt.spm32k-spm32k.transformer-big.train1.log ADDED
The diff for this file is too large to render.
opusTCv20210807+bt.spm32k-spm32k.transformer-big.valid1.log ADDED
@@ -0,0 +1,56 @@
+ [2021-12-01 16:52:50] [valid] Ep. 1 : Up. 10000 : perplexity : 4.62405 : new best
+ [2021-12-01 18:42:19] [valid] Ep. 1 : Up. 20000 : perplexity : 3.17074 : new best
+ [2021-12-01 20:31:43] [valid] Ep. 1 : Up. 30000 : perplexity : 2.89787 : new best
+ [2021-12-01 22:21:07] [valid] Ep. 1 : Up. 40000 : perplexity : 2.76201 : new best
+ [2021-12-02 00:10:29] [valid] Ep. 1 : Up. 50000 : perplexity : 2.68948 : new best
+ [2021-12-02 01:59:57] [valid] Ep. 1 : Up. 60000 : perplexity : 2.64063 : new best
+ [2021-12-02 03:49:30] [valid] Ep. 1 : Up. 70000 : perplexity : 2.60022 : new best
+ [2021-12-02 05:38:44] [valid] Ep. 1 : Up. 80000 : perplexity : 2.56641 : new best
+ [2021-12-02 07:28:03] [valid] Ep. 1 : Up. 90000 : perplexity : 2.53931 : new best
+ [2021-12-02 09:17:19] [valid] Ep. 1 : Up. 100000 : perplexity : 2.51865 : new best
+ [2021-12-02 11:06:38] [valid] Ep. 1 : Up. 110000 : perplexity : 2.50066 : new best
+ [2021-12-02 12:55:59] [valid] Ep. 1 : Up. 120000 : perplexity : 2.48827 : new best
+ [2021-12-02 14:45:20] [valid] Ep. 1 : Up. 130000 : perplexity : 2.47352 : new best
+ [2021-12-02 16:34:59] [valid] Ep. 1 : Up. 140000 : perplexity : 2.46277 : new best
+ [2021-12-02 18:24:13] [valid] Ep. 1 : Up. 150000 : perplexity : 2.4536 : new best
+ [2021-12-02 20:13:45] [valid] Ep. 1 : Up. 160000 : perplexity : 2.44542 : new best
+ [2021-12-02 22:03:11] [valid] Ep. 1 : Up. 170000 : perplexity : 2.43751 : new best
+ [2021-12-02 23:52:31] [valid] Ep. 1 : Up. 180000 : perplexity : 2.42974 : new best
+ [2021-12-03 01:41:53] [valid] Ep. 1 : Up. 190000 : perplexity : 2.42317 : new best
+ [2021-12-03 17:40:24] [valid] Ep. 1 : Up. 200000 : perplexity : 2.41677 : new best
+ [2021-12-03 19:30:07] [valid] Ep. 1 : Up. 210000 : perplexity : 2.41299 : new best
+ [2021-12-03 21:19:37] [valid] Ep. 1 : Up. 220000 : perplexity : 2.40513 : new best
+ [2021-12-03 23:09:03] [valid] Ep. 1 : Up. 230000 : perplexity : 2.39959 : new best
+ [2021-12-04 00:58:20] [valid] Ep. 1 : Up. 240000 : perplexity : 2.39771 : new best
+ [2021-12-04 02:47:43] [valid] Ep. 1 : Up. 250000 : perplexity : 2.39285 : new best
+ [2021-12-04 04:37:02] [valid] Ep. 1 : Up. 260000 : perplexity : 2.38914 : new best
+ [2021-12-04 06:26:43] [valid] Ep. 1 : Up. 270000 : perplexity : 2.38545 : new best
+ [2021-12-04 08:15:58] [valid] Ep. 1 : Up. 280000 : perplexity : 2.38241 : new best
+ [2021-12-04 10:05:13] [valid] Ep. 1 : Up. 290000 : perplexity : 2.37948 : new best
+ [2021-12-04 11:54:23] [valid] Ep. 1 : Up. 300000 : perplexity : 2.37573 : new best
+ [2021-12-04 13:43:34] [valid] Ep. 1 : Up. 310000 : perplexity : 2.37134 : new best
+ [2021-12-04 15:32:47] [valid] Ep. 1 : Up. 320000 : perplexity : 2.36864 : new best
+ [2021-12-04 17:22:00] [valid] Ep. 1 : Up. 330000 : perplexity : 2.36621 : new best
+ [2021-12-04 19:11:32] [valid] Ep. 1 : Up. 340000 : perplexity : 2.36401 : new best
+ [2021-12-04 21:00:49] [valid] Ep. 1 : Up. 350000 : perplexity : 2.36358 : new best
+ [2021-12-04 22:50:04] [valid] Ep. 1 : Up. 360000 : perplexity : 2.36217 : new best
+ [2021-12-05 00:39:08] [valid] Ep. 1 : Up. 370000 : perplexity : 2.36086 : new best
+ [2021-12-05 02:28:18] [valid] Ep. 1 : Up. 380000 : perplexity : 2.35793 : new best
+ [2021-12-05 16:54:50] [valid] Ep. 1 : Up. 390000 : perplexity : 2.35314 : new best
+ [2021-12-05 18:43:23] [valid] Ep. 1 : Up. 400000 : perplexity : 2.35098 : new best
+ [2021-12-05 20:32:10] [valid] Ep. 1 : Up. 410000 : perplexity : 2.35121 : stalled 1 times (last best: 2.35098)
+ [2021-12-05 22:21:06] [valid] Ep. 1 : Up. 420000 : perplexity : 2.34825 : new best
+ [2021-12-06 00:09:43] [valid] Ep. 1 : Up. 430000 : perplexity : 2.34765 : new best
+ [2021-12-06 01:58:38] [valid] Ep. 1 : Up. 440000 : perplexity : 2.3442 : new best
+ [2021-12-06 03:47:17] [valid] Ep. 1 : Up. 450000 : perplexity : 2.34198 : new best
+ [2021-12-06 05:36:09] [valid] Ep. 1 : Up. 460000 : perplexity : 2.34093 : new best
+ [2021-12-06 07:24:44] [valid] Ep. 1 : Up. 470000 : perplexity : 2.33995 : new best
+ [2021-12-06 09:13:39] [valid] Ep. 1 : Up. 480000 : perplexity : 2.33989 : new best
+ [2021-12-06 11:02:16] [valid] Ep. 1 : Up. 490000 : perplexity : 2.3377 : new best
+ [2021-12-06 12:51:03] [valid] Ep. 1 : Up. 500000 : perplexity : 2.33755 : new best
+ [2021-12-06 14:39:34] [valid] Ep. 1 : Up. 510000 : perplexity : 2.33954 : stalled 1 times (last best: 2.33755)
+ [2021-12-06 16:27:44] [valid] Ep. 1 : Up. 520000 : perplexity : 2.34083 : stalled 2 times (last best: 2.33755)
+ [2021-12-06 18:16:02] [valid] Ep. 1 : Up. 530000 : perplexity : 2.34335 : stalled 3 times (last best: 2.33755)
+ [2021-12-06 20:04:29] [valid] Ep. 1 : Up. 540000 : perplexity : 2.3448 : stalled 4 times (last best: 2.33755)
+ [2021-12-06 21:53:00] [valid] Ep. 1 : Up. 550000 : perplexity : 2.34299 : stalled 5 times (last best: 2.33755)
+ [2021-12-06 23:41:33] [valid] Ep. 1 : Up. 560000 : perplexity : 2.34046 : stalled 6 times (last best: 2.33755)
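The validation log records perplexity on the held-out set every 10,000 updates, plus early-stopping bookkeeping ("stalled N times") once the best value of 2.33755 stops improving. A short, self-contained sketch for pulling the (update, perplexity) series out of this log file:

```python
# Sketch: extract the (update, perplexity) series from the Marian validation log above.
import re

LOG_FILE = "opusTCv20210807+bt.spm32k-spm32k.transformer-big.valid1.log"
ENTRY = re.compile(r"Up\. (\d+) : perplexity : ([\d.]+)")

points = []
with open(LOG_FILE, encoding="utf-8") as log:
    for line in log:
        match = ENTRY.search(line)
        if match:
            points.append((int(match.group(1)), float(match.group(2))))

best = min(points, key=lambda p: p[1])
print(points[-1])  # (560000, 2.34046) -- the final validation point
print(best)        # (500000, 2.33755) -- the best perplexity reached
```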
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:94ff3bc13fb7c6e175ced0ce952d1ec47f8d836fe70045c0f2e6cbc78e92bd53
+ size 619169923
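What is committed here is the Git LFS pointer rather than the weights themselves: the spec version, the SHA-256 of the real `pytorch_model.bin`, and its size in bytes. A sketch for checking a downloaded copy of the weights against those two values:

```python
# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
import hashlib
import os

EXPECTED_SHA256 = "94ff3bc13fb7c6e175ced0ce952d1ec47f8d836fe70045c0f2e6cbc78e92bd53"
EXPECTED_SIZE = 619169923  # bytes, from the pointer file

path = "pytorch_model.bin"
sha256 = hashlib.sha256()
with open(path, "rb") as weights:
    for chunk in iter(lambda: weights.read(1 << 20), b""):
        sha256.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert sha256.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer")
```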
source.spm ADDED
Binary file (794 kB).
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "eos_token": "</s>",
+   "pad_token": "<pad>",
+   "unk_token": "<unk>"
+ }
target.spm ADDED
Binary file (819 kB).
tokenizer_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "eos_token": "</s>",
+   "model_max_length": 512,
+   "name_or_path": "models/opus-mt-tc-big-en-de",
+   "pad_token": "<pad>",
+   "separate_vocabs": false,
+   "source_lang": "opus-mt-tc-big-en",
+   "sp_model_kwargs": {},
+   "special_tokens_map_file": null,
+   "target_lang": "de",
+   "tokenizer_class": "MarianTokenizer",
+   "tokenizer_file": null,
+   "unk_token": "<unk>"
+ }
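Together with special_tokens_map.json and the two SentencePiece models (source.spm / target.spm), this file configures a `MarianTokenizer` with a shared vocabulary (`separate_vocabs: false`, 65001 entries in config.json) and a 512-token length limit. A brief sketch of how those settings surface once loaded, again with a placeholder path to a local checkout of this repository:

```python
# Sketch: the tokenizer settings above, as exposed by MarianTokenizer.
# MODEL_PATH is a placeholder for a local checkout of this repository.
from transformers import MarianTokenizer

MODEL_PATH = "."

tokenizer = MarianTokenizer.from_pretrained(MODEL_PATH)
print(tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)  # </s> <pad> <unk>
print(tokenizer.model_max_length)  # 512, from tokenizer_config.json

# English input is segmented into SentencePiece pieces with source.spm,
# then mapped to ids from the shared vocab.json.
print(tokenizer.tokenize("The weather is nice today."))
print(tokenizer("The weather is nice today.").input_ids)
```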
vocab.json ADDED
The diff for this file is too large to render.