lysandre (HF staff) committed
Commit f4dca6a
0 Parent(s):

Initial commit

.gitattributes ADDED
@@ -0,0 +1,17 @@
+ *.bin.* filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tar.gz filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
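These .gitattributes patterns route large binary artifacts through Git LFS: matching files are committed as small pointer files while the real data lives in LFS storage. As a minimal sketch (not part of this commit), the snippet below checks which of the files added in this commit fall under those patterns, using Python's fnmatch as a rough approximation of Git's attribute matching; only the two binary files match, while the JSON files stay as plain text.

# Sketch: approximate which added files are covered by the LFS patterns above.
from fnmatch import fnmatch

lfs_patterns = ["*.bin.*", "*.lfs.*", "*.bin", "*.h5", "*.tflite", "*.tar.gz",
                "*.ot", "*.onnx", "*.arrow", "*.ftz", "*.joblib", "*.model",
                "*.msgpack", "*.pb", "*.pt", "*.pth", "*tfevents*"]
added_files = ["config.json", "pytorch_model.bin", "sentencepiece.bpe.model",
               "special_tokens_map.json", "tokenizer.json",
               "tokenizer_config.json", "vocab.json"]
for name in added_files:
    if any(fnmatch(name, pattern) for pattern in lfs_patterns):
        print(name, "-> stored as a Git LFS pointer")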
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 16,
+ "decoder_attention_heads": 4,
+ "decoder_ffn_dim": 4,
+ "decoder_layerdrop": 0.05,
+ "decoder_layers": 2,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 4,
+ "encoder_ffn_dim": 4,
+ "encoder_layerdrop": 0.05,
+ "encoder_layers": 2,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_position_embeddings": 20,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 2,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "transformers_version": "4.11.0.dev0",
+ "use_cache": true,
+ "vocab_size": 128112
+ }
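The config describes a deliberately tiny m2m_100-style model (d_model of 16, two encoder and two decoder layers), as is typical for test fixtures. A minimal sketch, assuming the repository has been cloned to a local placeholder path, of loading this config with Transformers and instantiating a randomly initialised model:

# Sketch: build the tiny model described by config.json above.
from transformers import M2M100Config, M2M100ForConditionalGeneration

config = M2M100Config.from_pretrained("./")        # placeholder: directory containing config.json
model = M2M100ForConditionalGeneration(config)     # randomly initialised weights
print(config.d_model, config.encoder_layers, config.vocab_size)  # 16 2 128112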
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09355f5f8a9733622d4e286df2680d565e0a8fab5e3e4e4ef954b06ac1685a85
+ size 16520597
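pytorch_model.bin is committed as a Git LFS pointer: three plain-text fields giving the spec version, the SHA-256 of the real payload, and its size in bytes (about 16.5 MB here). A minimal sketch of parsing such a pointer into its fields:

# Sketch: split the LFS pointer text shown above into key/value pairs.
pointer_text = """version https://git-lfs.github.com/spec/v1
oid sha256:09355f5f8a9733622d4e286df2680d565e0a8fab5e3e4e4ef954b06ac1685a85
size 16520597"""
fields = dict(line.split(" ", 1) for line in pointer_text.splitlines())
print(fields["oid"], int(fields["size"]))  # sha256:0935..., 16520597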
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8dfd1eae4522281b1b839eab877a791befec7a1663a41c814c77d9c89c748f2d
+ size 253154
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true}, "additional_special_tokens": ["ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN"]}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "src_lang": null, "tgt_lang": null, "additional_special_tokens": null, "sp_model_kwargs": {}, "keep_accents": true, "special_tokens_map_file": "./special_tokens_map.json", "name_or_path": "./", "tokenizer_class": "MBartTokenizer"}
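tokenizer_config.json selects MBartTokenizer, backed by the sentencepiece.bpe.model file above, with the language codes from special_tokens_map.json registered as additional special tokens. A minimal sketch, again with a placeholder local path, of loading and using the tokenizer:

# Sketch: load the tokenizer defined by the files in this commit.
from transformers import MBartTokenizer

tokenizer = MBartTokenizer.from_pretrained("./")   # placeholder: directory with the tokenizer files
print(tokenizer.additional_special_tokens[:3])     # expected to start with ['ar_AR', 'cs_CZ', 'de_DE']
print(tokenizer("hello")["input_ids"])             # token ids for a short input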
vocab.json ADDED
The diff for this file is too large to render. See raw diff