Commit 9f3dbba by LysandreJik (1 parent: a5c91a7)
config.json CHANGED
@@ -25,7 +25,7 @@
  "num_encoder_layers": 4,
  "pad_token_id": 0,
  "relative_max_distance": 128,
- "transformers_version": "4.10.0.dev0",
+ "transformers_version": "4.11.0.dev0",
  "use_cache": true,
- "vocab_size": 99
+ "vocab_size": 30522
  }
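The config bump is the substance of this commit: the vocabulary grows from 99 placeholder entries to 30522, matching the tokenizer files added below. A minimal sketch for checking the new values after download; `"./checkout"` is a placeholder for a local clone of this repo at commit 9f3dbba:

```python
# Minimal sketch: inspect the updated config values from this commit.
# "./checkout" is a placeholder path for a local clone of the repo.
from transformers import ProphetNetConfig

config = ProphetNetConfig.from_pretrained("./checkout")
print(config.vocab_size)            # 30522 after this commit (was 99)
print(config.transformers_version)  # "4.11.0.dev0"
```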
prophetnet.tokenizer ADDED
The diff for this file is too large to render. See raw diff
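`prophetnet.tokenizer` is the vocabulary file ProphetNetTokenizer expects, presumably a BERT-style wordpiece vocab with one token per line. A minimal sketch, assuming a local copy and that format, to confirm its length matches the new `vocab_size`:

```python
# Minimal sketch: a wordpiece vocab has one token per line, so the line
# count should equal config.vocab_size. Path is a placeholder local copy.
with open("prophetnet.tokenizer", encoding="utf-8") as f:
    vocab = f.read().splitlines()
print(len(vocab))  # expected: 30522, matching the new vocab_size
print(vocab[:4])   # typically begins with special tokens such as [PAD]
```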
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b9fc138f69fa7ec8476389a8b12956044a9d4988455e72dae16dde4a182bd24c
- size 192145
+ oid sha256:d11e246d4a9132b18943c98a4adfbfa37c0d6ae6db0f94c1be9dd4fcc630b714
+ size 2139217
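The LFS pointer records the new weight file's digest and size, so a download can be verified locally. A minimal sketch; the local path is a placeholder:

```python
# Minimal sketch: check a downloaded pytorch_model.bin against the size
# and sha256 recorded in the LFS pointer above. Path is a placeholder.
import hashlib
import os

path = "pytorch_model.bin"
assert os.path.getsize(path) == 2139217, "size differs from LFS pointer"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == (
    "d11e246d4a9132b18943c98a4adfbfa37c0d6ae6db0f94c1be9dd4fcc630b714"
)
```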
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "mask_token": "[MASK]"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "do_basic_tokenize": true, "never_split": null, "unk_token": "[UNK]", "sep_token": "[SEP]", "x_sep_token": "[X_SEP]", "pad_token": "[PAD]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": "/home/lysandre/.cache/huggingface/transformers/73e9134d07adccf5cf38d24c2f241f87e3febd91b209b279fff2741c9ff2ab4f.6fa7d8b91dc65e73f96549c829087df43aa7359e1d576908a012e7747d67c6c2", "tokenizer_file": null, "name_or_path": "microsoft/prophetnet-large-uncased", "tokenizer_class": "ProphetNetTokenizer"}