mogoi committed on
Commit 2cc0d0f
1 Parent(s): 19a627f

Upload tokenizer

Files changed (2):
  1. special_tokens_map.json +1 -1
  2. tokenizer_config.json +2 -3
special_tokens_map.json CHANGED
@@ -14,7 +14,7 @@
     "single_word": false
   },
   "mask_token": {
-    "content": "<unused0>",
+    "content": "<mask>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<s>",
@@ -3005,9 +3004,9 @@
   "bos_token": "</s>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
-  "mask_token": "<unused0>",
+  "mask_token": "<mask>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
-  "tokenizer_class": "GPT2Tokenizer",
+  "tokenizer_class": "PreTrainedTokenizerFast",
   "unk_token": "<unk>"
 }
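
A minimal verification sketch for this change, assuming the repo is a standard transformers tokenizer repo; the repo id below is a placeholder, since the commit page does not show it:

# Not part of the commit; a sketch to check the effect of the new config.
# "mogoi/<repo-name>" is a placeholder id, not the actual repository name.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("mogoi/<repo-name>")
print(type(tok).__name__)  # expected: PreTrainedTokenizerFast (per "tokenizer_class")
print(tok.mask_token)      # expected: <mask> (was <unused0> before this commit)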