dainb committed
Commit 36d0850
1 Parent(s): cb04180

Upload tokenizer

special_tokens_map.json CHANGED
@@ -1 +1,9 @@
-{}
+{
+  "pad_token": {
+    "content": "[PAD]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
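
For context, a change like this is typically produced by registering a pad token on an existing tokenizer and re-saving it. A minimal sketch with the transformers API; the repo id and output path below are placeholders, not taken from this commit:

from transformers import AutoTokenizer

# Load a tokenizer that previously had no special tokens mapped.
# "your-org/your-model" is a placeholder; this commit's source repo is not shown here.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

# Register [PAD] as the pad token. save_pretrained then writes the
# special_tokens_map.json and tokenizer_config.json entries seen in this diff.
tokenizer.add_special_tokens({"pad_token": "[PAD]"})
tokenizer.save_pretrained("./tokenizer-with-pad")

If [PAD] was not already in the vocabulary, add_special_tokens also extends it, which would plausibly account for the large tokenizer.json diff below.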
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -43,6 +43,11 @@
   },
   "clean_up_tokenization_spaces": false,
   "extra_special_tokens": {},
+  "max_length": null,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_to_multiple_of": null,
+  "pad_token": "[PAD]",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
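
With these fields in place, padded batch encoding works out of the box. A quick sanity check, assuming the files from this commit are saved locally (the path is the illustrative one from the sketch above):

from transformers import AutoTokenizer

# "./tokenizer-with-pad" is an illustrative local path, not part of this commit.
tok = AutoTokenizer.from_pretrained("./tokenizer-with-pad")

# With pad_token="[PAD]" and padding_side="right", the shorter sequence is
# padded on the right up to the length of the longest one in the batch.
batch = tok(["short text", "a somewhat longer example sentence"], padding=True)
print(batch["input_ids"])       # first row ends in pad token ids
print(batch["attention_mask"])  # 0s mark the padded positions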