Vishnou committed on
Commit
4c1dd0b
1 Parent(s): 215c743

Upload tokenizer

Files changed (2)
  1. tokenizer.json +16 -2
  2. tokenizer_config.json +2 -1
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 50,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 50
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "[PAD]"
+  },
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -46,7 +46,7 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
-  "max_length": 268,
+  "max_length": 50,
   "model_max_length": 1000000000000000019884624838656,
   "never_split": null,
   "pad_token": "[PAD]",
@@ -55,5 +55,6 @@
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
+  "truncation": true,
   "unk_token": "[UNK]"
 }
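
For reference, the max_length and truncation values now stored in tokenizer_config.json correspond to call-time encoding arguments in transformers, which applies truncation and padding per call. A hedged sketch of how they line up (the repo id below is hypothetical; the commit view does not show the repository name):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("Vishnou/example-model")  # hypothetical id

    # Mirrors the saved max_length=50 / truncation=true settings.
    enc = tok("some example text", truncation=True,
              max_length=50, padding="max_length")
    assert len(enc["input_ids"]) == 50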