TheophileCA committed
Commit c88bf23
1 Parent(s): 2574123

Upload tokenizer
Files changed (2)
  1. special_tokens_map.json  +7 -0
  2. tokenizer_config.json    +11 -1
special_tokens_map.json CHANGED
@@ -1,4 +1,11 @@
 {
+  "additional_special_tokens": [
+    "<s>",
+    "<pad>",
+    "</s>",
+    "<unk>",
+    "<mask>"
+  ],
   "bos_token": "<s>",
   "cls_token": "<s>",
   "eos_token": "</s>",
tokenizer_config.json CHANGED
@@ -42,17 +42,27 @@
       "special": true
     }
   },
-  "additional_special_tokens": [],
+  "additional_special_tokens": [
+    "<s>",
+    "<pad>",
+    "</s>",
+    "<unk>",
+    "<mask>"
+  ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",
   "eos_token": "</s>",
   "errors": "replace",
   "mask_token": "<mask>",
+  "max_length": 128,
   "model_max_length": 512,
   "pad_token": "<pad>",
   "sep_token": "</s>",
+  "stride": 0,
   "tokenizer_class": "RobertaTokenizer",
   "trim_offsets": true,
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
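Likewise, the new truncation settings can be exercised directly. A minimal sketch under the same placeholder repo id; note that model_max_length (512) remains the hard cap, and passing the saved max_length explicitly reproduces the 128-token behavior, since how tokenizer_config.json defaults are picked up can vary by transformers version:

    from transformers import AutoTokenizer

    # Hypothetical repo id for illustration -- substitute the actual repository.
    tokenizer = AutoTokenizer.from_pretrained("TheophileCA/<model-name>")

    text = "word " * 1000  # deliberately longer than any limit

    # The committed config truncates from the right ("truncation_side":
    # "right") using the "longest_first" strategy.
    enc = tokenizer(text, truncation=True, max_length=128)
    print(len(enc["input_ids"]))       # 128
    print(tokenizer.truncation_side)   # right
    print(tokenizer.model_max_length)  # 512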