ming030890 committed on
Commit
a62549d
1 Parent(s): f135500

Upload tokenizer

Browse files
special_tokens_map.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "eos_token": {
3
  "content": "</s>",
4
  "lstrip": false,
@@ -6,6 +7,7 @@
6
  "rstrip": false,
7
  "single_word": false
8
  },
 
9
  "pad_token": {
10
  "content": "<pad>",
11
  "lstrip": false,
@@ -13,6 +15,7 @@
13
  "rstrip": false,
14
  "single_word": false
15
  },
 
16
  "unk_token": {
17
  "content": "<unk>",
18
  "lstrip": false,
 
1
  {
2
+ "cls_token": "<s>",
3
  "eos_token": {
4
  "content": "</s>",
5
  "lstrip": false,
 
7
  "rstrip": false,
8
  "single_word": false
9
  },
10
+ "mask_token": "<mask>",
11
  "pad_token": {
12
  "content": "<pad>",
13
  "lstrip": false,
 
15
  "rstrip": false,
16
  "single_word": false
17
  },
18
+ "sep_token": "</s>",
19
  "unk_token": {
20
  "content": "<unk>",
21
  "lstrip": false,
tokenizer.json CHANGED
@@ -12,15 +12,6 @@
12
  "normalized": false,
13
  "special": true
14
  },
15
- {
16
- "id": 1,
17
- "content": "<s>",
18
- "single_word": false,
19
- "lstrip": false,
20
- "rstrip": false,
21
- "normalized": false,
22
- "special": true
23
- },
24
  {
25
  "id": 2,
26
  "content": "</s>",
@@ -30,15 +21,6 @@
30
  "normalized": false,
31
  "special": true
32
  },
33
- {
34
- "id": 3,
35
- "content": "<mask>",
36
- "single_word": false,
37
- "lstrip": false,
38
- "rstrip": false,
39
- "normalized": false,
40
- "special": true
41
- },
42
  {
43
  "id": 15999,
44
  "content": "<pad>",
 
12
  "normalized": false,
13
  "special": true
14
  },
 
 
 
 
 
 
 
 
 
15
  {
16
  "id": 2,
17
  "content": "</s>",
 
21
  "normalized": false,
22
  "special": true
23
  },
 
 
 
 
 
 
 
 
 
24
  {
25
  "id": 15999,
26
  "content": "<pad>",
tokenizer_config.json CHANGED
@@ -28,11 +28,14 @@
28
  },
29
  "additional_special_tokens": [],
30
  "clean_up_tokenization_spaces": true,
 
31
  "eos_token": "</s>",
32
  "extra_ids": 0,
33
  "legacy": false,
 
34
  "model_max_length": 1000000000000000019884624838656,
35
  "pad_token": "<pad>",
 
36
  "sp_model_kwargs": {},
37
  "tokenizer_class": "T5Tokenizer",
38
  "unk_token": "<unk>"
 
28
  },
29
  "additional_special_tokens": [],
30
  "clean_up_tokenization_spaces": true,
31
+ "cls_token": "<s>",
32
  "eos_token": "</s>",
33
  "extra_ids": 0,
34
  "legacy": false,
35
+ "mask_token": "<mask>",
36
  "model_max_length": 1000000000000000019884624838656,
37
  "pad_token": "<pad>",
38
+ "sep_token": "</s>",
39
  "sp_model_kwargs": {},
40
  "tokenizer_class": "T5Tokenizer",
41
  "unk_token": "<unk>"