kushaltatariya committed
Commit 5cf98d4
1 Parent(s): 80b69dc

Upload tokenizer

special_tokens_map.json CHANGED
@@ -1,6 +1,9 @@
 {
-  "bos_token": "<s>",
-  "eos_token": "</s>",
-  "mask_token": "<mask>",
-  "unk_token": "<unk>"
+  "bos_token": "[BOS]",
+  "cls_token": "[CLS]",
+  "eos_token": "[EOS]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
 }
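This commit replaces the SentencePiece-style special-token names (`<s>`, `</s>`, `<unk>`, `<mask>`) with bracketed names and newly declares `[PAD]`, `[CLS]`, and `[SEP]`. A minimal sketch to confirm the remapped special tokens, assuming this commit's `special_tokens_map.json` sits in the working directory (the path is illustrative, not part of the commit):

```python
# Sketch: confirm the remapped special tokens in this commit's
# special_tokens_map.json (plain JSON, so only the stdlib is needed).
import json

with open("special_tokens_map.json") as f:
    smap = json.load(f)

# Old SentencePiece-style names are replaced by bracketed names,
# and [PAD]/[CLS]/[SEP] are newly declared.
assert smap == {
    "bos_token": "[BOS]",
    "cls_token": "[CLS]",
    "eos_token": "[EOS]",
    "mask_token": "[MASK]",
    "pad_token": "[PAD]",
    "sep_token": "[SEP]",
    "unk_token": "[UNK]",
}
```

Loading the repo through `transformers.AutoTokenizer` should surface the same values via `tok.bos_token` and friends, though with the non-standard `HfSentencePieceTokenizer` class declared below that presumably requires the repo's own tokenizer code to be importable.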
tokenization.model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7040cb89ebefa01ce0946938a47f6e4dc89499bf9c6327683fbbb6de7b28fbdc
-size 471475
+oid sha256:adc0ecc23104f0452a33c52952689e4a40b3feb69c75af7c4bf4b616862f2557
+size 471512
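Because `tokenization.model` is stored under Git LFS, the diff only touches the pointer file (spec version, object hash, byte size); the binary itself lives in LFS storage. A small sketch, assuming the real blob has been pulled locally, that checks the downloaded file against the new pointer values from this commit:

```python
# Sketch: validate a pulled Git LFS object against its pointer (oid + size).
# The hash and size below are the new values from this commit's pointer file.
import hashlib
from pathlib import Path

data = Path("tokenization.model").read_bytes()

assert len(data) == 471512, "size mismatch with LFS pointer"
assert hashlib.sha256(data).hexdigest() == (
    "adc0ecc23104f0452a33c52952689e4a40b3feb69c75af7c4bf4b616862f2557"
), "sha256 mismatch with LFS pointer"
```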
tokenizer.json CHANGED
@@ -5,7 +5,7 @@
   "added_tokens": [
     {
       "id": 0,
-      "content": "<unk>",
+      "content": "[PAD]",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -14,7 +14,7 @@
     },
     {
       "id": 1,
-      "content": "<s>",
+      "content": "[UNK]",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -23,7 +23,7 @@
     },
     {
       "id": 2,
-      "content": "</s>",
+      "content": "[BOS]",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -32,7 +32,34 @@
     },
     {
       "id": 3,
-      "content": "<mask>",
+      "content": "[EOS]",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 4,
+      "content": "[CLS]",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 5,
+      "content": "[SEP]",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 6,
+      "content": "[MASK]",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -76,22 +103,34 @@
   },
   "model": {
     "type": "Unigram",
-    "unk_id": 0,
+    "unk_id": 1,
     "vocab": [
       [
-        "<unk>",
+        "[PAD]",
+        0.0
+      ],
+      [
+        "[UNK]",
+        0.0
+      ],
+      [
+        "[BOS]",
+        0.0
+      ],
+      [
+        "[EOS]",
         0.0
       ],
       [
-        "<s>",
+        "[CLS]",
         0.0
       ],
       [
-        "</s>",
+        "[SEP]",
         0.0
       ],
       [
-        "<mask>",
+        "[MASK]",
         0.0
       ],
       [
@@ -56102,18 +56141,6 @@
         "Ahafo",
         -12.80194854736328
       ],
-      [
-        "profile",
-        -12.80194854736328
-      ],
-      [
-        "British",
-        -12.801949501037598
-      ],
-      [
-        "larabci",
-        -12.80195140838623
-      ],
       [
         "Ƴ",
         -14.519325256347656
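The substantive change in `tokenizer.json` is at the head of the Unigram model: the seven new specials occupy ids 0 through 6 in both `added_tokens` and the vocab, so `unk_id` moves from 0 to 1 because `[PAD]` now holds id 0 and `[UNK]` id 1 (the last hunk also drops three plain vocab entries: "profile", "British", "larabci"). A sketch with the `tokenizers` library, assuming a local copy of the updated file:

```python
# Sketch: check that the seven specials sit at the head of the Unigram
# vocab in the order declared by this commit's added_tokens section.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")  # local copy of this file

specials = ["[PAD]", "[UNK]", "[BOS]", "[EOS]", "[CLS]", "[SEP]", "[MASK]"]
for i, token in enumerate(specials):
    assert tok.token_to_id(token) == i, token
```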
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
   "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
-      "content": "<unk>",
+      "content": "[PAD]",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -12,7 +12,7 @@
       "special": true
     },
     "1": {
-      "content": "<s>",
+      "content": "[UNK]",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -20,7 +20,7 @@
       "special": true
     },
     "2": {
-      "content": "</s>",
+      "content": "[BOS]",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -28,7 +28,31 @@
       "special": true
     },
     "3": {
-      "content": "<mask>",
+      "content": "[EOS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": "[SEP]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "[MASK]",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -36,18 +60,24 @@
       "special": false
     }
   },
-  "bos_token": "<s>",
-  "bos_token_id": 1,
+  "bos_token": "[BOS]",
+  "bos_token_id": 2,
   "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
-  "eos_token_id": 2,
+  "cls_token": "[CLS]",
+  "cls_token_id": 4,
+  "eos_token": "[EOS]",
+  "eos_token_id": 3,
   "legacy": true,
-  "mask_token": "<mask>",
-  "mask_token_id": 3,
+  "mask_token": "[MASK]",
+  "mask_token_id": 6,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "[PAD]",
+  "pad_token_id": 0,
+  "sep_token": "[SEP]",
+  "sep_token_id": 5,
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "HfSentencePieceTokenizer",
-  "unk_token": "<unk>",
-  "unk_token_id": 0
+  "unk_token": "[UNK]",
+  "unk_token_id": 1
 }
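The `*_token_id` fields in the config now mirror the new vocab order ([PAD]=0, [UNK]=1, [BOS]=2, [EOS]=3, [CLS]=4, [SEP]=5, [MASK]=6). A sketch that cross-checks those pairs against `added_tokens_decoder`, reading the JSON directly so it needs nothing beyond the standard library:

```python
# Sketch: cross-check the *_token / *_token_id pairs against
# added_tokens_decoder in this commit's tokenizer_config.json.
import json

with open("tokenizer_config.json") as f:
    cfg = json.load(f)

decoder = cfg["added_tokens_decoder"]  # maps id (as a string) -> token entry
for name in ("bos", "cls", "eos", "mask", "pad", "sep", "unk"):
    token = cfg[f"{name}_token"]
    token_id = cfg[f"{name}_token_id"]
    assert decoder[str(token_id)]["content"] == token, name
```

One detail worth noting from the diff itself: the last `added_tokens_decoder` entry (now `[MASK]`, id 6) keeps `"special": false`, unlike the other six, just as the old `<mask>` entry did before this commit.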