mylibrar committed
Commit
aaf824f
1 Parent(s): fa4fcfb

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +11 -28
  2. tokenizer.json +2 -29
  3. tokenizer_config.json +2 -0
special_tokens_map.json CHANGED
@@ -1,34 +1,17 @@
 {
   "additional_special_tokens": [
-    {
-      "content": "<|endofsystemprompt|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|beginofsystem|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|beginofuser|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|endofchat|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
+    "<|endofsystemprompt|>",
+    "<|beginofsystem|>",
+    "<|beginofuser|>",
+    "<|endofchat|>"
   ],
+  "bos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
   "eos_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -249,12 +249,6 @@
   "post_processor": {
     "type": "TemplateProcessing",
     "single": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
@@ -263,24 +257,12 @@
       }
     ],
     "pair": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
           "type_id": 0
         }
       },
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 1
-        }
-      },
       {
         "Sequence": {
           "id": "B",
@@ -288,17 +270,7 @@
         }
       }
     ],
-    "special_tokens": {
-      "<s>": {
-        "id": "<s>",
-        "ids": [
-          1
-        ],
-        "tokens": [
-          "<s>"
-        ]
-      }
-    }
+    "special_tokens": {}
   },
   "decoder": {
     "type": "Sequence",
@@ -332,6 +304,7 @@
   "end_of_word_suffix": null,
   "fuse_unk": true,
   "byte_fallback": true,
+  "ignore_merges": false,
   "vocab": {
     "<unk>": 0,
     "<s>": 1,
tokenizer_config.json CHANGED
@@ -210,8 +210,10 @@
     "<|endofchat|>"
   ],
   "bos_token": "<|endoftext|>",
+  "chat_template": "{% for message in messages %}{% if message['role'] == 'user' %}{{ '<|beginofuser|>' + message['content'] }}{% elif message['role'] == 'system' %}{{ message['content'] + '<|endofsystemprompt|>' }}{% elif message['role'] == 'assistant' %}{{ '<|beginofsystem|>' + message['content'] }}{% endif %}{% if loop.last and add_generation_prompt %}{{ '<|beginofsystem|>' }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
+  "legacy": true,
   "model_max_length": 8192,
   "pad_token": null,
   "sp_model_kwargs": {},