whizzzzkid committed
Commit 705253f
1 parent: d39836e

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +3 -3
  2. tokenizer.json +2 -36
  3. tokenizer_config.json +4 -31
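In broad strokes, a commit like this is usually produced by editing the special tokens on a loaded tokenizer and pushing it back with `push_to_hub`. The sketch below is illustrative only: the repo id `whizzzzkid/some-model` is a hypothetical placeholder (the page does not name the repository), and the exact commands behind commit 705253f are not recorded here.

```python
from transformers import AutoTokenizer

# Hypothetical repo id -- the actual repository is not named on this page.
repo_id = "whizzzzkid/some-model"

tok = AutoTokenizer.from_pretrained(repo_id)

# Retarget the three special tokens to <|endoftext|>, matching this diff.
tok.bos_token = "<|endoftext|>"
tok.eos_token = "<|endoftext|>"
tok.unk_token = "<|endoftext|>"

# push_to_hub saves and uploads the tokenizer files, including the three
# changed in this commit.
tok.push_to_hub(repo_id, commit_message="Upload tokenizer")
```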
special_tokens_map.json CHANGED
@@ -35,14 +35,14 @@
     "<|extra0|>"
   ],
   "bos_token": {
-    "content": "<s>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "</s>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
@@ -56,7 +56,7 @@
     "single_word": false
   },
   "unk_token": {
-    "content": "<unk>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -2,18 +2,11 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length": 2048,
+    "max_length": 4096,
     "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding": {
-    "strategy": "BatchLongest",
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 100257,
-    "pad_type_id": 0,
-    "pad_token": "<|endoftext|>"
-  },
+  "padding": null,
   "added_tokens": [
     {
       "id": 100256,
@@ -311,33 +304,6 @@
       "rstrip": false,
       "normalized": false,
       "special": true
-    },
-    {
-      "id": 100289,
-      "content": "<s>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 100290,
-      "content": "</s>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 100291,
-      "content": "<unk>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": null,
tokenizer_config.json CHANGED
@@ -264,30 +264,6 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "100289": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "100290": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "100291": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
@@ -325,19 +301,16 @@
     "<|reg7|>",
     "<|extra0|>"
   ],
-  "bos_token": "<s>",
+  "bos_token": "<|endoftext|>",
   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "</s>",
-  "max_length": 2048,
+  "eos_token": "<|endoftext|>",
+  "max_length": 4096,
   "model_max_length": 2048,
-  "pad_to_multiple_of": null,
   "pad_token": "<|endoftext|>",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
   "truncation_side": "right",
   "truncation_strategy": "longest_first",
-  "unk_token": "<unk>"
+  "unk_token": "<|endoftext|>"
 }
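The chat template kept in this file appends `eos_token` after every turn, so with this commit a rendered conversation ends each message with `<|endoftext|>` rather than `</s>`. A sketch of rendering it (hypothetical repo id; `apply_chat_template` needs a reasonably recent transformers):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("whizzzzkid/some-model")  # hypothetical id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Renders the Jinja chat_template stored in tokenizer_config.json; each turn
# now ends with <|endoftext|>, and the trailing <|assistant|> comes from the
# template's loop.last branch when add_generation_prompt is set.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```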