whizzzzkid committed
Commit ebfa1e0
Parent: 772031b

Upload tokenizer

special_tokens_map.json CHANGED
@@ -35,14 +35,14 @@
     "<|extra0|>"
   ],
   "bos_token": {
-    "content": "<s>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "</s>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
@@ -56,7 +56,7 @@
     "single_word": false
   },
   "unk_token": {
-    "content": "<unk>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3dd2879cbbbc0c1a557fecbe91b977e696b7fa7af7b62901d68b358cf7f1240b
-size 4240235
+oid sha256:631fbd6e3f9415c226372013d876a6a5106009bb2637021ea2b73d524163dd5c
+size 4239425
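The tokenizer.json blob is stored via Git LFS, so the diff only shows the pointer file: a new sha256 oid and a slightly smaller size (4,239,425 vs. 4,240,235 bytes), consistent with token entries being dropped. A standard-library sketch for verifying a downloaded copy against the new pointer; the local path is an assumption:

import hashlib
import os

path = "tokenizer.json"  # hypothetical local path to the downloaded file

# Hash the file in chunks to avoid loading ~4 MB into memory at once.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

# Expected values come straight from the new LFS pointer in this commit.
assert digest.hexdigest() == "631fbd6e3f9415c226372013d876a6a5106009bb2637021ea2b73d524163dd5c"
assert os.path.getsize(path) == 4239425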
tokenizer_config.json CHANGED
@@ -264,30 +264,6 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "100289": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "100290": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "100291": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
@@ -325,19 +301,12 @@
     "<|reg7|>",
     "<|extra0|>"
   ],
-  "bos_token": "<s>",
+  "bos_token": "<|endoftext|>",
   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "</s>",
-  "max_length": 2048,
+  "eos_token": "<|endoftext|>",
   "model_max_length": 2048,
-  "pad_to_multiple_of": null,
   "pad_token": "<|endoftext|>",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
-  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
-  "unk_token": "<unk>"
+  "unk_token": "<|endoftext|>"
 }
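These hunks mirror the special_tokens_map.json change (bos/eos/unk now <|endoftext|>), drop the dedicated ids 100289-100291 for <s>/</s>/<unk> from added_tokens_decoder, and prune call-time padding/truncation settings (max_length, padding_side, stride, truncation_side, truncation_strategy, and friends) from the persisted config. The chat_template itself is unchanged. A short sketch of rendering it with the standard apply_chat_template API; the repo id is again a placeholder:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("whizzzzkid/model")  # hypothetical repo id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# With add_generation_prompt=True, the Jinja template above appends a
# trailing '<|assistant|>' marker to cue the model's reply. Note that
# eos_token inside the template now renders as '<|endoftext|>'.
text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)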