whizzzzkid committed
Commit edb3bf4 (verified) · Parent(s): 5ab3b02

Upload tokenizer

added_tokens.json CHANGED
@@ -1,3 +1,5 @@
 {
-  "<|end|>": 100289
+  "</s>": 100290,
+  "<s>": 100289,
+  "<unk>": 100291
 }
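
The change swaps the single "<|end|>" entry for three conventional sentinel tokens. As a minimal sketch (assuming a local checkout of this repo), the new mapping can be read back with the Python standard library:

import json

# added_tokens.json maps each extra token string to its vocabulary id;
# the three new ids sit just past the base vocabulary
with open("added_tokens.json") as f:
    added = json.load(f)

assert added == {"</s>": 100290, "<s>": 100289, "<unk>": 100291}
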
special_tokens_map.json CHANGED
@@ -35,14 +35,14 @@
     "<|extra0|>"
   ],
   "bos_token": {
-    "content": "<|endoftext|>",
+    "content": "<s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|endoftext|>",
+    "content": "</s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
@@ -56,7 +56,7 @@
     "single_word": false
   },
   "unk_token": {
-    "content": "<|endoftext|>",
+    "content": "<unk>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:290f60278265b590e6e79137d425f9aa8c19d934e08b6ac7a6229e55a1bdaa27
3
- size 4239524
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aa7112692ada7c064feef29b0d52fdd35aa4262fc1d0ee84b5a072b363c66ff3
3
+ size 4239968
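
tokenizer.json is tracked with Git LFS, so the diff touches only the pointer file; the real ~4.2 MB tokenizer is fetched by its sha256 oid. A quick integrity check after running git lfs pull (a sketch, assuming the file has been pulled locally):

import hashlib
import os

# the downloaded tokenizer.json should match the oid and size recorded
# in the new LFS pointer above
with open("tokenizer.json", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert digest == "aa7112692ada7c064feef29b0d52fdd35aa4262fc1d0ee84b5a072b363c66ff3"
assert os.path.getsize("tokenizer.json") == 4239968
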
tokenizer_config.json CHANGED
@@ -264,6 +264,30 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "100289": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "100290": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "100291": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "additional_special_tokens": [
@@ -301,16 +325,12 @@
     "<|reg7|>",
     "<|extra0|>"
   ],
-  "bos_token": "<|endoftext|>",
+  "bos_token": "<s>",
   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
-  "max_length": 2048,
+  "eos_token": "</s>",
   "model_max_length": 2048,
   "pad_token": "<|endoftext|>",
-  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
-  "unk_token": "<|endoftext|>"
+  "unk_token": "<unk>"
 }
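
Taken together, the commit remaps bos/eos/unk from the shared "<|endoftext|>" placeholder to dedicated "<s>"/"</s>"/"<unk>" tokens and drops the stale truncation settings. A sanity check after the upload could look like this (a sketch; "path/to/checkout" is a stand-in for the repo, whose id is not shown on this page):

from transformers import AutoTokenizer

# load from a local clone of this repo (hypothetical path)
tok = AutoTokenizer.from_pretrained("path/to/checkout")

# the remapped special tokens from tokenizer_config.json
assert tok.bos_token == "<s>"            # was "<|endoftext|>"
assert tok.eos_token == "</s>"           # was "<|endoftext|>"
assert tok.unk_token == "<unk>"          # was "<|endoftext|>"
assert tok.pad_token == "<|endoftext|>"  # unchanged

# ids as registered in added_tokens.json
assert tok.convert_tokens_to_ids("<s>") == 100289
assert tok.convert_tokens_to_ids("</s>") == 100290
assert tok.convert_tokens_to_ids("<unk>") == 100291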