kingabzpro committed on
Commit 5d0e863
1 Parent(s): 8007bdb

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +19 -14
  2. tokenizer.json +26 -1
  3. tokenizer_config.json +24 -3
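
The three files above are what transformers saves for a fast tokenizer and, when pushed, land together in a single "Upload tokenizer" commit like this one. A minimal sketch of how such a commit is typically produced (both repo ids below are hypothetical placeholders, not taken from this commit):

from transformers import AutoTokenizer

# Load a base tokenizer, adjust it, then push; push_to_hub() uploads
# special_tokens_map.json, tokenizer.json and tokenizer_config.json together.
tokenizer = AutoTokenizer.from_pretrained("base-org/base-model")  # placeholder id
tokenizer.push_to_hub("kingabzpro/finetuned-model")               # placeholder id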
special_tokens_map.json CHANGED
@@ -1,16 +1,21 @@
 {
-  "bos_token": {
-    "content": "<|begin_of_text|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|end_of_text|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "additional_special_tokens": [
+    {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": "<|im_start|>",
+  "eos_token": "<|im_end|>",
+  "pad_token": "<|im_end|>"
 }
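
This diff replaces the structured <|begin_of_text|>/<|end_of_text|> bos/eos entries with the ChatML markers and registers them as additional special tokens, with <|im_end|> doubling as the pad token. A quick sanity check once the tokenizer is loaded (the repo id is a hypothetical placeholder):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("kingabzpro/finetuned-model")  # hypothetical repo id
print(tok.bos_token, tok.eos_token, tok.pad_token)
# -> <|im_start|> <|im_end|> <|im_end|>
print(tok.additional_special_tokens)
# -> ['<|im_start|>', '<|im_end|>']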
tokenizer.json CHANGED
@@ -1,7 +1,14 @@
 {
   "version": "1.0",
   "truncation": null,
-  "padding": null,
+  "padding": {
+    "strategy": "BatchLongest",
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 128257,
+    "pad_type_id": 0,
+    "pad_token": "<|im_end|>"
+  },
   "added_tokens": [
     {
       "id": 128000,
@@ -2306,6 +2313,24 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 128256,
+      "content": "<|im_start|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 128257,
+      "content": "<|im_end|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
     }
   ],
   "normalizer": null,
tokenizer_config.json CHANGED
@@ -2047,16 +2047,37 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "128256": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "128257": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
-  "bos_token": "<|begin_of_text|>",
-  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": "<|im_start|>",
+  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
+  "eos_token": "<|im_end|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|im_end|>",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
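
tokenizer_config.json mirrors the same token changes and swaps the header-style chat template (built on <|start_header_id|>/<|eot_id|>) for a ChatML one. A sketch of what the new chat_template renders via apply_chat_template (hypothetical repo id):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("kingabzpro/finetuned-model")  # hypothetical repo id
messages = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi, how can I help?"},
]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
# Hi, how can I help?<|im_end|>
# <|im_start|>assistant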