Pablonm committed
Commit 3d97e8f · verified · 1 Parent(s): d6c722b

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +21 -3
  2. tokenizer.json +2 -2
  3. tokenizer_config.json +24 -17
special_tokens_map.json CHANGED
@@ -1,5 +1,23 @@
 {
-  "bos_token": "<|begin_of_text|>",
-  "eos_token": "<|im_end|>",
-  "pad_token": "<|finetune_right_pad_id|>"
+  "bos_token": {
+    "content": "<|begin▁of▁sentence|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|end▁of▁sentence|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|finetune_right_pad_id|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
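
The map now spells each special token out as a full token object instead of a bare string, pinning down its lstrip/rstrip/normalization behavior, and remaps bos/eos from the Llama-style <|begin_of_text|>/<|im_end|> pair to the DeepSeek-style sentence markers. A minimal sanity check, assuming a local checkout of this repo ("path/to/checkout" is a placeholder, not part of the commit):

```python
# Minimal sketch: confirm the remapped special tokens after this commit.
# "path/to/checkout" stands in for a local clone of this repo (assumption).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkout")

print(tok.bos_token)  # <|begin▁of▁sentence|>
print(tok.eos_token)  # <|end▁of▁sentence|>
print(tok.pad_token)  # <|finetune_right_pad_id|>
```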
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c1f4a1a7159f2dacfa01410288c1b9faee8d5e309c880cb3dfc8e9ab3e070cef
-size 17209915
+oid sha256:d91915040cfac999d8c55f4b5bc6e67367c065e3a7a4e4b9438ce1f256addd86
+size 17209530
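
tokenizer.json is tracked with Git LFS, so the diff only swaps the pointer: a new SHA-256 digest and a slightly smaller payload (17209530 vs 17209915 bytes). A standard-library sketch for checking that a pulled copy matches the new pointer:

```python
# Sketch: verify a pulled tokenizer.json against the new LFS pointer.
import hashlib

data = open("tokenizer.json", "rb").read()
print(len(data))  # expected: 17209530
print(hashlib.sha256(data).hexdigest())
# expected: d91915040cfac999d8c55f4b5bc6e67367c065e3a7a4e4b9438ce1f256addd86
```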
tokenizer_config.json CHANGED
@@ -1,7 +1,10 @@
 {
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "128000": {
-      "content": "<|begin_of_text|>",
+      "content": "<|begin▁of▁sentence|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -9,7 +12,7 @@
       "special": true
     },
     "128001": {
-      "content": "<|im_end|>",
+      "content": "<|end▁of▁sentence|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -89,39 +92,39 @@
       "special": true
     },
     "128011": {
-      "content": "<|reserved_special_token_3|>",
+      "content": "<|User|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128012": {
-      "content": "<|reserved_special_token_4|>",
+      "content": "<|Assistant|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128013": {
-      "content": "<|reserved_special_token_5|>",
+      "content": "<think>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128014": {
-      "content": "<|reserved_special_token_6|>",
+      "content": "</think>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128015": {
-      "content": "<|reserved_special_token_7|>",
+      "content": "<|▁pad▁|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -2049,13 +2052,17 @@
       "special": true
     }
   },
-  "bos_token": "<|begin_of_text|>",
-  "chat_template": "{% if 'role' in messages[0] %}{% for message in messages %}{% if message['role'] == 'user' %}{{'<|im_start|>user\n' + message['content'] + '<|im_end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|im_start|>assistant\n' + message['content'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['content'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}{% else %}{% for message in messages %}{% if message['from'] == 'human' %}{{'<|im_start|>user\n' + message['value'] + '<|im_end|>\n'}}{% elif message['from'] == 'gpt' %}{{'<|im_start|>assistant\n' + message['value'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['value'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}{% endif %}",
+  "bos_token": "<|begin▁of▁sentence|>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|im_end|>",
+  "eos_token": "<|end▁of▁sentence|>",
   "extra_special_tokens": {},
-  "model_max_length": 1000000000000000019884624838656,
+  "legacy": true,
+  "model_max_length": 131072,
   "pad_token": "<|finetune_right_pad_id|>",
-  "tokenizer_class": "PreTrainedTokenizerFast",
-  "unk_token": null
+  "padding_side": "left",
+  "sp_model_kwargs": {},
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": null,
+  "use_default_system_prompt": false
 }
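
The config now enables add_bos_token, caps model_max_length at 131072 instead of the unbounded sentinel, switches the class to LlamaTokenizer, and replaces the ChatML-style <|im_start|>/<|im_end|> template with the DeepSeek-style one: the system message is hoisted in front of the conversation right after bos_token, turns are marked with <|User|>/<|Assistant|>, and with add_generation_prompt the reply is opened as <|Assistant|><think> plus a newline. Note that <|User|>, <|Assistant|>, <think> and </think> are registered with "special": false, so they survive decoding even with skip_special_tokens=True. A sketch of the rendered output, again against a placeholder local checkout:

```python
# Sketch: render the new chat template ("path/to/checkout" is a placeholder).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkout")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
# <|begin▁of▁sentence|>You are a helpful assistant.<|User|>What is 2 + 2?<|Assistant|><think>
# (ends with the opening <think> marker followed by a newline)

# The thinking markers are plain added tokens ("special": false), so they
# are kept when decoding with skip_special_tokens=True.
ids = tok.encode("<think>2 + 2 = 4</think>4", add_special_tokens=False)
print(tok.decode(ids, skip_special_tokens=True))  # <think>2 + 2 = 4</think>4
```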