fullstack committed
Commit 5ba455f · verified · 1 Parent(s): 167dacd

Update tokenizer_config.json

Files changed (1): tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -1996,7 +1996,7 @@
   }
  },
  "bos_token": "<bos>",
- "chat_template": "{{ bos_token }}{{ '<|im_start|>system\nYou an advanced artificial intelligence system, capable of <thinking> <reflection> and you output a brief and small to the point <output>.\n<|im_end|>\n' }}{% if 'role' in messages[0] %}{% for message in messages %}{% if message['role'] == 'user' %}{{'<|im_start|>user\n' + message['content'] + '<|im_end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|im_start|>assistant\n' + message['content'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['content'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}{% else %}{% for message in messages %}{% if message['from'] == 'human' %}{{'<|im_start|>user\n' + message['value'] + '<|im_end|>\n'}}{% elif message['from'] == 'gpt' %}{{'<|im_start|>assistant\n' + message['value'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['value'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}{% endif %}",
+ "chat_template": "{{ bos_token }}{{ '<|im_start|>system\\nYou an advanced artificial intelligence system, capable of <thinking> <reflection> and you output a brief and small to the point <output>.\\n<|im_end|>\\n' }}{% if 'role' in messages[0] %}{% for message in messages %}{% if message['role'] == 'user' %}{{'<|im_start|>user\\n' + message['content'] + '<|im_end|>\\n'}}{% elif message['role'] == 'assistant' %}{{'<|im_start|>assistant\\n' + message['content'] + '<|im_end|>\\n' }}{% else %}{{ '<|im_start|>system\\n' + message['content'] + '<|im_end|>\\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}{% else %}{% for message in messages %}{% if message['from'] == 'human' %}{{'<|im_start|>user\\n' + message['value'] + '<|im_end|>\\n'}}{% elif message['from'] == 'gpt' %}{{'<|im_start|>assistant\\n' + message['value'] + '<|im_end|>\\n' }}{% else %}{{ '<|im_start|>system\\n' + message['value'] + '<|im_end|>\\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "model_max_length": 1000000000000000019884624838656,