jlzhou committed on
Commit 91b6fa1
1 Parent(s): 26a9bd3

Add `chat_template` in tokenizer config


Since dolphin-mistral follows the ChatML template, adding a `chat_template` to the tokenizer config can be helpful to users.

Also, text-generation-inference has supported `chat_template` since version 1.4.0.
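With the template in place, conversations can be rendered to a ChatML prompt via `tokenizer.apply_chat_template`. A minimal sketch is below; the repository id is only an assumption for illustration, substitute the actual dolphin-mistral checkpoint you are using.

```python
from transformers import AutoTokenizer

# Assumed repo id for illustration; replace with the actual dolphin-mistral checkpoint.
tokenizer = AutoTokenizer.from_pretrained("cognitivecomputations/dolphin-2.6-mistral-7b")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Renders the messages with the chat_template from tokenizer_config.json and
# appends an opening assistant turn so the model knows to respond.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```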

Files changed (1)
tokenizer_config.json +1 -0
tokenizer_config.json CHANGED
@@ -37,6 +37,7 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|im_start|>user\n' + message['content'] + '<|im_end|>' }}\n{% elif message['role'] == 'system' %}\n{{ '<|im_start|>system\n' + message['content'] + '<|im_end|>' }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|im_start|>assistant\n' + message['content'] + '<|im_end|>' }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|im_start|>assistant' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "legacy": true,