ZwwWayne and chujiezheng committed on
Commit
a928a63
1 Parent(s): 4275caa

fix `eos_token` (#12)


- fix `eos_token` (993732e680b81127118090ed39be92b33ef69d35)


Co-authored-by: Chujie Zheng <chujiezheng@users.noreply.huggingface.co>
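
The chat template in this tokenizer config ends every turn with `<|im_end|>`, while `eos_token` was still set to `</s>`; with that mismatch, generation that stops on the EOS token can run past the end of the assistant turn. This commit points `eos_token` at `<|im_end|>` instead (see the diff below). A minimal sketch of how one might check the fix after it lands, assuming a placeholder repo id in place of the actual model this config belongs to:

```python
# Minimal sketch (not part of this commit) for checking the updated tokenizer.
# "your-org/your-chatml-model" is a placeholder repo id, not the actual repository.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-chatml-model")

# After this fix the EOS token matches the turn terminator used by the chat template,
# so generation stops when the model emits <|im_end|>.
assert tokenizer.eos_token == "<|im_end|>"

# Build a ChatML-style prompt: the user turn is closed with <|im_end|> and the
# assistant turn is opened by add_generation_prompt=True.
messages = [{"role": "user", "content": "Hello!"}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```

Note that `pad_token` is left as `</s>`; only the stopping token changes.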

Files changed (1)
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -93,7 +93,7 @@
  "chat_template": "{{ bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "decode_with_prefix_space": false,
- "eos_token": "</s>",
+ "eos_token": "<|im_end|>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "</s>",
  "sp_model_kwargs": null,