Nikita Sushko committed on
Commit
d2ea5af
1 Parent(s): cc5c270

Updated special_tokens_map.json and tokenizer_config.json


In the standard Llama 3 `special_tokens_map.json` and `tokenizer_config.json`, the eos token is set to `<|end_of_text|>`. Since the model actually ends each assistant turn with `<|eot_id|>` (see the chat template below), generation never reaches the configured eos token: the model keeps going, spamming `assistant` headers into the chat and chatting with itself instead of stopping.

This fix changes the eos token to `<|eot_id|>` in `special_tokens_map.json` and `tokenizer_config.json`, so the model stops correctly with standard generation settings.
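For reference, a minimal sketch of how the corrected eos token changes generation behavior with `transformers`. The repo id below is a placeholder, not this repository's actual name:

```python
# Minimal sketch: with eos_token set to <|eot_id|>, generate() stops at the end of
# the assistant turn instead of emitting new headers and continuing the chat.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/your-llama-3-model"  # placeholder; substitute this repository's model id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

messages = [{"role": "user", "content": "Hello!"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)

# tokenizer.eos_token_id now resolves to <|eot_id|>, so generation halts cleanly
# at the turn boundary.
output = model.generate(
    input_ids, max_new_tokens=256, eos_token_id=tokenizer.eos_token_id
)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```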

Files changed (2)
  1. special_tokens_map.json +1 -1
  2. tokenizer_config.json +1 -1
special_tokens_map.json CHANGED
@@ -1,4 +1,4 @@
 {
   "bos_token": "<|begin_of_text|>",
-  "eos_token": "<|end_of_text|>"
+  "eos_token": "<|eot_id|>"
 }
tokenizer_config.json CHANGED
@@ -2052,7 +2052,7 @@
   "bos_token": "<|begin_of_text|>",
   "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
+  "eos_token": "<|eot_id|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"