Commit
•
1d5980f
1
Parent(s):
bbbedd1
Fix Incorrect Prompt Template defined in tokenizer_config.json (#3)
Browse files - Fix Incorrect Prompt Template defined in tokenizer_config.json (f8744cd4f6268b99ee7fd790b735694acfd07b70)
Co-authored-by: Mikael <Mikael110@users.noreply.huggingface.co>
- tokenizer_config.json +1 -1
tokenizer_config.json
CHANGED
@@ -2066,7 +2066,7 @@
|
|
2066 |
}
|
2067 |
},
|
2068 |
"bos_token": "<|begin_of_text|>",
|
2069 |
-
"chat_template": "{% set
|
2070 |
"clean_up_tokenization_spaces": true,
|
2071 |
"eos_token": "<|im_end|>",
|
2072 |
"model_input_names": [
|
|
|
2066 |
}
|
2067 |
},
|
2068 |
"bos_token": "<|begin_of_text|>",
|
2069 |
+
"chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
|
2070 |
"clean_up_tokenization_spaces": true,
|
2071 |
"eos_token": "<|im_end|>",
|
2072 |
"model_input_names": [
|