ShinoharaHare committed on
Upload tokenizer
tokenizer_config.json CHANGED (+1 -0)
@@ -245803,6 +245803,7 @@
     }
   },
   "bos_token": "<|endoftext|>",
+  "chat_template": "{% for message in messages %}\n {% if message['role'] == 'assistant' %}\n {% if messages|length > 1 %}\n {{- '<|im_start|>assistant\\n' -}}\n {% endif %}\n {{- message['content'] + '<|im_end|>\\n' -}}\n {% else %}\n {{- '<|im_start|>' + message['role'] + '\\n' + message['content'] + '<|im_end|>\\n' -}}\n {% endif %}\n{% endfor %}\n{% if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' -}}\n{% endif %}\n",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
   "model_max_length": 2048,
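For reference, a minimal sketch of how the newly added chat_template renders a conversation once this tokenizer is loaded with transformers. The checkpoint path is a placeholder for whichever repo or local directory contains the updated tokenizer_config.json; the expected output is read directly off the Jinja template above.

```python
from transformers import AutoTokenizer

# Placeholder path: point this at the repo or local checkpoint that
# contains the tokenizer_config.json updated in this commit.
tokenizer = AutoTokenizer.from_pretrained("./path/to/checkpoint")

messages = [
    {"role": "user", "content": "Hello"},
]

# Render the conversation using the chat_template added above.
# add_generation_prompt=True appends the trailing '<|im_start|>assistant\n'
# so the model continues as the assistant.
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)
print(prompt)
# Expected, per the template:
# <|im_start|>user
# Hello<|im_end|>
# <|im_start|>assistant
```

Note one quirk of the template: an assistant message is only prefixed with '<|im_start|>assistant\n' when the conversation holds more than one message ({% if messages|length > 1 %}), so a lone assistant message is rendered without its start tag, while user and system turns always get the full '<|im_start|>role ... <|im_end|>' wrapping.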