{ "add_bos_token": true, "add_eos_token": false, "added_tokens_decoder": { "32000": { "content": "õ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32001": { "content": "÷", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32002": { "content": "Á", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32003": { "content": "ý", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32004": { "content": "À", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32005": { "content": "ÿ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32006": { "content": "ø", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32007": { "content": "ú", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32008": { "content": "þ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32009": { "content": "ü", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32010": { "content": "ù", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32011": { "content": "ö", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32012": { "content": "û", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32013": { "content": "<|begin▁of▁sentence|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "32014": { "content": "<|end▁of▁sentence|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "32015": { "content": "<|fim▁hole|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32016": { "content": "<|fim▁begin|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32017": { "content": "<|fim▁end|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32018": { "content": "", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32019": { "content": "<|User|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32020": { "content": "<|Assistant|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false }, "32021": { "content": "<|EOT|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false } }, "bos_token": "<|begin▁of▁sentence|>", "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user' or message['role'] == 'human') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = 'system\n' + system_message + '\n' %}{% else %}{% set content = '' %}{% endif %}{% if message['role'] == 
'user' or message['role'] == 'human' %}{{ content + 'human\n' + message['content'] + '\n' }}{% elif message['role'] == 'assistant' or message['role'] == 'bot' %}{{ 'bot\n' + message['content'] + '\n' + eos_token + '\n'}}{% else %}{{ raise_exception('Only user/human and assistant/bot roles are supported!') }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'bot\n' }}{% endif %}", "clean_up_tokenization_spaces": false, "eos_token": "<|end▁of▁sentence|>", "legacy": true, "model_max_length": 16384, "pad_token": "<|end▁of▁sentence|>", "sp_model_kwargs": {}, "tokenizer_class": "LlamaTokenizer", "unk_token": null, "use_default_system_prompt": false }
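A minimal sketch of how the chat_template above renders a conversation, assuming the config is loaded through transformers' AutoTokenizer (version recent enough to support apply_chat_template); the local path "path/to/tokenizer" is a placeholder, not part of this config.

# Sketch only: "path/to/tokenizer" is a hypothetical directory containing this tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/tokenizer")

messages = [
    {"role": "system", "content": "You are a helpful coding assistant."},
    {"role": "user", "content": "Write a hello-world in Python."},
]

# The template prefixes the system message with "system\n", user/human turns with
# "human\n", assistant/bot turns with "bot\n" plus the EOS token, and
# add_generation_prompt=True appends the trailing "bot\n" cue for generation.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)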