{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "100257": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100258": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100259": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100260": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100261": {
      "content": "\u0012",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100262": {
      "content": "<extra_id_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100263": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100264": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100276": {
      "content": "<|endofprompt|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>"
  ],
  "bos_token": "<|im_start|>",
  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "model_max_length": 4096,
  "pad_token": "<|im_end|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}