{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "50256": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50257": {
      "content": "### End",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50258": {
      "content": "### Instruction:",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50259": {
      "content": "### Response:\n",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "### End",
    "### Instruction:",
    "### Response:\n"
  ],
  "bos_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|endoftext|>",
  "legacy": false,
  "model_max_length": 1024,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}
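
A minimal sketch of how a config like this is typically produced: start from the stock GPT-2 tokenizer (whose only special token, <|endoftext|>, has id 50256 and is reused here as BOS, EOS, PAD, and UNK) and register the three instruction-format markers as additional special tokens. Since they are appended after the existing 50,257-entry vocabulary, they receive ids 50257, 50258, and 50259, matching the "added_tokens_decoder" block above. The save path is a placeholder, not from the source.

from transformers import AutoTokenizer

# Load the base GPT-2 tokenizer; its single special token <|endoftext|>
# serves as this config's bos/eos/unk token.
tokenizer = AutoTokenizer.from_pretrained("gpt2")

# GPT-2 has no pad token by default; this config reuses <|endoftext|>.
tokenizer.pad_token = tokenizer.eos_token

# Register the prompt-format markers as special tokens. They are
# appended after the existing vocabulary, so they get ids 50257-50259.
num_added = tokenizer.add_special_tokens(
    {"additional_special_tokens": ["### End", "### Instruction:", "### Response:\n"]}
)
print(num_added)                                     # 3
print(tokenizer.convert_tokens_to_ids("### End"))    # 50257

# Serializing writes tokenizer_config.json (with the fields shown
# above) alongside the vocab and merges files.
tokenizer.save_pretrained("./my-tokenizer")  # path is illustrative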