{ "add_prefix_space": false, "added_tokens_decoder": { "0": { "content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "50257": { "content": "<|prompter|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "50258": { "content": "<|assistant|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "50259": { "content": "<|system|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "50260": { "content": "<|prefix_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "50261": { "content": "<|prefix_begin|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "additional_special_tokens": [ "<|prompter|>", "<|assistant|>", "<|system|>", "<|prefix_end|>", "<|prefix_begin|>" ], "bos_token": "<|endoftext|>", "clean_up_tokenization_spaces": true, "eos_token": "<|endoftext|>", "model_max_length": 1000000000000000019884624838656, "pad_token": "<|endoftext|>", "sep_token": "<|endoftext|>", "tokenizer_class": "GPT2Tokenizer", "unk_token": "<|endoftext|>" }