{ "additional_special_tokens": [ "<|startofchat|>", "<|endofchat|>", "<|human|>", "<|gpt|>" ], "bos_token": "<s>", "clean_up_tokenization_spaces": false, "eos_token": "</s>", "model_max_length": 2048, "pad_token": "[PAD]", "sp_model_kwargs": {}, "tokenizer_class": "LlamaTokenizer", "unk_token": "<unk>" }