{ "tokenizer_class": "LlamaTokenizer", "model_max_length": 2048, "padding_side": "left", "add_bos_token": false, "add_eos_token": false, "bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "clean_up_tokenization_spaces": false, "special_tokens_map_file": "special_tokens_map.json" }