{
"tokenizer_class": "LlamaTokenizer",
"add_bos_token": false,
"add_eos_token": false,
"model_max_length": 2048,
"padding_side": "left",
  "bos_token": "<s>",
  "eos_token": "</s>",
  "unk_token": "<unk>",
"clean_up_tokenization_spaces": false,
"special_tokens_map_file": "special_tokens_map.json"
}