{
  "added_tokens_decoder": {},
  "auto_map": {
    "AutoTokenizer": [
      "tokenizer.ByteTokenizer",
      null
    ]
  },
  "byte_level": true,
  "clean_up_tokenization_spaces": true,
  "model_max_length": 1000000000000000019884624838656,
  "padding_side": "left",
  "truncation_side": "left"
}