{ "model_max_length": 1000000000000000019884624838656, "name_or_path": "law_tokenizer", "special_tokens_map_file": "test_tokenizer/special_tokens_map.json", "tokenizer_class": "PreTrainedTokenizerFast" }