{ "clean_up_tokenization_spaces": true, "model_input_names": [ "input_ids", "attention_mask" ], "model_max_length": 1000000000000000019884624838656, "tokenizer_class": "PreTrainedTokenizerFast" }