{"model_max_length": 512, "special_tokens_map_file": null, "tokenizer_class": "GPT2Tokenizer"}