{ "add_bos_token": false, "add_prefix_space": false, "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "errors": "replace", "full_tokenizer_file": null, "model_max_length": 1000000000000000019884624838656, "name_or_path": "pierreguillou/gpt2-small-portuguese", "pad_token": "<|endoftext|>", "special_tokens_map_file": null, "tokenizer_class": "GPT2Tokenizer", "unk_token": "<|endoftext|>" }