{ "name_or_path": "free-smallgpt-1epoch", "special_tokens": [ "", "|endoftext|" ], "special_tokens_map_file": "gpt2_small/special_tokens_map.json", "tokenizer_class": "PreTrainedTokenizerFast" }