gpt2-wechsel-french / tokenizer_config.json
{"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "models/gpt2_french", "tokenizer_class": "GPT2Tokenizer"}