xlnet-large_spell_10k_2_p3 / tokenizer_config.json
{
  "add_prefix_space": true,
  "add_special_tokens": false,
  "additional_special_tokens": [
    "<eop>",
    "<eod>"
  ],
  "bos_token": "<s>",
  "cls_token": "<cls>",
  "do_lower_case": false,
  "eos_token": "</s>",
  "keep_accents": false,
  "mask_token": {
    "__type": "AddedToken",
    "content": "<mask>",
    "lstrip": true,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "name_or_path": "model_saves/xlnet-large_spell_10k_2_p2",
  "pad_token": "<pad>",
  "remove_space": true,
  "sep_token": "<sep>",
  "special_tokens_map_file": null,
  "tokenizer_class": "XLNetTokenizer",
  "unk_token": "<unk>"
}
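
For reference, a minimal sketch of loading this configuration with the Hugging Face transformers library. The repo id stuartmesham/xlnet-large_spell_10k_2_p3 is inferred from the page header and may need adjusting; XLNetTokenizer also requires the sentencepiece package.

from transformers import XLNetTokenizer

# Load the tokenizer from the Hub; tokenizer_config.json keys map to
# constructor arguments (do_lower_case=False, keep_accents=False,
# remove_space=True, additional_special_tokens=["<eop>", "<eod>"], ...).
tokenizer = XLNetTokenizer.from_pretrained("stuartmesham/xlnet-large_spell_10k_2_p3")

print(tokenizer.mask_token)                 # "<mask>" (lstrip=True per the AddedToken entry)
print(tokenizer.additional_special_tokens)  # ["<eop>", "<eod>"]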