distilgpt_new3_0035 / tokenizer_config.json
bigmorning's picture
add tokenizer
ff84a6d
raw
history blame contribute delete
683 Bytes
{
"add_prefix_space": false,
"bos_token": "<|endoftext|>",
"cls_token": "[CLS]",
"do_lower_case": true,
"eos_token": "<|endoftext|>",
"mask_token": "[MASK]",
"name_or_path": "bigmorning/distilgpt_new2_0060",
"pad_token": "[PAD]",
"sep_token": "[SEP]",
"additional_special_tokens": [
"[PAD]",
"[UNK]",
"[CLS]",
"[SEP]",
"[MASK]"
],
"special_tokens_map_file": "/root/.cache/huggingface/transformers/f9dd91ec01fc25f5ae3f4dd46e36ac7fc9a7c2e42c82a107dca01bc6b97764ec.7da70648c6cb9951e284c9685f9ba7ae083dd59ed1d6d84bdfc0584a4ea94b6d",
"strip_accents": null,
"tokenize_chinese_chars": true,
"tokenizer_class": "GPT2Tokenizer",
"unk_token": "[UNK]"
}