py800m / tokenizer_config.json
{
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "name_or_path": "EleutherAI/gpt-neox-20b",
  "special_tokens_map_file": "/fsx/home-hailey/.cache/huggingface/hub/models--EleutherAI--gpt-neox-20b/snapshots/3523781c8df75f7741687a4284f6f70e1afa12f4/special_tokens_map.json",
  "tokenizer_class": "GPTNeoXTokenizer",
  "unk_token": "<|endoftext|>"
}
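
For context, a minimal sketch of how this config is consumed: Hugging Face transformers reads tokenizer_config.json when instantiating a tokenizer via AutoTokenizer.from_pretrained. The repo id "rexwang8/py800m" below is assumed from the page header and may differ from the actual repo path.

# Minimal sketch, assuming the repo id "rexwang8/py800m".
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("rexwang8/py800m")

# This config maps bos_token, eos_token, and unk_token all to
# "<|endoftext|>", so the three special-token strings coincide.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)
print(tokenizer("Hello world")["input_ids"])

Because add_prefix_space is false, a leading space is not prepended to the input before tokenization, matching the GPT-NeoX convention inherited from EleutherAI/gpt-neox-20b (the name_or_path above).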