32B-full / tokenizer_config.json
{
  "add_prefix_space": false,
  "backend": "tokenizers",
  "bos_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "is_local": true,
  "model_max_length": 65536,
  "model_specific_special_tokens": {},
  "pad_token": "<|pad|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}
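
This is a standard Hugging Face tokenizer_config.json for a GPT-2-style BPE tokenizer: <|endoftext|> serves as the BOS, EOS, and UNK token, a dedicated <|pad|> token handles padding, and inputs are capped at 65,536 tokens. Below is a minimal sketch of how transformers consumes this file; the repo id "dphnAI/32B-full" is an assumption inferred from the file path, not a confirmed Hub location.

# A minimal loading sketch, assuming the checkpoint lives at
# "dphnAI/32B-full" (hypothetical repo id inferred from the path above).
from transformers import AutoTokenizer

# AutoTokenizer reads tokenizer_config.json and instantiates the class
# named under "tokenizer_class" (GPT2Tokenizer, or its fast variant).
tok = AutoTokenizer.from_pretrained("dphnAI/32B-full")

print(tok.model_max_length)  # 65536, from "model_max_length"
print(tok.eos_token)         # "<|endoftext|>", shared with BOS and UNK
print(tok.pad_token)         # "<|pad|>", the dedicated padding token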