gpt-neox-da / tokenizer_config.json
Prof. Peter Schneider-Kamp · initial commit (9f41942)
{
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "model_max_length": 2048,
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>",
  "max_model_input_sizes": 2048
}
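
For context, a minimal sketch of loading a tokenizer governed by this config via the transformers library; the Hub id "gpt-neox-da" is an assumption taken from the file path above, and the repository may actually live under an organization prefix:

from transformers import AutoTokenizer

# Hub id assumed from the file path shown above; adjust if the
# repository lives under an organization (e.g. "org/gpt-neox-da").
tokenizer = AutoTokenizer.from_pretrained("gpt-neox-da")

# Per the config, bos/eos/pad/unk all map to the single
# <|endoftext|> token, following the GPT-2 convention.
assert tokenizer.eos_token == "<|endoftext|>"

# model_max_length = 2048 caps inputs at the model's context window.
enc = tokenizer("Hej verden!", truncation=True)
print(enc["input_ids"], tokenizer.model_max_length)

Note that model_max_length is the field transformers consults for truncation; the max_model_input_sizes entry here appears to duplicate the same 2048 value.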