unet_small_new / tokenizer_config.json
{
"bos_token": "<s>",
"clean_up_tokenization_spaces": true,
"cls_token": "<cls>",
"eos_token": "</s>",
"mask_token": "<mask>",
"model_max_length": 128,
"pad_token": "<pad>",
"sep_token": "<sep>",
"special_tokens_map_file": "/data/home/itay.nakash/cramming_w_elad/cramming_playground/outputs/unet_small_new/checkpoints/ScriptableCrammedBERT_2023-08-08_2.4425/special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast",
"unk_token": "<unk>"
}
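
Since "tokenizer_class" is "PreTrainedTokenizerFast", this config loads through the standard transformers API. A minimal sketch, assuming the Hub repo id "itay-nakash/unet_small_new" (inferred from the page title, not confirmed by the file itself):

# Load the tokenizer described by this tokenizer_config.json.
# Repo id is an assumption; a local directory containing the
# tokenizer files works the same way.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("itay-nakash/unet_small_new")

# The config caps inputs at model_max_length = 128 and defines <pad>
# as the padding token, so padding to max length yields 128-token rows.
batch = tokenizer(
    ["a short example sentence"],
    padding="max_length",
    truncation=True,
)
print(len(batch["input_ids"][0]))  # 128

Note that the absolute "special_tokens_map_file" path above is a training-time artifact recorded when the tokenizer was saved; when loading from the Hub, transformers reads the special tokens from the files shipped alongside this config rather than from that path.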