vall-e / tokenizer_config.json
{
  "added_tokens": [
    {
      "id": 0,
      "content": "<unk>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 1,
      "content": "<bos>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 2,
      "content": "</eos>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 3,
      "content": "<mask>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    }
  ],
  "bos_token": "<bos>",
  "eos_token": "</eos>",
  "clean_up_tokenization_spaces": true,
  "model_input_names": [
    "input_ids",
    "attention_mask"
  ],
  "tokenizer_class": "PreTrainedTokenizerFast"
}
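
For reference, a minimal sketch of loading this configuration with Hugging Face transformers. The repo ID "ecker/vall-e" is assumed from the path above, and loading presumes a companion tokenizer.json sits alongside this file; neither is stated in the config itself.

from transformers import AutoTokenizer

# Repo ID is an assumption taken from the "vall-e" path above; a companion
# tokenizer.json must exist in the repo for PreTrainedTokenizerFast to load.
tokenizer = AutoTokenizer.from_pretrained("ecker/vall-e")

# The config declares <bos> (id 1) and </eos> (id 2) as the sequence delimiters.
print(tokenizer.bos_token, tokenizer.bos_token_id)
print(tokenizer.eos_token, tokenizer.eos_token_id)

# model_input_names limits the returned fields to input_ids and attention_mask.
enc = tokenizer("example input")
print(list(enc.keys()))  # expected: ['input_ids', 'attention_mask']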