TTS_AkylAI_2 / special_tokens_map.json
{
  "pad_token": {
    "content": "ж",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
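
A minimal sketch of how this map is typically consumed when loading the tokenizer with the transformers library; the repository id "Simonlob/TTS_AkylAI_2" is an assumption inferred from the file path above, not confirmed by the file itself.

# Sketch, assuming the tokenizer lives in the repo "Simonlob/TTS_AkylAI_2"
# and is loadable with AutoTokenizer; special_tokens_map.json supplies the
# pad and unk tokens shown above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Simonlob/TTS_AkylAI_2")

# The pad token is the Cyrillic letter "ж"; the unknown token is "<unk>".
print(tokenizer.pad_token)           # "ж"
print(tokenizer.unk_token)           # "<unk>"
print(tokenizer.special_tokens_map)  # {"pad_token": "ж", "unk_token": "<unk>"}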