Octopus-v2 / special_tokens_map.json
Zack Li
Upload tokenizer
3eaf24a verified
raw
history · blame
957 Bytes
{
  "additional_special_tokens": [
    "<nexa_0>",
    "<nexa_1>",
    "<nexa_2>",
    "<nexa_3>",
    "<nexa_4>",
    "<nexa_5>",
    "<nexa_6>",
    "<nexa_7>",
    "<nexa_8>",
    "<nexa_9>",
    "<nexa_10>",
    "<nexa_11>",
    "<nexa_12>",
    "<nexa_13>",
    "<nexa_14>",
    "<nexa_15>",
    "<nexa_16>",
    "<nexa_17>",
    "<nexa_18>",
    "<nexa_19>",
    "<nexa_20>",
    "<nexa_end>"
  ],
  "bos_token": {
    "content": "<bos>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<eos>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}