llama2-13b-de-quatrain-conditioned / special_tokens_map.json
{
  "additional_special_tokens": [
    "<PAD>",
    "<quatrain>",
    "</quatrain>",
    "[ABCD]",
    "[AABC]",
    "[ABAC]",
    "[ABCC]",
    "[ABBA]",
    "[ABAB]",
    "[ABCB]",
    "[ABBC]",
    "[ABBB]",
    "[AABA]",
    "[AABB]",
    "[AAAB]",
    "[ABCA]",
    "[AAAA]",
    "[ABAA]",
    "<iambus>",
    "<trochee>",
    "<anapaest>",
    "<dactyl>",
    "<other>",
    "<amphibrach>",
    "<alexandrine>"
  ],
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<PAD>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
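
A minimal sketch of how this special_tokens_map.json can be exercised with transformers: it loads the tokenizer and checks that the rhyme-scheme markers (e.g. [ABAB]), the meter markers (e.g. <iambus>), and the <PAD> token are registered as single special tokens. The repo id "cyr19/llama2-13b-de-quatrain-conditioned" is inferred from the page header, and the prompt layout shown is hypothetical, not necessarily the format used in training.

# Minimal sketch, assuming the tokenizer is hosted under the repo id below.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("cyr19/llama2-13b-de-quatrain-conditioned")

# Rhyme-scheme and meter markers are additional special tokens, so each one
# maps to a single id instead of being split into sub-word pieces.
print(tokenizer.additional_special_tokens)          # ['<PAD>', '<quatrain>', ...]
print(tokenizer.pad_token, tokenizer.pad_token_id)  # '<PAD>' and its id

# Hypothetical conditioning prefix for an ABAB quatrain in iambic meter.
prompt = "<quatrain>[ABAB]<iambus>"
ids = tokenizer(prompt)["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))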