{
"additional_special_tokens": [
"<PAD>",
"<quatrain>",
"</quatrain>",
"[ABCD]",
"[AABC]",
"[ABAC]",
"[ABCC]",
"[ABBA]",
"[ABAB]",
"[ABCB]",
"[ABBC]",
"[ABBB]",
"[AABA]",
"[AABB]",
"[AAAB]",
"[ABCA]",
"[AAAA]",
"[ABAA]",
"<iambus>",
"<trochee>",
"<anapaest>",
"<dactyl>",
"<other>",
"<amphibrach>",
"<alexandrine>"
],
"bos_token": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<PAD>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"unk_token": {
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}