gpt2-large-de-quatrain-conditioned / special_tokens_map.json
{
  "additional_special_tokens": [
    "<PAD>",
    "<quatrain>",
    "</quatrain>",
    "[ABCD]",
    "[AABC]",
    "[ABAC]",
    "[ABCC]",
    "[ABBA]",
    "[ABAB]",
    "[ABCB]",
    "[ABBC]",
    "[ABBB]",
    "[AABA]",
    "[AABB]",
    "[AAAB]",
    "[ABCA]",
    "[AAAA]",
    "[ABAA]",
    "<iambus>",
    "<trochee>",
    "<anapaest>",
    "<dactyl>",
    "<other>",
    "<amphibrach>",
    "<alexandrine>"
  ],
  "bos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<PAD>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
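
For reference, a minimal sketch of how a tokenizer carrying this special-tokens map might be loaded and inspected with the transformers library. The repository ID "cyr19/gpt2-large-de-quatrain-conditioned" and the conditioning-prompt layout are assumptions inferred from the page context, not confirmed by the repository.

# Minimal sketch: load the tokenizer that ships with this special_tokens_map.json
# and inspect the conditioning tokens it declares. The repo ID below is an
# assumption based on the page context and may differ.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("cyr19/gpt2-large-de-quatrain-conditioned")

# The additional special tokens cover quatrain boundaries, rhyme schemes
# (e.g. [ABAB]) and metre labels (e.g. <iambus>).
print(tokenizer.additional_special_tokens)

# <PAD> is registered as the padding token; <|endoftext|> doubles as BOS, EOS and UNK.
print(tokenizer.pad_token, tokenizer.pad_token_id)
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)

# A conditioning prefix for generation might combine a rhyme scheme and a metre
# tag before the <quatrain> opener (hypothetical usage, not confirmed by the repo).
prompt = "[ABAB] <iambus> <quatrain>"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
print(input_ids)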