{
"additional_special_tokens": [
"__af__",
"__am__",
"__ar__",
"__ast__",
"__az__",
"__ba__",
"__be__",
"__bg__",
"__bn__",
"__br__",
"__bs__",
"__ca__",
"__ceb__",
"__cs__",
"__cy__",
"__da__",
"__de__",
"__el__",
"__en__",
"__es__",
"__et__",
"__fa__",
"__ff__",
"__fi__",
"__fr__",
"__fy__",
"__ga__",
"__gd__",
"__gl__",
"__gu__",
"__ha__",
"__he__",
"__hi__",
"__hr__",
"__ht__",
"__hu__",
"__hy__",
"__id__",
"__ig__",
"__ilo__",
"__is__",
"__it__",
"__ja__",
"__jv__",
"__ka__",
"__kk__",
"__km__",
"__kn__",
"__ko__",
"__lb__",
"__lg__",
"__ln__",
"__lo__",
"__lt__",
"__lv__",
"__mg__",
"__mk__",
"__ml__",
"__mn__",
"__mr__",
"__ms__",
"__my__",
"__ne__",
"__nl__",
"__no__",
"__ns__",
"__oc__",
"__or__",
"__pa__",
"__pl__",
"__ps__",
"__pt__",
"__ro__",
"__ru__",
"__sd__",
"__si__",
"__sk__",
"__sl__",
"__so__",
"__sq__",
"__sr__",
"__ss__",
"__su__",
"__sv__",
"__sw__",
"__ta__",
"__th__",
"__tl__",
"__tn__",
"__tr__",
"__uk__",
"__ur__",
"__uz__",
"__vi__",
"__wo__",
"__xh__",
"__yi__",
"__yo__",
"__zh__",
"__zu__"
],
"bos_token": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<pad>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"sep_token": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"unk_token": {
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}