{
"added_tokens_decoder": {
"0": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"1": {
"content": "<pad>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"2": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"3": {
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128001": {
"content": "__en__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128002": {
"content": "__ha__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128003": {
"content": "__is__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128004": {
"content": "__ja__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128005": {
"content": "__cs__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128006": {
"content": "__ru__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128007": {
"content": "__zh__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"128008": {
"content": "__de__",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"additional_special_tokens": [
"__en__",
"__ha__",
"__is__",
"__ja__",
"__cs__",
"__ru__",
"__zh__",
"__de__"
],
"bos_token": "<s>",
"clean_up_tokenization_spaces": true,
"eos_token": "</s>",
"language_codes": "wmt21",
"model_max_length": 1000000000000000019884624838656,
"num_madeup_words": 0,
"pad_token": "<pad>",
"sep_token": "</s>",
"sp_model_kwargs": {},
"src_lang": "en",
"tgt_lang": "en",
"tokenizer_class": "M2M100Tokenizer",
"unk_token": "<unk>"
}