LingoWhale-8B / tokenizer_config.json
{
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_lingowhale.LingoWhaleTokenizer",
      null
    ]
  },
  "add_bos_token": false,
  "add_eos_token": false,
  "use_fast": false,
  "clean_up_tokenization_spaces": false,
  "model_max_length": 8192,
  "sp_model_kwargs": {},
  "tokenizer_class": "LingoWhaleTokenizer",
  "bos_token": {
    "__type": "AddedToken",
    "content": "<!!BOS!!>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": true
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "<!!EOS!!>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": true
  },
  "pad_token": {
    "__type": "AddedToken",
    "content": "<!!UNK!!>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": true
  },
  "unk_token": {
    "__type": "AddedToken",
    "content": "<!!UNK!!>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": true
  }
}
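
The `auto_map` entry points `AutoTokenizer` at custom code shipped with the repo (`tokenization_lingowhale.py`); the `null` in the second slot means no fast tokenizer is provided, consistent with `"use_fast": false`. Loading a tokenizer defined by repo-local code requires `trust_remote_code=True`. A minimal loading sketch, assuming the repo id is `deeplang-ai/LingoWhale-8B` (adjust to the actual repository):

```python
from transformers import AutoTokenizer

# trust_remote_code=True is needed so transformers can import
# tokenization_lingowhale.LingoWhaleTokenizer from the repo.
tokenizer = AutoTokenizer.from_pretrained(
    "deeplang-ai/LingoWhale-8B",  # assumed repo id
    use_fast=False,               # config sets "use_fast": false; no fast class exists
    trust_remote_code=True,
)

# "add_bos_token" and "add_eos_token" are both false, so encoding does not
# automatically prepend <!!BOS!!> or append <!!EOS!!>:
ids = tokenizer("Hello, LingoWhale!")["input_ids"]

# The special tokens are still registered and available by name:
print(tokenizer.bos_token, tokenizer.eos_token)  # <!!BOS!!> <!!EOS!!>
```

Note that `pad_token` reuses the `<!!UNK!!>` content rather than defining a separate padding token, so padded positions share the unknown token's id.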