{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "legacy": false,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": null,
  "padding_side": "right",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "tokenizer_file": "/root/.cache/huggingface/hub/models--LeoLM--leo-hessianai-7b/snapshots/88c5ac07006ea8f1b5d10aa4f03f0d624dd27e56/tokenizer.json",
  "unk_token": "<unk>",
  "use_default_system_prompt": true
}