{
    "tokenizer_class": "LlamaTokenizer",
    "model_max_length": 2048,
    "padding_side": "left",
    "bos_token": "<s>",
    "eos_token": "</s>",
    "unk_token": "<unk>",
    "clean_up_tokenization_spaces": false,
    "special_tokens_map_file": "special_tokens_map.json"
}