{
    "tokenizer_class": "LlamaTokenizer",
    "add_bos_token": false,
    "add_eos_token": false,
    "model_max_length": 2048,
    "padding_side": "left",
    "bos_token": "<s>",
    "eos_token": "</s>",
    "unk_token": "<unk>",
    "clean_up_tokenization_spaces": false,
    "special_tokens_map_file": "special_tokens_map.json"
}
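
A minimal sketch of how this configuration is consumed, assuming it is saved as tokenizer_config.json inside a model directory (the path below is hypothetical): transformers' AutoTokenizer picks up these fields when the tokenizer is loaded.

from transformers import AutoTokenizer

# Hypothetical local directory containing this tokenizer_config.json
# alongside the tokenizer model files.
tokenizer = AutoTokenizer.from_pretrained("path/to/model")

# The loaded tokenizer reflects the settings above.
print(tokenizer.model_max_length)  # 2048
print(tokenizer.padding_side)      # "left"

# With add_bos_token / add_eos_token set to false, encoding a string
# does not automatically prepend <s> or append </s>.
print(tokenizer("hello").input_ids)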