{
    "tokenizer_class": "LlamaTokenizer",
    "model_max_length": 2048,
    "padding_side": "left",
    "bos_token": "<s>",
    "eos_token": "</s>",
    "unk_token": "<unk>",
    "clean_up_tokenization_spaces": false,
    "special_tokens_map_file": "special_tokens_map.json"
}
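
A minimal sketch of how a config like this (presumably a tokenizer_config.json from a Llama-style checkpoint) is typically consumed with the Hugging Face transformers library. The "./model_dir" path is hypothetical, and the directory would also need the actual tokenizer model and special_tokens_map.json files for loading to succeed.

# Minimal sketch: load a tokenizer from a local directory that contains
# this tokenizer_config.json (path and directory contents are assumptions).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./model_dir")

# The settings above (left padding, 2048-token limit) are picked up
# automatically by from_pretrained.
print(tokenizer.padding_side)      # "left"
print(tokenizer.model_max_length)  # 2048

# Encode a small batch; padding and truncation respect the loaded config.
batch = tokenizer(
    ["Hello world", "A longer example sentence"],
    padding=True,
    truncation=True,
)
print(batch["input_ids"])

Left padding is the usual choice for decoder-only models such as Llama, since generation continues from the right edge of the prompt and padding tokens should not sit between the prompt and the newly generated text.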