{
    "model": "llama",
    "hidden_size": 4096,
    "intermediate_size": 11008, 
    "num_attention_heads": 32,
    "num_hidden_layers": 32,
    "hidden_act": "silu",
    "vocab_size": 32000,
    "segment_vocab_size": 0,
    "skip_init": true,
    "layer_norm_eps": 1e-6,
    "rope_rank": "updown",
    "generation_config": {"tokenizer_config":  {"skip_special_tokens": true, "add_special_tokens": false},"end_id": 2}
}
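
For reference, a minimal sketch of how this config could be consumed in Python: load the JSON, check a couple of invariants, and derive the per-head dimension. The file name config.json and the loading code are assumptions for illustration, not part of this repository's API.

import json

# Load the LLaMA config shown above (the path is an assumption; adjust as needed).
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Basic sanity checks on fields defined in this file.
assert config["model"] == "llama"
assert config["hidden_size"] % config["num_attention_heads"] == 0

# Derived value: per-head dimension = 4096 / 32 = 128.
head_dim = config["hidden_size"] // config["num_attention_heads"]
print(f"layers={config['num_hidden_layers']}, head_dim={head_dim}, "
      f"end_id={config['generation_config']['end_id']}")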