{
"model": "llama",
"hidden_size": 4096,
"intermediate_size": 11008,
"num_attention_heads": 32,
"num_hidden_layers": 32,
"layer_norm_eps": 1e-06,
"hidden_act": "silu",
"vocab_size": 32000,
"segment_vocab_size": 0,
"skip_init": true,
"rope_rank": "updown"
}
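For reference, here is a minimal sketch of reading this config with Python's standard json module and deriving a couple of quantities that follow directly from its fields. The local filename config.json is an assumption; keys such as segment_vocab_size, skip_init, and rope_rank follow the bert4torch config convention rather than the Hugging Face one.

```python
import json

# Assumption: the JSON above has been saved locally as config.json.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Per-head dimension follows from hidden_size / num_attention_heads: 4096 / 32 = 128.
head_dim = config["hidden_size"] // config["num_attention_heads"]

print(f'layers={config["num_hidden_layers"]}, heads={config["num_attention_heads"]}, head_dim={head_dim}')
print(f'vocab_size={config["vocab_size"]}, activation={config["hidden_act"]}')
```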