{
    "model": "llama",
    "hidden_size": 2048,
    "intermediate_size": 5632,
    "num_attention_heads": 32,
    "multi_query_group_num": 4,
    "num_hidden_layers": 22,
    "hidden_act": "silu",
    "vocab_size": 64793,
    "segment_vocab_size": 0,
    "layer_norm_eps": 1e-06,
    "rope_rank": "updown",
    "flash_attention": true,
    "tie_emb_prj_weight": true,
    "is_causal": true
}
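
A minimal sketch of reading this config and deriving a couple of shapes it implies. The filename `config.json` and the derived quantities (`head_dim`, queries per KV group) are illustrative assumptions, not fields of the original file:

```python
import json

# Assumption: the JSON above has been saved as "config.json".
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: hidden_size split across attention heads.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]  # 2048 // 32 = 64

# Grouped-query attention: num_attention_heads query heads share
# multi_query_group_num key/value groups, so each KV group serves
# several query heads.
queries_per_kv = cfg["num_attention_heads"] // cfg["multi_query_group_num"]  # 32 // 4 = 8

print(f"head_dim={head_dim}, queries_per_kv={queries_per_kv}")
```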