pbllm_llama3_8b_50_g128/original_params.json
{
"dim": 4096,
"n_layers": 32,
"n_heads": 32,
"n_kv_heads": 8,
"vocab_size": 128256,
"multiple_of": 1024,
"ffn_dim_multiplier": 1.3,
"norm_eps": 1e-05,
"rope_theta": 500000.0
}
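
These are the original (pre-quantization) Llama 3 8B hyperparameters. As a minimal sketch, the snippet below shows how the derived model dimensions follow from this config, mirroring the feed-forward sizing used in Meta's reference Llama implementation; the local file path is an assumption for illustration:

```python
import json

# Load the hyperparameters above (path assumed for illustration).
with open("original_params.json") as f:
    p = json.load(f)

# Per-head dimension: 4096 / 32 = 128.
head_dim = p["dim"] // p["n_heads"]

# Grouped-query attention: 32 query heads share 8 KV heads,
# i.e. 4 query heads per KV head.
gqa_groups = p["n_heads"] // p["n_kv_heads"]

# Feed-forward hidden size, following the reference Llama computation:
# start from 4*dim, scale by 2/3 (SwiGLU), apply ffn_dim_multiplier,
# then round up to a multiple of `multiple_of`.
hidden_dim = int(2 * (4 * p["dim"]) / 3)
hidden_dim = int(p["ffn_dim_multiplier"] * hidden_dim)
hidden_dim = p["multiple_of"] * (
    (hidden_dim + p["multiple_of"] - 1) // p["multiple_of"]
)

print(head_dim, gqa_groups, hidden_dim)  # 128 4 14336
```

With this config the computation yields the familiar Llama 3 8B shapes: 128-dimensional heads, a 4:1 GQA ratio, and a 14336-wide feed-forward layer.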