{
  "dim": 2048,
  "n_layers": 16,
  "n_heads": 32,
  "n_kv_heads": 8,
  "vocab_size": 128256,
  "ffn_dim_multiplier": 1.5,
  "multiple_of": 256,
  "norm_eps": 1e-05,
  "rope_theta": 500000.0,
  "use_scaled_rope": true,
  "quantization_args": {
    "group_size": 32
  }
}