{
  "vocab_size": 81920,
  "max_position_embeddings": 4096,
  "hidden_size": 7168,
  "intermediate_size": 16384,
  "num_hidden_layers": 64,
  "num_attention_heads": 64,
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000,
  "quantization": {
    "group_size": 64,
    "bits": 4
  },
  "model_type": "yayi"
}