{
  "_from_model_config": true,
  "_name_or_path": "openbmb/cpm-bee-5b",
  "architectures": [
    "CpmBeeForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_cpmbee.CpmBeeConfig",
    "AutoModel": "modeling_cpmbee.CpmBeeForCausalLM",
    "AutoModelForCausalLM": "modeling_cpmbee.CpmBeeForCausalLM"
  },
  "vocab_size": 86583,
  "hidden_size": 4096,
  "dim_ff": 10240,
  "num_hidden_layers": 48,
  "num_attention_heads": 32,
  "dim_head": 128,
  "dropout_p": 0.0,
  "position_bias_num_buckets": 256,
  "position_bias_num_segment_buckets": 256,
  "position_bias_max_distance": 2048,
  "eps": 1e-6,
  "half": true,
  "model_type": "cpmbee",
  "mask_modules": [[false, false], [true, false], [false, false], [true, false], [true, true], [true, false], [true, true], [true, true], [false, false], [false, false], [true, true], [true, false], [true, false], [true, true], [false, false], [true, true], [false, false], [false, true], [true, false], [true, true], [false, false], [false, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [false, false], [true, true], [true, false], [true, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [false, false]]
}
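
Because "auto_map" routes the Auto classes to custom configuration_cpmbee.py and modeling_cpmbee.py modules hosted in the repository rather than to classes shipped with transformers, loading this checkpoint needs trust_remote_code=True. A minimal loading sketch, assuming the checkpoint is pulled from the Hugging Face Hub and that "half": true corresponds to fp16 weights:

import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo = "openbmb/cpm-bee-5b"

# auto_map resolves AutoConfig -> CpmBeeConfig and
# AutoModelForCausalLM -> CpmBeeForCausalLM from the repo's own code,
# so trust_remote_code=True is required.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
print(config.hidden_size, config.num_hidden_layers)  # 4096 48

model = AutoModelForCausalLM.from_pretrained(
    repo,
    trust_remote_code=True,
    torch_dtype=torch.half,  # assumption: "half": true means fp16 weights
)

Note that "mask_modules" has 48 entries, one per hidden layer (matching "num_hidden_layers"); each entry appears to be an [attention, feed-forward] mask pair, with the exact semantics defined in modeling_cpmbee.py.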