{ "_from_model_config": true, "_name_or_path": "openbmb/cpm-bee-2b", "architectures": [ "CpmBeeForCausalLM" ], "auto_map": { "AutoConfig": "configuration_cpmbee.CpmBeeConfig", "AutoModel": "modeling_cpmbee.CpmBeeForCausalLM", "AutoModelForCausalLM": "modeling_cpmbee.CpmBeeForCausalLM" }, "vocab_size": 86583, "hidden_size": 4096, "dim_ff" : 5120, "num_hidden_layers" : 48, "num_attention_heads": 32, "dim_head" : 64, "dropout_p" : 0.0, "position_bias_num_buckets" : 256, "position_bias_num_segment_buckets": 256, "position_bias_max_distance" : 2048, "eps" : 1e-6, "half" : true, "model_type": "cpmbee", "mask_modules": [[false, false], [true, false], [false, false], [true, false], [true, true], [true, false], [true, true], [true, true], [false, false], [false, false], [true, true], [true, false], [true, false], [true, true], [false, false], [true, true], [false, false], [false, true], [true, false], [true, true], [false, false], [false, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [false, false], [true, true], [true, false], [true, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [false, false]] }