File size: 1,442 Bytes
{
  "_from_model_config": true,
  "_name_or_path": "openbmb/cpm-bee-2b",
  "architectures": [
    "CpmBeeForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_cpmbee.CpmBeeConfig",
    "AutoModel": "modeling_cpmbee.CpmBeeForCausalLM",
    "AutoModelForCausalLM": "modeling_cpmbee.CpmBeeForCausalLM"
  },
  "vocab_size": 86583,
  "hidden_size": 4096,
  "dim_ff": 5120,
  "num_hidden_layers": 48,
  "num_attention_heads": 32,
  "dim_head": 64,
  "dropout_p": 0.0,
  "position_bias_num_buckets": 256,
  "position_bias_num_segment_buckets": 256,
  "position_bias_max_distance": 2048,
  "eps": 1e-6,
  "half": true,
  "model_type": "cpmbee",
  "mask_modules": [[false, false], [true, false], [false, false], [true, false], [true, true], [true, false], [true, true], [true, true], [false, false], [false, false], [true, true], [true, false], [true, false], [true, true], [false, false], [true, true], [false, false], [false, true], [true, false], [true, true], [false, false], [false, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [false, false], [true, true], [true, false], [true, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [false, false]]
}
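
Because auto_map points at custom modeling code bundled with the checkpoint rather than classes built into transformers, this configuration has to be loaded with trust_remote_code=True. Below is a minimal sketch of reading the config and instantiating the model, assuming the transformers package is installed and the Hub repo is reachable; the per-layer meaning of mask_modules is an assumption noted in the comments, not something this file states.

from transformers import AutoConfig, AutoModelForCausalLM

# auto_map routes the Auto* classes to the CpmBee code shipped with the
# checkpoint, so remote code must be trusted explicitly.
config = AutoConfig.from_pretrained("openbmb/cpm-bee-2b", trust_remote_code=True)
print(config.hidden_size, config.num_hidden_layers)  # 4096 48

# mask_modules appears to hold one [mask_attention, mask_ffn] pair per
# layer, with true meaning that sub-block is pruned away (an assumption
# based on the related CPM-Ant modeling code, not on this file).
active_attn = sum(not att for att, _ in config.mask_modules)
active_ffn = sum(not ffn for _, ffn in config.mask_modules)
print(active_attn, active_ffn)

model = AutoModelForCausalLM.from_pretrained(
    "openbmb/cpm-bee-2b", trust_remote_code=True
)

Given "half": true, the checkpoint is presumably intended to run in float16, so passing torch_dtype=torch.half to from_pretrained would match that precision when loading on GPU.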