{
    "architectures": [
        "DebertaV2ForCausalLM"
    ],
    "auto_map": {
        "AutoConfig": "configuration_deberta.DebertaV2Config",
        "AutoModel": "modeling_deberta.DebertaV2Model",
        "AutoModelForMaskedLM": "modeling_deberta.DebertaV2ForMaskedLM",
        "AutoModelForCausalLM": "modeling_deberta.DebertaV2ForCausalLM"
    },
    "cls_token_id": 1,
    "sep_token_id": 2,
    "mask_token_id": 128000,
    "attention_probs_dropout_prob": 0.1,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 1536,
    "initializer_range": 0.02,
    "intermediate_size": 6144,
    "max_position_embeddings": 512,
    "relative_attention": true,
    "position_buckets": 256,
    "norm_rel_ebd": "layer_norm",
    "share_att_key": true,
    "pos_att_type": "p2c|c2p",
    "layer_norm_eps": 1e-7,
    "conv_kernel_size": 3,
    "conv_act": "gelu",
    "max_relative_positions": -1,
    "position_biased_input": false,
    "num_attention_heads": 24,
    "attention_head_size": 64,
    "num_hidden_layers": 48,
    "type_vocab_size": 0,
    "vocab_size": 128100
}
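
Because the "auto_map" entries above point the transformers Auto classes at custom code shipped with the repository (configuration_deberta.py and modeling_deberta.py), loading this checkpoint requires trust_remote_code=True. A minimal loading sketch, assuming a hypothetical repo id my-org/deberta-v2-causal (substitute the actual repository that contains this config.json):

from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder repo id; the real repository must ship this config.json
# together with configuration_deberta.py and modeling_deberta.py.
repo_id = "my-org/deberta-v2-causal"

# trust_remote_code=True lets transformers resolve the custom
# DebertaV2Config / DebertaV2ForCausalLM classes named in "auto_map".
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# Sanity checks against the values in the config above:
# 24 heads x 64 dims per head matches the 1536-dim hidden size, and
# position information comes only from relative attention, since
# position_biased_input is false.
assert config.num_attention_heads * config.attention_head_size == config.hidden_size
assert config.relative_attention and not config.position_biased_input

Assuming the custom modeling code follows the reference DeBERTa-v2 semantics, max_relative_positions: -1 falls back to max_position_embeddings (512), position_buckets: 256 compresses relative distances into log-spaced buckets, and share_att_key: true reuses the content query/key projections for the positional attention terms named in pos_att_type (p2c and c2p).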