{
  "_name_or_path": "microsoft/deberta-v2-xlarge",
  "architectures": [
    "DebertaV2ForTokenClassification"
  ],
  "attention_head_size": 64,
  "attention_probs_dropout_prob": 0.1,
  "conv_act": "gelu",
  "conv_kernel_size": 3,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1536,
  "initializer_range": 0.02,
  "intermediate_size": 6144,
  "layer_norm_eps": 1e-07,
  "max_position_embeddings": 512,
  "max_relative_positions": -1,
  "model_type": "deberta-v2",
  "norm_rel_ebd": "layer_norm",
  "num_attention_heads": 24,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "pooler_dropout": 0,
  "pooler_hidden_act": "gelu",
  "pooler_hidden_size": 1536,
  "pos_att_type": [
    "p2c",
    "c2p"
  ],
  "position_biased_input": false,
  "position_buckets": 256,
  "relative_attention": true,
  "share_att_key": true,
  "transformers_version": "4.5.0",
  "type_vocab_size": 0,
  "vocab_size": 128100
}