albert_xxlarge_tweetqa / config.json
{
  "_name_or_path": "albert-xxlarge-v2",
  "architectures": [
    "AlbertForQuestionAnswering"
  ],
  "attention_probs_dropout_prob": 0,
  "bos_token_id": 2,
  "classifier_dropout_prob": 0.1,
  "down_scale_factor": 1,
  "embedding_size": 128,
  "eos_token_id": 3,
  "gap_size": 0,
  "hidden_act": "gelu_new",
  "hidden_dropout_prob": 0,
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "inner_group_num": 1,
  "intermediate_size": 16384,
  "layer_norm_eps": 1e-12,
  "layers_to_keep": [],
  "max_position_embeddings": 512,
  "model_type": "albert",
  "net_structure_type": 0,
  "num_attention_heads": 64,
  "num_hidden_groups": 1,
  "num_hidden_layers": 12,
  "num_memory_blocks": 0,
  "pad_token_id": 0,
  "type_vocab_size": 2,
  "vocab_size": 30000
}
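
For reference, a minimal sketch of loading the checkpoint described by this config for extractive question answering with the Hugging Face transformers library. The local directory name "./albert_xxlarge_tweetqa" and the question/context strings are placeholders, not part of this repository; the API calls (AutoTokenizer, AlbertForQuestionAnswering) are standard transformers usage.

# Minimal sketch, assuming config.json and the model weights sit in a local
# directory named ./albert_xxlarge_tweetqa (hypothetical path).
import torch
from transformers import AutoTokenizer, AlbertForQuestionAnswering

model_dir = "./albert_xxlarge_tweetqa"  # placeholder local path
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AlbertForQuestionAnswering.from_pretrained(model_dir)
model.eval()

# Placeholder example inputs.
question = "Who scored the goal?"
context = "Messi scored the goal in the final minute of the match."

inputs = tokenizer(question, context, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Take the most likely start/end token positions and decode the answer span.
start = int(outputs.start_logits.argmax())
end = int(outputs.end_logits.argmax())
answer = tokenizer.decode(inputs["input_ids"][0][start : end + 1])
print(answer)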