layoutlm-base-uncased / config.json
{
  "_name_or_path": "microsoft/layoutlm-base-uncased",
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-12,
  "max_2d_position_embeddings": 1024,
  "max_position_embeddings": 512,
  "model_type": "layoutlm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "transformers_version": "4.4.0.dev0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
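
For reference, a minimal sketch of loading this configuration with the transformers library (assumes transformers is installed and the checkpoint name in "_name_or_path" is reachable on the Hub):

from transformers import LayoutLMConfig, LayoutLMModel

# Fetch the config.json shown above from the Hub checkpoint.
config = LayoutLMConfig.from_pretrained("microsoft/layoutlm-base-uncased")
print(config.hidden_size)                 # 768
print(config.max_2d_position_embeddings)  # 1024, range of the 2D bounding-box coordinates

# Build a randomly initialized model from this config;
# use LayoutLMModel.from_pretrained(...) instead to also load the pretrained weights.
model = LayoutLMModel(config)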