hjb: added tf model (419bf45)
{
  "_name_or_path": "Maltehb/-l-ctra-danish-electra-small-cased-ner-dane",
  "architectures": [
    "ElectraForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "embedding_size": 128,
  "generator_size": "0.25",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 256,
  "id2label": {
    "0": "B-PER",
    "1": "I-PER",
    "2": "B-LOC",
    "3": "I-LOC",
    "4": "B-ORG",
    "5": "I-ORG",
    "6": "O",
    "7": "[PAD]",
    "8": "[CLS]",
    "9": "[SEP]"
  },
  "initializer_range": 0.02,
  "intermediate_size": 1024,
  "label2id": {
    "B-LOC": 2,
    "B-ORG": 4,
    "B-PER": 0,
    "I-LOC": 3,
    "I-ORG": 5,
    "I-PER": 1,
    "O": 6,
    "[CLS]": 8,
    "[PAD]": 7,
    "[SEP]": 9
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "electra",
  "num_attention_heads": 4,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "summary_activation": "gelu",
  "summary_last_dropout": 0.1,
  "summary_type": "first",
  "summary_use_proj": true,
  "transformers_version": "4.6.1",
  "type_vocab_size": 2,
  "vocab_size": 32000
}
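
A minimal sketch of how a config like this is consumed, using the Hugging Face transformers library in Python. The model ID below is taken verbatim from "_name_or_path" and is an assumption; any local directory containing this config.json alongside the model weights works the same way, and the Danish example sentence is illustrative only.

# Sketch: load the ELECTRA token-classification model this config describes.
# model_id is assumed from "_name_or_path"; a local checkpoint dir also works.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_id = "Maltehb/-l-ctra-danish-electra-small-cased-ner-dane"  # assumed ID

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The id2label map in the config is what turns predicted class indices
# into the BIO tag strings (B-PER, I-LOC, ...) returned by the pipeline.
ner = pipeline("ner", model=model, tokenizer=tokenizer)
print(ner("Mette besøgte København i går."))  # illustrative Danish input

The dimensions in the config (hidden_size 256, embedding_size 128, 12 layers, 4 attention heads) match the ELECTRA-small discriminator, and the tokenizer loaded with the checkpoint must share its 32000-entry vocabulary.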