system HF staff committed on
Commit
37dd4ec
1 Parent(s): 3de8df0

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +47 -33
config.json CHANGED
@@ -1,57 +1,71 @@
1
  {
 
 
 
2
  "attention_probs_dropout_prob": 0.1,
3
- "finetuning_task": "ner",
 
 
 
4
  "hidden_act": "gelu",
5
  "hidden_dropout_prob": 0.1,
6
  "hidden_size": 768,
7
  "id2label": {
8
- "0": "EVN",
9
- "1": "LOC",
10
- "2": "LOC/LOC",
11
- "3": "LOC/ORG",
12
- "4": "LOC/PRS",
13
- "5": "MSR",
14
- "6": "O",
15
- "7": "OBJ",
16
- "8": "OBJ/ORG",
17
- "9": "ORG",
18
- "10": "ORG/PRS",
19
- "11": "PRS",
20
- "12": "PRS/WRK",
21
- "13": "TME",
22
- "14": "WRK"
23
  },
24
  "initializer_range": 0.02,
25
  "intermediate_size": 3072,
26
  "is_decoder": false,
27
  "label2id": {
28
- "EVN": 0,
29
- "LOC": 1,
30
- "LOC/LOC": 2,
31
- "LOC/ORG": 3,
32
- "LOC/PRS": 4,
33
- "MSR": 5,
34
- "O": 6,
35
- "OBJ": 7,
36
- "OBJ/ORG": 8,
37
- "ORG": 9,
38
- "ORG/PRS": 10,
39
- "PRS": 11,
40
- "PRS/WRK": 12,
41
- "TME": 13,
42
- "WRK": 14
43
  },
44
  "layer_norm_eps": 1e-12,
 
 
45
  "max_position_embeddings": 512,
 
46
  "num_attention_heads": 12,
 
47
  "num_hidden_layers": 12,
48
- "num_labels": 15,
 
49
  "output_attentions": false,
50
  "output_hidden_states": false,
51
  "output_past": true,
 
52
  "pruned_heads": {},
 
 
 
 
53
  "torchscript": false,
54
  "type_vocab_size": 2,
55
  "use_bfloat16": false,
56
- "vocab_size": 50002
57
  }
 
1
  {
2
+ "architectures": [
3
+ "BertForTokenClassification"
4
+ ],
5
  "attention_probs_dropout_prob": 0.1,
6
+ "bos_token_id": 0,
7
+ "do_sample": false,
8
+ "eos_token_ids": 0,
9
+ "finetuning_task": null,
10
  "hidden_act": "gelu",
11
  "hidden_dropout_prob": 0.1,
12
  "hidden_size": 768,
13
  "id2label": {
14
+ "0": "O",
15
+ "1": "OBJ",
16
+ "2": "TME",
17
+ "3": "ORG/PRS",
18
+ "4": "OBJ/ORG",
19
+ "5": "PRS/WRK",
20
+ "6": "WRK",
21
+ "7": "LOC",
22
+ "8": "ORG",
23
+ "9": "PER",
24
+ "10": "LOC/PRS",
25
+ "11": "LOC/ORG",
26
+ "12": "MSR",
27
+ "13": "EVN"
 
28
  },
29
  "initializer_range": 0.02,
30
  "intermediate_size": 3072,
31
  "is_decoder": false,
32
  "label2id": {
33
+ "EVN": 13,
34
+ "LOC": 7,
35
+ "LOC/ORG": 11,
36
+ "LOC/PRS": 10,
37
+ "MSR": 12,
38
+ "O": 0,
39
+ "OBJ": 1,
40
+ "OBJ/ORG": 4,
41
+ "ORG": 8,
42
+ "ORG/PRS": 3,
43
+ "PER": 9,
44
+ "PRS/WRK": 5,
45
+ "TME": 2,
46
+ "WRK": 6
 
47
  },
48
  "layer_norm_eps": 1e-12,
49
+ "length_penalty": 1.0,
50
+ "max_length": 20,
51
  "max_position_embeddings": 512,
52
+ "model_type": "bert",
53
  "num_attention_heads": 12,
54
+ "num_beams": 1,
55
  "num_hidden_layers": 12,
56
+ "num_labels": 14,
57
+ "num_return_sequences": 1,
58
  "output_attentions": false,
59
  "output_hidden_states": false,
60
  "output_past": true,
61
+ "pad_token_id": 0,
62
  "pruned_heads": {},
63
+ "repetition_penalty": 1.0,
64
+ "temperature": 1.0,
65
+ "top_k": 50,
66
+ "top_p": 1.0,
67
  "torchscript": false,
68
  "type_vocab_size": 2,
69
  "use_bfloat16": false,
70
+ "vocab_size": 50325
71
  }