system committed on
Commit
1447b71
1 Parent(s): a5cddc8

Update config.json

Files changed (1)
  1. config.json +57 -0
config.json ADDED
@@ -0,0 +1,57 @@
+{
+  "attention_probs_dropout_prob": 0.1,
+  "finetuning_task": "ner",
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "id2label": {
+    "0": "EVN",
+    "1": "LOC",
+    "2": "LOC/LOC",
+    "3": "LOC/ORG",
+    "4": "LOC/PRS",
+    "5": "MSR",
+    "6": "O",
+    "7": "OBJ",
+    "8": "OBJ/ORG",
+    "9": "ORG",
+    "10": "ORG/PRS",
+    "11": "PRS",
+    "12": "PRS/WRK",
+    "13": "TME",
+    "14": "WRK"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "is_decoder": false,
+  "label2id": {
+    "EVN": 0,
+    "LOC": 1,
+    "LOC/LOC": 2,
+    "LOC/ORG": 3,
+    "LOC/PRS": 4,
+    "MSR": 5,
+    "O": 6,
+    "OBJ": 7,
+    "OBJ/ORG": 8,
+    "ORG": 9,
+    "ORG/PRS": 10,
+    "PRS": 11,
+    "PRS/WRK": 12,
+    "TME": 13,
+    "WRK": 14
+  },
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_labels": 15,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "output_past": true,
+  "pruned_heads": {},
+  "torchscript": false,
+  "type_vocab_size": 2,
+  "use_bfloat16": false,
+  "vocab_size": 50002
+}
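
The file added here is a standard BERT-base encoder configuration (12 layers, 12 heads, hidden size 768) fine-tuned for NER with 15 labels. A minimal sketch of reading it with the Hugging Face transformers library, assuming transformers is installed and the committed config.json has been saved to the current directory (the local path is an assumption, not part of this commit):

# Minimal sketch: load the committed config.json and inspect the NER
# label mapping it defines. Assumes `transformers` is installed and
# config.json sits in the working directory (assumption).
from transformers import BertConfig

config = BertConfig.from_json_file("config.json")

# PretrainedConfig normalizes id2label's string keys to ints on load,
# while label2id keeps its string keys.
print(config.num_labels)       # 15
print(config.id2label[11])     # PRS
print(config.label2id["TME"])  # 13

Keeping id2label and label2id in the config means any code that loads the model can map raw class indices back to the entity tags (LOC, ORG, PRS, etc.) without a separate label file.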