system HF staff committed on
Commit
84de8d0
1 Parent(s): 7058bef

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +11 -27
config.json CHANGED
@@ -1,16 +1,23 @@
1
  {
 
2
  "architectures": [
3
  "BertForTokenClassification"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
- "do_sample": false,
7
- "finetuning_task": null,
8
  "hidden_act": "gelu",
9
  "hidden_dropout_prob": 0.1,
10
  "hidden_size": 768,
11
  "id2label": {
12
  "0": "B-WRK",
13
  "1": "I-PER",
 
 
 
 
 
 
 
 
14
  "10": "I-TME",
15
  "11": "B-OBJORG",
16
  "12": "I-MSR",
@@ -21,20 +28,11 @@
21
  "17": "B-ORG",
22
  "18": "B-PER",
23
  "19": "B-ORGPRS",
24
- "2": "I-LOC",
25
  "20": "I-ORG",
26
- "21": "O",
27
- "3": "I-OBJ",
28
- "4": "B-EVN",
29
- "5": "I-EVN",
30
- "6": "B-LOC",
31
- "7": "B-MSR",
32
- "8": "B-OBJ",
33
- "9": "B-PRSWRK"
34
  },
35
  "initializer_range": 0.02,
36
  "intermediate_size": 3072,
37
- "is_decoder": false,
38
  "label2id": {
39
  "B-EVN": 4,
40
  "B-LOC": 6,
@@ -60,26 +58,12 @@
60
  "O": 21
61
  },
62
  "layer_norm_eps": 1e-12,
63
- "length_penalty": 1.0,
64
- "max_length": 20,
65
  "max_position_embeddings": 512,
66
  "model_type": "bert",
67
  "num_attention_heads": 12,
68
- "num_beams": 1,
69
  "num_hidden_layers": 12,
70
- "num_labels": 22,
71
- "num_return_sequences": 1,
72
- "output_attentions": false,
73
- "output_hidden_states": false,
74
  "output_past": true,
75
  "pad_token_id": 0,
76
- "pruned_heads": {},
77
- "repetition_penalty": 1.0,
78
- "temperature": 1.0,
79
- "top_k": 50,
80
- "top_p": 1.0,
81
- "torchscript": false,
82
  "type_vocab_size": 2,
83
- "use_bfloat16": false,
84
  "vocab_size": 50325
85
- }
1
  {
2
+ "_num_labels": 22,
3
  "architectures": [
4
  "BertForTokenClassification"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
 
 
7
  "hidden_act": "gelu",
8
  "hidden_dropout_prob": 0.1,
9
  "hidden_size": 768,
10
  "id2label": {
11
  "0": "B-WRK",
12
  "1": "I-PER",
13
+ "2": "I-LOC",
14
+ "3": "I-OBJ",
15
+ "4": "B-EVN",
16
+ "5": "I-EVN",
17
+ "6": "B-LOC",
18
+ "7": "B-MSR",
19
+ "8": "B-OBJ",
20
+ "9": "B-PRSWRK",
21
  "10": "I-TME",
22
  "11": "B-OBJORG",
23
  "12": "I-MSR",
28
  "17": "B-ORG",
29
  "18": "B-PER",
30
  "19": "B-ORGPRS",
 
31
  "20": "I-ORG",
32
+ "21": "O"
 
 
 
 
 
 
 
33
  },
34
  "initializer_range": 0.02,
35
  "intermediate_size": 3072,
 
36
  "label2id": {
37
  "B-EVN": 4,
38
  "B-LOC": 6,
58
  "O": 21
59
  },
60
  "layer_norm_eps": 1e-12,
 
 
61
  "max_position_embeddings": 512,
62
  "model_type": "bert",
63
  "num_attention_heads": 12,
 
64
  "num_hidden_layers": 12,
 
 
 
 
65
  "output_past": true,
66
  "pad_token_id": 0,
 
 
 
 
 
 
67
  "type_vocab_size": 2,
 
68
  "vocab_size": 50325
69
+ }