system (HF staff) committed
Commit 5a9e5f5
1 parent: f5bf8c5

Update config.json

Files changed (1):
  1. config.json (+14, -40)
config.json CHANGED
@@ -1,48 +1,22 @@
 {
-  "_num_labels": 9,
+  "activation": "gelu",
   "architectures": [
-    "BertForTokenClassification"
+    "DistilBertForQuestionAnswering"
   ],
-  "attention_probs_dropout_prob": 0.1,
-  "directionality": "bidi",
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 2,
-  "id2label": {
-    "0": "O",
-    "1": "B-MISC",
-    "2": "I-MISC",
-    "3": "B-PER",
-    "4": "I-PER",
-    "5": "B-ORG",
-    "6": "I-ORG",
-    "7": "B-LOC",
-    "8": "I-LOC"
-  },
+  "attention_dropout": 0.1,
+  "dim": 2,
+  "dropout": 0.1,
+  "hidden_dim": 2,
   "initializer_range": 0.02,
-  "intermediate_size": 4,
-  "label2id": {
-    "B-LOC": 7,
-    "B-MISC": 1,
-    "B-ORG": 5,
-    "B-PER": 3,
-    "I-LOC": 8,
-    "I-MISC": 2,
-    "I-ORG": 6,
-    "I-PER": 4,
-    "O": 0
-  },
-  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "bert",
-  "num_attention_heads": 2,
-  "num_hidden_layers": 2,
+  "model_type": "distilbert",
+  "n_heads": 2,
+  "n_layers": 2,
+  "output_past": true,
   "pad_token_id": 0,
-  "pooler_fc_size": 768,
-  "pooler_num_attention_heads": 12,
-  "pooler_num_fc_layers": 3,
-  "pooler_size_per_head": 128,
-  "pooler_type": "first_token_transform",
-  "type_vocab_size": 2,
+  "qa_dropout": 0.1,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": true,
+  "tie_weights_": true,
   "vocab_size": 28996
 }
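
For reference, the updated configuration can also be rebuilt in code. The sketch below is a minimal, hedged example, assuming the Hugging Face transformers library is installed: all values are copied from the new config.json above, and it is assumed that keys without a dedicated DistilBertConfig parameter (output_past, tie_weights_) are simply stored as extra config attributes, as the generic config class does with unknown keyword arguments.

from transformers import DistilBertConfig, DistilBertForQuestionAnswering

# Rebuild the updated config.json in code (values copied from the diff above).
# model_type is fixed to "distilbert" by the config class itself.
config = DistilBertConfig(
    activation="gelu",
    attention_dropout=0.1,
    dim=2,
    dropout=0.1,
    hidden_dim=2,
    initializer_range=0.02,
    max_position_embeddings=512,
    n_heads=2,
    n_layers=2,
    output_past=True,        # assumption: kept as an extra attribute
    pad_token_id=0,
    qa_dropout=0.1,
    seq_classif_dropout=0.2,
    sinusoidal_pos_embds=True,
    tie_weights_=True,       # assumption: kept as an extra attribute
    vocab_size=28996,
)

# Randomly initialized tiny QA model matching the "architectures" entry.
model = DistilBertForQuestionAnswering(config)
print(model.config.model_type)   # "distilbert"
print(config.to_json_string())   # serializes back to a config.json payload

Note the tiny dimensions (dim=2, hidden_dim=2, n_layers=2), which suggest a test fixture rather than a trained checkpoint; the weights created here are random, not the repository's.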