system (HF staff) committed
Commit 9d7b00b
1 parent: c2dad00

Update config.json

Files changed (1):
  1. config.json (+3 −9)
config.json CHANGED
@@ -1,28 +1,22 @@
 {
+  "activation": "gelu",
   "architectures": [
     "DistilBertForMaskedLM"
   ],
-  "activation": "gelu",
   "attention_dropout": 0.1,
   "dim": 768,
   "dropout": 0.1,
-  "finetuning_task": null,
   "hidden_dim": 3072,
   "initializer_range": 0.02,
-  "is_decoder": false,
   "max_position_embeddings": 512,
+  "model_type": "distilbert",
   "n_heads": 12,
   "n_layers": 6,
-  "num_labels": 2,
-  "output_attentions": false,
-  "output_hidden_states": false,
   "output_past": true,
-  "pruned_heads": {},
+  "pad_token_id": 0,
   "qa_dropout": 0.1,
   "seq_classif_dropout": 0.2,
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
-  "torchscript": false,
-  "use_bfloat16": false,
   "vocab_size": 119547
 }
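
The net effect of the commit is to drop keys that merely duplicate PretrainedConfig defaults (finetuning_task, is_decoder, num_labels, output_attentions, output_hidden_states, pruned_heads, torchscript, use_bfloat16) and to add the model_type and pad_token_id fields that newer transformers releases expect. A minimal sketch of how the updated config is consumed, assuming the repository is distilbert-base-multilingual-cased (its vocab_size of 119547 matches that checkpoint; the commit page itself does not name the repo):

    from transformers import AutoConfig, AutoModelForMaskedLM

    # The added "model_type": "distilbert" is what lets AutoConfig dispatch
    # to DistilBertConfig without an explicit architecture hint.
    config = AutoConfig.from_pretrained("distilbert-base-multilingual-cased")
    assert config.model_type == "distilbert"
    assert config.pad_token_id == 0  # added by this commit

    # The removed keys still resolve on the loaded object, because they fall
    # back to the generic PretrainedConfig class defaults:
    print(config.output_attentions)  # False, from the class default

    # Instantiate a fresh (randomly initialized) model from the config.
    model = AutoModelForMaskedLM.from_config(config)

Because the removed entries only restated library defaults, loading behavior is unchanged; the two additions are the substantive part of the commit.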