zyberg2091 committed
Commit
31530ec
Parent: 69b3483

Updated config.json

Files changed (1)
config.json +3 -3
config.json CHANGED
@@ -1,22 +1,22 @@
 {
   "activation": "gelu",
   "architectures": [
-    "DistilBertForMaskedLM"
+    "DistilBertForSequenceClassification"
   ],
   "attention_dropout": 0.1,
   "dim": 768,
   "dropout": 0.1,
+  "finetuning_task": "Toxicity-classification",
   "hidden_dim": 3072,
   "initializer_range": 0.02,
   "max_position_embeddings": 512,
   "model_type": "distilbert",
   "n_heads": 12,
   "n_layers": 6,
-  "output_past": true,
   "pad_token_id": 0,
   "qa_dropout": 0.1,
   "seq_classif_dropout": 0.2,
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
-  "vocab_size": 119547
+  "vocab_size": 30522
 }
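
With this change, loading the checkpoint through the Hugging Face transformers library should instantiate the sequence-classification head directly. A minimal sketch follows, assuming transformers is installed; the repo id "zyberg2091/toxicity-distilbert" is a hypothetical placeholder for this repository.

# A minimal sketch, assuming the Hugging Face transformers library.
# "zyberg2091/toxicity-distilbert" is a hypothetical repo id for illustration.
from transformers import AutoConfig, AutoModelForSequenceClassification

repo_id = "zyberg2091/toxicity-distilbert"  # hypothetical placeholder

# The updated config now selects the classification architecture.
config = AutoConfig.from_pretrained(repo_id)
print(config.architectures)    # ['DistilBertForSequenceClassification']
print(config.finetuning_task)  # 'Toxicity-classification'
print(config.vocab_size)       # 30522

# AutoModelForSequenceClassification reads these fields and builds a
# DistilBERT encoder with a sequence-classification head on top.
model = AutoModelForSequenceClassification.from_pretrained(repo_id)

Note the vocab_size change: 30522 matches the English distilbert-base-uncased vocabulary, whereas the previous 119547 was the multilingual DistilBERT vocabulary, so the checkpoint must be paired with the corresponding English tokenizer.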