pkyriakis committed
Commit 9a3b9fd
1 parent: 500ac4b

Add model configuration

Files changed (2):
  1. README.md +3 -3
  2. config.json +23 -0
README.md CHANGED
@@ -1,14 +1,14 @@
 ---
-language:
+language:
 - en
-thumbnail: https://github.com/karanchahal/distiller/blob/master/distiller.jpg
+license: apache-2.0
 tags:
 - question-answering
-license: apache-2.0
 datasets:
 - squad
 metrics:
 - squad
+thumbnail: https://github.com/karanchahal/distiller/blob/master/distiller.jpg
 ---

 # DistilBERT with a second step of distillation
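Since YAML mappings are order-insensitive, this change only reorders the model-card metadata keys (license and thumbnail move) without altering their values. A minimal sketch, assuming PyYAML is available, confirming that the new front matter parses to the expected metadata:

```python
import yaml

# The reordered front matter from the new README.md (between the --- markers).
front_matter = """\
language:
- en
license: apache-2.0
tags:
- question-answering
datasets:
- squad
metrics:
- squad
thumbnail: https://github.com/karanchahal/distiller/blob/master/distiller.jpg
"""

meta = yaml.safe_load(front_matter)
print(meta["license"])   # apache-2.0
print(meta["datasets"])  # ['squad']
```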
config.json ADDED
@@ -0,0 +1,23 @@
+{
+  "_name_or_path": "distilbert-base-uncased",
+  "activation": "gelu",
+  "architectures": [
+    "DistilBertForMaskedLM"
+  ],
+  "attention_dropout": 0.1,
+  "dim": 768,
+  "dropout": 0.1,
+  "hidden_dim": 3072,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "distilbert",
+  "n_heads": 12,
+  "n_layers": 6,
+  "pad_token_id": 0,
+  "qa_dropout": 0.1,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": false,
+  "tie_weights_": true,
+  "transformers_version": "4.38.0.dev0",
+  "vocab_size": 30522
+}
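The values match the standard distilbert-base-uncased architecture (6 layers, 12 heads, hidden size 768, FFN size 3072). As a sanity check on the new file, a minimal sketch, assuming the transformers library and a local checkout of this repo in the current directory:

```python
from transformers import DistilBertConfig

# Load the config.json added by this commit; "." is assumed to be the
# repo root containing the file.
config = DistilBertConfig.from_pretrained(".")

print(config.model_type)  # distilbert
print(config.n_layers)    # 6
print(config.dim)         # 768
```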