Lev McKinney committed
Commit 8851746
1 Parent(s): 5f7d166

removed extra keys from config

lens/gpt2-large/config.json CHANGED
@@ -1 +1,7 @@
- {"dropout": 0.0, "identity_init": true, "include_input": true, "layer_norm": false, "mlp_hidden_sizes": [], "rank": null, "shared_mlp_hidden_sizes": [], "share_weights": false, "sublayers": false, "num_layers": 36, "vocab_size": 50257, "bias": true, "d_model": 1280}
+ {
+   "include_input": true,
+   "num_layers": 36,
+   "vocab_size": 50257,
+   "bias": true,
+   "d_model": 1280
+ }
lens/gpt2-xl/config.json CHANGED
@@ -1 +1,7 @@
- {"bias": true, "identity_init": true, "include_input": true, "include_final": false, "orthogonal": false, "rank": null, "sublayers": false, "d_model": 1600, "num_layers": 48, "vocab_size": 50257}
+ {
+   "bias": true,
+   "include_input": true,
+   "d_model": 1600,
+   "num_layers": 48,
+   "vocab_size": 50257
+ }
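
For reference, a minimal sketch of the pruning this commit applies to both config files: every key not in the small allow-list that survives in the new files (include_input, num_layers, vocab_size, bias, d_model) is dropped. The KEPT_KEYS set and the prune_config helper below are illustrative names inferred from the two diffs above, not part of the repository itself.

```python
import json

# Keys retained in both new configs above; everything else is treated
# as an "extra" key and removed (allow-list inferred from the diffs).
KEPT_KEYS = {"include_input", "num_layers", "vocab_size", "bias", "d_model"}

def prune_config(path: str) -> dict:
    """Load a lens config.json, drop keys outside KEPT_KEYS, and rewrite it."""
    with open(path) as f:
        config = json.load(f)
    pruned = {k: v for k, v in config.items() if k in KEPT_KEYS}
    with open(path, "w") as f:
        json.dump(pruned, f, indent=2)
    return pruned

# Example: pruning the old gpt2-large config leaves exactly the five
# keys shown in the new file.
# prune_config("lens/gpt2-large/config.json")
```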