snoop2head committed on
Commit
e2a7a7a
1 Parent(s): fc4fae8
Files changed (1)
  1. config.json +4 -0
config.json CHANGED
@@ -1,5 +1,8 @@
 {
   "activation_function": "gelu",
+  "architectures": [
+    "DeepShallowModel"
+  ],
   "attention_heads": 8,
   "decoder_layers": 1,
   "dropout": 0.1,
@@ -10,5 +13,6 @@
   "model_type": "transformer",
   "src_vocab_size": 10000,
   "tgt_vocab_size": 10000,
+  "torch_dtype": "float32",
   "transformers_version": "4.17.0"
 }
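For context, this commit records the model class name ("architectures") and the weight dtype ("torch_dtype") in config.json. Below is a minimal sketch that checks the two newly added fields after the commit is applied; it assumes the file sits at the repository root as config.json.

import json

# Load the updated config and verify the fields added in commit e2a7a7a.
with open("config.json") as f:
    config = json.load(f)

assert config["architectures"] == ["DeepShallowModel"]
assert config["torch_dtype"] == "float32"
print(config["model_type"], config["architectures"], config["torch_dtype"])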