randomb0tt committed
Commit
1ef3206
1 Parent(s): 90230a4

Update config.json

Files changed (1): config.json (+1 -1)
config.json CHANGED
@@ -11,7 +11,7 @@
   "intermediate_size": 10240,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
-  "model_type": "GPT-NeoX",
+  "model_type": "gpt_neox",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "rotary_emb_base": 10000,