Abhaykoul committed
Commit 22069ae
1 Parent(s): f33284e

Update config.json

Files changed (1): config.json (+1 -1)
config.json CHANGED
@@ -13,7 +13,7 @@
   "intermediate_size": 5504,
   "max_position_embeddings": 4096,
   "max_window_layers": 21,
-  "model_type": "HelpingAI",
+  "model_type": "llama",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
   "num_key_value_heads": 16,