Abhaykoul committed
Commit
6d0e350
1 Parent(s): fcc5d4e

Update config.json

Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "LlamaForCausalLM"
+    "MixtralForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -12,7 +12,7 @@
   "intermediate_size": 11008,
   "max_position_embeddings": 4096,
   "mlp_bias": false,
-  "model_type": "llama",
+  "model_type": "mixtral",
   "num_attention_heads": 32,
   "num_experts_per_tok": 2,
   "num_hidden_layers": 48,