SE6446 committed on
Commit
f6f76c8
1 Parent(s): 0a79c3e

Update config.json

Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "LlamaForCausalLM"
+    "MixtralForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -11,7 +11,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 5632,
   "max_position_embeddings": 2048,
-  "model_type": "llama",
+  "model_type": "mixtral",
   "num_attention_heads": 32,
   "num_experts_per_tok": 2,
   "num_hidden_layers": 22,