CISCai committed on
Commit
7d0e500
1 Parent(s): 31558a2

Incorrect intermediate_size

Browse files

This fixes the model for `llama.cpp` at least, untested on `transformers`.

Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -9,7 +9,7 @@
9
  "hidden_act": "silu",
10
  "hidden_size": 3584,
11
  "initializer_range": 0.02,
12
- "intermediate_size": 18944,
13
  "max_position_embeddings": 32768,
14
  "max_window_layers": 28,
15
  "model_type": "qwen2_moe",
@@ -32,4 +32,4 @@
32
  "use_cache": true,
33
  "use_sliding_window": false,
34
  "vocab_size": 151936
35
- }
 
9
  "hidden_act": "silu",
10
  "hidden_size": 3584,
11
  "initializer_range": 0.02,
12
+ "intermediate_size": 20480,
13
  "max_position_embeddings": 32768,
14
  "max_window_layers": 28,
15
  "model_type": "qwen2_moe",
 
32
  "use_cache": true,
33
  "use_sliding_window": false,
34
  "vocab_size": 151936
35
+ }