TheBloke committed on
Commit 0a2ddfb
1 Parent(s): 466f166

Fix config.json

Files changed (1)
  1. config.json +4 -4
config.json CHANGED
@@ -8,9 +8,9 @@
     "AutoConfig": "configuration_qwen.QWenConfig",
     "AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel"
   },
-  "bf16": true,
+  "bf16": false,
   "emb_dropout_prob": 0.0,
-  "fp16": false,
+  "fp16": true,
   "fp32": false,
   "hidden_size": 5120,
   "initializer_range": 0.02,
@@ -60,7 +60,7 @@
   "use_cache_kernel": false,
   "use_cache_quantization": false,
   "use_dynamic_ntk": true,
-  "use_flash_attn": true,
+  "use_flash_attn": "auto",
   "use_logn_attn": true,
   "vocab_size": 152064
-}
+}
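
The change switches the config from bfloat16 to float16 and sets "use_flash_attn" to "auto" so FlashAttention is only used when it is actually installed. Below is a minimal sketch of how such a config is consumed when loading the model with transformers; the repository id is a placeholder for illustration, not taken from this commit:

from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "TheBloke/qwen-gptq-repo"  # placeholder repo id, assumed for illustration
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
# trust_remote_code=True is needed so the custom QWenConfig / QWenLMHeadModel
# classes referenced in config.json's "auto_map" are loaded from the repo.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    trust_remote_code=True,
)
# With "fp16": true and "bf16": false, the repo's custom modeling code keeps
# the weights in float16; "use_flash_attn": "auto" lets it enable FlashAttention
# only if the flash-attn package can be imported.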