simonJJJ committed
Commit 7727b21
Parent: 0dfece6

fix fp16 bug

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -8,7 +8,7 @@
     "AutoConfig": "configuration_qwen.QWenConfig",
     "AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel"
   },
-  "bf16": true,
+  "bf16": false,
   "emb_dropout_prob": 0.0,
   "fp16": false,
   "fp32": false,