lewtun (HF staff) committed
Commit e41b7df
Parent: e183ebb

Fix architecture

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -2,7 +2,7 @@
   "apply_residual_connection_post_layernorm": false,
   "attention_dropout": 0.0,
   "architectures": [
-    "BloomModel"
+    "BloomForCausalLM"
   ],
   "attention_softmax_in_fp32": true,
   "bias_dropout_fusion": true,