lewtun committed
Commit a23b9a7
Parent: c6989d1

Fix architecture

Files changed (1):
  config.json (+2, -2)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
-    "BloomModel"
+    "BloomForCausalLM"
   ],
   "attention_dropout": 0.0,
   "attention_softmax_in_fp32": true,
@@ -28,4 +28,4 @@
   "unk_token_id": 0,
   "use_cache": true,
   "vocab_size": 250880
-}
+}
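For context, the "architectures" field records which model class the checkpoint was saved with, and Hub tooling and generic loaders consult it (for example, to infer the pipeline tag). A minimal sketch of checking the fix, assuming a hypothetical repo id ("your-org/bloom-model" is a placeholder; this commit does not name the repository):

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id standing in for the repository this commit lives in.
repo_id = "your-org/bloom-model"

# After this fix, the config advertises the causal-LM head class.
config = AutoConfig.from_pretrained(repo_id)
print(config.architectures)  # expected: ['BloomForCausalLM']

# "BloomForCausalLM" includes the language-modeling head needed for text
# generation; "BloomModel" is the bare transformer stack without one, which
# is why the original value was misleading for this checkpoint.
model = AutoModelForCausalLM.from_pretrained(repo_id)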