cakiki and lewtun (HF staff) committed

Commit 64a6f17
1 Parent(s): 842d973

Fix architecture (#6)


- Fix architecture (2eb9bb8e672a2c9f5839ee4c60b39004b58dcc09)


Co-authored-by: Lewis Tunstall <lewtun@users.noreply.huggingface.co>

Files changed (1):
  config.json (+2 -2)
config.json
@@ -2,7 +2,7 @@
   "apply_residual_connection_post_layernorm": false,
   "attention_dropout": 0.0,
   "architectures": [
-    "BloomModelForCausalLM"
+    "BloomForCausalLM"
   ],
   "attention_softmax_in_fp32": true,
   "bias_dropout_fusion": true,
@@ -27,4 +27,4 @@
   "transformers_version": "4.20.0",
   "use_cache": true,
   "vocab_size": 250880
-}
+}
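
The "architectures" field in config.json must name a class that actually exists in the transformers library, since it is used to resolve the model class at load time. "BloomModelForCausalLM" is not a real class; "BloomForCausalLM" is, and it ships with transformers 4.20.0, the version pinned in this config. A minimal sketch, not part of the commit, that verifies the rename against an installed transformers:

import transformers

# Sketch only: check that the corrected architecture name resolves to a real
# class while the old one does not. Assumes transformers >= 4.20.0 is installed.
print(hasattr(transformers, "BloomModelForCausalLM"))  # False (old, invalid name)
print(hasattr(transformers, "BloomForCausalLM"))       # True  (corrected name)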