Fix architecture

#7
by lewtun · opened
Files changed (1)
  1. config.json +2 -2
config.json CHANGED

@@ -4,7 +4,7 @@
   "attention_softmax_in_fp32": true,
   "bias_dropout_fusion": true,
   "architectures": [
-    "BloomModelForCausalLM"
+    "BloomForCausalLM"
   ],
   "bos_token_id": 1,
   "eos_token_id": 2,
@@ -27,4 +27,4 @@
   "transformers_version": "4.20.0",
   "use_cache": true,
   "vocab_size": 250880
-}
+}
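
The rename matters because `transformers` resolves the class named in the config's `architectures` field by looking it up on the top-level `transformers` namespace (for example, when `pipeline()` infers which model class to load). `BloomModelForCausalLM` is not a class that exists in the library, so that lookup fails; `BloomForCausalLM` is the actual class name. A minimal sketch of the check (assuming `transformers >= 4.20.0` is installed):

```python
import transformers

# The "architectures" entry must name a real class in the transformers
# namespace; helpers such as pipeline() resolve it with getattr().
for arch in ("BloomModelForCausalLM", "BloomForCausalLM"):
    print(f"{arch}: {hasattr(transformers, arch)}")
# BloomModelForCausalLM: False  <- the old value; no such class exists
# BloomForCausalLM: True        <- the corrected value
```

With the old value, class resolution from the config falls over before any weights are loaded; after this fix, the string round-trips cleanly to `transformers.BloomForCausalLM`.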