ybelkada committed
Commit
85e429f
1 Parent(s): fbda88e

Remove .dev from transformers version (#21)


- Remove .dev from transformers version (89303001337263eb28ee69dd51ec28fac73ac3ef)
- add architecture (5efe5164c6fab318760c2a98485c73732fdfab78)

Files changed (1)
  1. config.json +4 -1
config.json CHANGED
@@ -1,6 +1,9 @@
 {
   "apply_residual_connection_post_layernorm": false,
   "attention_dropout": 0.0,
+  "architectures": [
+    "BloomModel"
+  ],
   "attention_softmax_in_fp32": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
@@ -15,7 +18,7 @@
   "num_attention_heads": 112,
   "pretraining_tp": 4,
   "slow_but_exact": false,
-  "transformers_version": "4.21.0.dev0",
+  "transformers_version": "4.21.0",
   "use_cache": true,
   "vocab_size": 250880
 }
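
For context, the two fields touched by this commit can be inspected directly with transformers' AutoConfig. The snippet below is a minimal sketch, not part of the commit itself: the repo id "bigscience/bloom" is an assumption for illustration (the target repo is not named in this diff), and only config.json is fetched, no model weights.

# Minimal sketch: read the fields changed in this commit via AutoConfig.
# "bigscience/bloom" is an assumed repo id for illustration.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("bigscience/bloom")  # assumed repo id

# "architectures" records which model class the checkpoint was saved with;
# after this commit it should read ['BloomModel'].
print(config.architectures)

# The version string no longer carries the ".dev0" development suffix.
print(config.transformers_version)  # e.g. '4.21.0'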