avnishkr committed
Commit e5f9805
1 Parent(s): ead2586

Update config.json

Files changed (1)
config.json +5 -4
config.json CHANGED
@@ -3,7 +3,7 @@
   "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
-    "RWForCausalLM"
+    "FalconForCausalLM"
   ],
   "attention_dropout": 0.0,
   "auto_map": {
@@ -21,10 +21,11 @@
   "hidden_size": 4544,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "model_type": "RefinedWebModel",
+  "model_type": "falcon",
   "multi_query": true,
-  "n_head": 71,
-  "n_layer": 32,
+  "new_decoder_architecture": false,
+  "num_attention_heads": 71,
+  "num_hidden_layers": 32,
   "parallel_attn": true,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.30.0.dev0",