Matt committed on
Commit: f19979f
1 Parent(s): 7fb349a

Move to in-library checkpoint
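Context for the change: "in-library" means the checkpoint now targets the Falcon implementation that ships inside transformers itself, rather than the custom RW classes previously bundled with the repository. A minimal sketch of the user-facing difference, assuming a Falcon-aware transformers build; the repo id below is a placeholder, since this page does not name the repository:

from transformers import AutoModelForCausalLM

repo_id = "tiiuae/falcon-rw-1b"  # placeholder repo id for illustration

# Before this commit: config.json carried an auto_map pointing at
# configuration_RW.py / modelling_RW.py inside the repo, so loading required
# opting in to running that repo-shipped code:
#   model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# After this commit: model_type is "falcon", which transformers resolves to
# its built-in FalconForCausalLM class, so no remote code is executed:
model = AutoModelForCausalLM.from_pretrained(repo_id)
print(type(model).__name__)  # FalconForCausalLM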

Files changed (2):
  1. config.json +6 -9
  2. generation_config.json +2 -2
config.json CHANGED
@@ -2,13 +2,9 @@
   "alibi": true,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
-    "RWForCausalLM"
+    "FalconForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_RW.RWConfig",
-    "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
-  },
   "bias": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
@@ -16,13 +12,14 @@
   "hidden_size": 2048,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "model_type": "RefinedWebModel",
+  "model_type": "falcon",
   "multi_query": false,
-  "n_head": 32,
-  "n_layer": 24,
+  "new_decoder_architecture": false,
+  "num_attention_heads": 32,
+  "num_hidden_layers": 24,
   "parallel_attn": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.27.4",
   "use_cache": true,
   "vocab_size": 50304
-}
+}
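The config rewrite is a mechanical migration to the in-library schema: the RW-era keys n_head and n_layer become the standard transformers names num_attention_heads and num_hidden_layers, model_type changes from "RefinedWebModel" to "falcon" so the Auto classes can resolve the checkpoint, new_decoder_architecture: false records that this model keeps the original RW decoder layout rather than the newer Falcon-40B-style one, and the auto_map block is dropped because no repo-shipped code is needed anymore. A minimal sketch of the same transformation, with an illustrative helper name not taken from the commit:

import json

# Old RW-style key -> in-library Falcon key (as seen in the diff above).
KEY_RENAMES = {
    "n_head": "num_attention_heads",
    "n_layer": "num_hidden_layers",
}

def migrate_rw_config(old: dict) -> dict:
    """Illustrative port of an RW-style config dict to the Falcon schema."""
    new = {KEY_RENAMES.get(k, k): v for k, v in old.items() if k != "auto_map"}
    new["architectures"] = ["FalconForCausalLM"]
    new["model_type"] = "falcon"
    new.setdefault("new_decoder_architecture", False)
    return new

with open("config.json") as f:
    print(json.dumps(migrate_rw_config(json.load(f)), indent=2, sort_keys=True))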
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.27.4"
-}
+  "transformers_version": "4.31.0.dev0"
+}
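The generation config is unchanged apart from the transformers_version stamp, which records which library build wrote the file (here a 4.31.0.dev0 development build). To confirm a checkpoint's generation defaults after a change like this, a quick check (placeholder repo id again):

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("tiiuae/falcon-rw-1b")  # placeholder repo id
print(gen.transformers_version)            # e.g. "4.31.0.dev0"
print(gen.bos_token_id, gen.eos_token_id)  # 1 2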