worldboss committed on
Commit 3b305ff
1 Parent(s): eae2fdd

Update config.json

Files changed (1)
  1. config.json +18 -31
config.json CHANGED
@@ -1,32 +1,19 @@
  {
- "alibi": false,
- "apply_residual_connection_post_layernorm": false,
- "architectures": [
-   "RWForCausalLM"
- ],
- "attention_dropout": 0.0,
- "auto_map": {
-   "AutoConfig": "configuration_RW.RWConfig",
-   "AutoModel": "modelling_RW.RWModel",
-   "AutoModelForSequenceClassification": "modelling_RW.RWForSequenceClassification",
-   "AutoModelForTokenClassification": "modelling_RW.RWForTokenClassification",
-   "AutoModelForQuestionAnswering": "modelling_RW.RWForQuestionAnswering",
-   "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
- },
- "bias": false,
- "bos_token_id": 11,
- "eos_token_id": 11,
- "hidden_dropout": 0.0,
- "hidden_size": 4544,
- "initializer_range": 0.02,
- "layer_norm_epsilon": 1e-05,
- "model_type": "RefinedWebModel",
- "multi_query": true,
- "n_head": 71,
- "n_layer": 32,
- "parallel_attn": true,
- "torch_dtype": "bfloat16",
- "transformers_version": "4.27.4",
- "use_cache": true,
- "vocab_size": 65024
- }
+ "architectures": ["LLaMAForCausalLM"],
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 8192,
+ "intermediate_size": 22016,
+ "initializer_range": 0.02,
+ "max_sequence_length": 2048,
+ "model_type": "llama",
+ "num_attention_heads": 64,
+ "num_hidden_layers": 80,
+ "pad_token_id": 0,
+ "rms_norm_eps": 1e-05,
+ "torch_dtype": "float16",
+ "transformers_version": "4.28.0.dev0",
+ "use_cache": true,
+ "vocab_size": 32000
+ }
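
For reference, a minimal sketch of loading the updated config with the Transformers library; the repository id below is a placeholder, and the printed values simply echo fields introduced in this commit. Because the new config declares the built-in "llama" model type, no custom auto_map / remote-code hook is needed, whereas the previous RefinedWebModel config relied on configuration_RW / modelling_RW.

from transformers import AutoConfig

# Placeholder repo id: substitute the actual model repository.
config = AutoConfig.from_pretrained("your-namespace/your-model")

# Fields taken from the updated config.json in this commit.
assert config.model_type == "llama"
print(config.hidden_size)          # 8192
print(config.num_hidden_layers)    # 80
print(config.num_attention_heads)  # 64
print(config.vocab_size)           # 32000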