KaleiNeely committed
Commit 1910e08
1 Parent(s): 77919cc

Update config.json

Files changed (1)
  1. config.json +7 -7
config.json CHANGED
@@ -1,24 +1,24 @@
 {
   "architectures": [
-    "Rwkv5ForCausalLM"
+    "RwkvForCausalLM"
   ],
   "auto_map": {
     "AutoConfig": "configuration_rwkv5.Rwkv5Config",
-    "AutoModelForCausalLM": "modeling_rwkv5.Rwkv5ForCausalLM"
+    "AutoModelForCausalLM": "modeling_rwkv5.RwkvForCausalLM"
   },
-  "attention_hidden_size": 2048,
+  "attention_hidden_size": 2560,
   "bos_token_id": 0,
   "context_length": 4096,
   "eos_token_id": 0,
   "head_size": 64,
-  "hidden_size": 2048,
+  "hidden_size": 2560,
   "intermediate_size": null,
   "layer_norm_epsilon": 1e-05,
   "model_type": "rwkv5",
-  "num_hidden_layers": 24,
+  "num_hidden_layers": 32,
   "rescale_every": 6,
   "tie_word_embeddings": false,
-  "transformers_version": "4.33.1",
+  "transformers_version": "4.34.0",
   "use_cache": true,
   "vocab_size": 65536
-}
+}
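
The updated config still routes AutoConfig and AutoModelForCausalLM through the auto_map to the custom configuration_rwkv5.py / modeling_rwkv5.py files shipped with the model, so loading it via transformers requires trust_remote_code=True. Below is a minimal loading sketch; the repository ID is a placeholder (the actual repo this commit belongs to is not shown on this page), and the printed values simply reflect the fields changed in this commit.

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repository ID; substitute the repo this commit was pushed to.
repo_id = "your-namespace/rwkv-5-model"

# auto_map in config.json points the Auto* classes at the custom
# configuration_rwkv5.Rwkv5Config and modeling_rwkv5.RwkvForCausalLM code,
# so remote code must be trusted explicitly.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.hidden_size, config.num_hidden_layers)  # 2560, 32 after this commit

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)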