KaleiNeely committed
Commit: d9d34f0
Parent(s): 14f6f8d

Upload config.json

Files changed (1)
  1. config.json +2 -3
config.json CHANGED
@@ -1,10 +1,10 @@
 {
   "architectures": [
-    "RwkvForCausalLM"
+    "Rwkv5ForCausalLM"
   ],
   "auto_map": {
     "AutoConfig": "configuration_rwkv5.Rwkv5Config",
-    "AutoModelForCausalLM": "modeling_rwkv5.RwkvForCausalLM"
+    "AutoModelForCausalLM": "modeling_rwkv5.Rwkv5ForCausalLM"
   },
   "attention_hidden_size": 2048,
   "bos_token_id": 0,
@@ -15,7 +15,6 @@
   "intermediate_size": null,
   "layer_norm_epsilon": 1e-05,
   "model_type": "rwkv5",
-  "model_version": "5_2",
   "num_hidden_layers": 24,
   "rescale_every": 6,
   "tie_word_embeddings": false,