breadlicker45 committed
Commit 89b0e29
Parent: 3bda613

Upload RwkvForCausalLM

Files changed (2):
  1. config.json +1 -1
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -7,7 +7,7 @@
   "context_length": 4096,
   "eos_token_id": 0,
   "hidden_size": 24,
-  "intermediate_size": 21000,
+  "intermediate_size": 4200,
   "layer_norm_epsilon": 1e-05,
   "model_type": "rwkv",
   "num_attention_heads": 24,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:207b8e3dc2b56ac0a994e0c1dafd0fea36625fb6193bbf60bd35215e8fde3bee
-size 301257363
+oid sha256:9f87891d30888d04db6f5322d9b727a71553cdd713b7ab33c3af6cd3b07f5fa2
+size 69014163
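The pytorch_model.bin entry is a Git LFS pointer rather than the weights themselves: the oid sha256 line records the digest of the actual file and size its byte count, so this commit replaces a ~301 MB checkpoint with a ~69 MB one. A minimal sketch of verifying a downloaded copy against the new pointer; the local path pytorch_model.bin is a hypothetical download location:

# Minimal sketch: check a downloaded checkpoint against the Git LFS pointer
# above. "pytorch_model.bin" is a hypothetical local path.
import hashlib

EXPECTED_OID = "9f87891d30888d04db6f5322d9b727a71553cdd713b7ab33c3af6cd3b07f5fa2"
EXPECTED_SIZE = 69014163

digest = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)
        size += len(chunk)

print("sha256 matches:", digest.hexdigest() == EXPECTED_OID)
print("size matches:", size == EXPECTED_SIZE)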