minpeter committed (verified)
Commit: ef1b21d
Parent(s): 1f78c8b

Upload LlamaForCausalLM

Files changed (2):
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -15,7 +15,7 @@
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 12,
-  "num_hidden_layers": 32,
+  "num_hidden_layers": 28,
   "num_key_value_heads": 4,
   "pad_token_id": 0,
   "pretraining_tp": 1,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:37ef9aa20f4a3004ce1be8b793145c870b6768b7f68fdbb273d895893c65d69c
-size 866093696
+oid sha256:729644dbdb009ccb84f1e805678ed542f008e1cc5df9239a43af9b3c1c8adbbc
+size 770120384
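model.safetensors is tracked through Git LFS, so the diff above only rewrites the pointer file: oid is the SHA-256 of the actual payload and size is its byte count (770,120,384 after this commit). A minimal sketch, assuming the new weights have been downloaded to a local model.safetensors, that re-hashes the file and checks both fields against the pointer:

```python
import hashlib
import os

# Expected values copied from the updated LFS pointer in this commit.
EXPECTED_OID = "729644dbdb009ccb84f1e805678ed542f008e1cc5df9239a43af9b3c1c8adbbc"
EXPECTED_SIZE = 770120384

# Local path is an assumption; point it at the downloaded weights file.
path = "model.safetensors"

# Hash in 1 MiB chunks to avoid loading the whole file into memory.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```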