cankarakus committed on
Commit 2b23771
1 Parent(s): 7eab60d

Upload LlamaForCausalLM

Files changed (2)
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -16,7 +16,7 @@
   "model_type": "llama",
   "num_attention_heads": 4,
   "num_hidden_layers": 8,
-  "num_key_value_heads": 32,
+  "num_key_value_heads": 4,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:d52362f8245c857f93a46bec0474e4e3d036f01ecd289bfeaf63bab55d69ae13
-  size 727091328
+  oid sha256:37cec5de5b63f629606161a2013da4b24db54c16820f01661a5646b5cfcaac35
+  size 609650800
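The roughly 117 MB drop in model.safetensors (727091328 - 609650800 = 117440528 bytes) is consistent with the smaller k_proj and v_proj weight matrices. A back-of-the-envelope check; hidden_size = 512 and float32 weights are assumptions, plausible given the byte counts but not visible in this diff:

hidden_size = 512            # assumed; not shown in the config hunk above
head_dim = hidden_size // 4  # num_attention_heads = 4
removed_kv_heads = 32 - 4    # old minus new num_key_value_heads
num_layers = 8               # num_hidden_layers
bytes_per_param = 4          # float32 assumed

# K and V projections each lose removed_kv_heads * head_dim output rows per layer.
delta = 2 * removed_kv_heads * head_dim * hidden_size * num_layers * bytes_per_param
print(delta)  # 117440512, within 16 bytes (safetensors header) of the observed 117440528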