cszhzleo committed on
Commit 105b9c0
1 Parent(s): 6cee542

Upload folder using huggingface_hub

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. .gitattributes +8 -0
  2. checkpoint/config.json +29 -0
  3. checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
  4. checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  5. checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  6. checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  7. checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  8. checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  9. checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  10. checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  11. checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  12. checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  13. checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  14. checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  15. checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  16. checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  17. checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  18. checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  19. checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  20. checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  21. checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  22. checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  23. checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  24. checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  25. checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  26. checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  27. checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  28. checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  29. checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  30. checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  31. checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  32. checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  33. checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  34. checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  35. checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  36. checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  37. checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
  38. checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
  39. checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
  40. checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
  41. checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
  42. checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
  43. checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
  44. checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
  45. checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
  46. checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
  47. checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
  48. checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
  49. checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
  50. checkpoint/pytorch_model.bin/p14.model.layers.1.mlp.gate_proj.weight +3 -0
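The commit message indicates the folder was pushed with the `huggingface_hub` client. Below is a minimal sketch of how such an upload is typically produced; the local folder name and `repo_id` are placeholders, not values taken from this commit.

```python
# Minimal sketch of an upload that produces a commit like this one.
# folder_path and repo_id are hypothetical placeholders.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="./vicuna-7b-v1.5-neuron",  # assumed local folder containing checkpoint/ and compiled/
    repo_id="<user>/<repo>",                # placeholder for the target repository
    repo_type="model",
    # commit_message defaults to "Upload folder using huggingface_hub",
    # which matches the commit message shown above.
)
```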
.gitattributes CHANGED
@@ -33,3 +33,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/1744545db8406da6398e.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/1bb198a13076d1853641.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/2e107447c6e0a7f19573.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/51f8a515591560780d48.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/a37ede77a746b866b69d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/c3741198db69eb019273.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d632e5ec42990d4ee01b.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d95062416ef36beb6494.neff filter=lfs diff=lfs merge=lfs -text
checkpoint/config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "lmsys/vicuna-7b-v1.5",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.36.2",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
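checkpoint/config.json identifies the weights as a 32-layer LLaMA-architecture model derived from lmsys/vicuna-7b-v1.5 (float16, hidden size 4096, 32000-token vocabulary). A minimal sketch for inspecting it with transformers, assuming the repository has been downloaded locally:

```python
# Inspect the uploaded config with transformers; "checkpoint" is the local
# path to the checkpoint/ directory after downloading this repository.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("checkpoint")
print(config.model_type)         # "llama"
print(config.num_hidden_layers)  # 32
print(config.hidden_size)        # 4096
```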
checkpoint/pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+ size 26397
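Here pytorch_model.bin is a directory rather than a single file: each parameter tensor lives in its own LFS-tracked file (p0.model.embed_tokens.weight, p1.model.layers.0.self_attn.q_proj.weight, …), and key_to_filename.json maps state-dict keys to those filenames. This matches a split-checkpoint layout used for Neuron deployments (note the compiled/*.neff artifacts above). A minimal sketch of reassembling a state dict from this layout, assuming key_to_filename.json is a flat key-to-filename mapping and each per-tensor file is a torch-serialized tensor:

```python
# A sketch under the assumptions stated above; dedicated tooling
# (e.g. transformers-neuronx) normally loads this layout for you.
import json
import os

import torch

ckpt_dir = "checkpoint/pytorch_model.bin"
with open(os.path.join(ckpt_dir, "key_to_filename.json")) as f:
    # assumed shape: {"model.embed_tokens.weight": "p0.model.embed_tokens.weight", ...}
    key_to_filename = json.load(f)

state_dict = {
    key: torch.load(os.path.join(ckpt_dir, fname), map_location="cpu")
    for key, fname in key_to_filename.items()
}
print(len(state_dict), "tensors loaded")
```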
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08d0867a28a65061a2270b8f40ace72583d4a094e2de61b7e8b147693e57c6f8
+ size 262144789
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b4e6149a45208bed36f69ddf9763ea22b9c10b6815b5318cd495a870f29b244
+ size 33555324
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:05d17b7eee688c445265f516e8042533df607f87fe1aa339d2f24a15a46b3398
+ size 33555327
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87ee525c222d633c94d0727f284df98f2ecc5a4fcdcbcf0ccd67b8309dde2d00
+ size 33555333
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d920d80bb886d054d3f944205856a8996706649e0591e6bc5e95e0e79d064e46
+ size 33555333
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2fffefa23acdecd4ca2986a11c9573ceaa37f5877415e214b0c582c1d502eb8d
+ size 33555333
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0bbd492a2e36e12403e327a7aa00d4040e3b503efc0c2b40d99c8b50b2f56df
+ size 33555333
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f56b9d7d7030efc58119697ef7cd49c1e13e049602f9cfd1f13c95a68c55d88
+ size 90178428
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92ceaed40b266bcf0a5a486a3750bbdc8d6b9e0d15a04b42152ec8393a026522
+ size 90178422
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:affce5815af7f4e847757f5685238ccf579fc521384471262608952a740e4ecd
+ size 90178428
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac8799bf94e69caaae5db3517ef83f8475fcf47e1bd98e06129813a0da5adf6f
+ size 9090
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5582754fc4a8ccc52ee6fe181e0bcacb2e1284a389bea2dc3c1e50d09a8ef848
+ size 9117
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da62f2fb30c8c08097e67be37e4baf8fdaae4b2818d0a1163a4f25e213f69692
+ size 33555333
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c14c43769b3942606106a61d6882a13518bcd96c105e6e6c132aabed1da9fa8a
+ size 33555327
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbe084bb2999683d021f5ea156e59e1dc893429d9315f42e5e9f9542db389726
+ size 33555333
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95c5ff0f36b460404c6933380cf9ffb3013a2162fd4f37f2757ca1dd6695f4ed
+ size 33555333
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7b549f50a25dc026f4ce2ddccd27257a51bd91e003911ca86012082cecf726d
+ size 33555333
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff3f64a083d267b9b720a0547e5c019deb0455371543e1f5e80925137d84b98e
+ size 90178428
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adc35efe89a9eb9e8906f45e278b4bceb0334954564fea01a461d3d927ea7fda
+ size 90178422
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0a7ab25172808e3585082a1666ce6260932235226558208f12aad9c068300c9
+ size 90178428
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35768b712794627f73abf4c7beb0d9b7fc10fb9675ac3ecb76a9019d1b262e43
+ size 9090
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2052931ab0c9bc9a889d48e1ac4c0c936fd934ede1f7fd34a0c2dc9d3c5c196f
+ size 9117
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19f3b3d03084a040b6de3ac47614f7222b125c17d584240518e592aa2cdab8fe
+ size 33555333
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df69613fe22871d2e66fb5894d7339bcc9679837ed1a8fc659d55df2bea71a10
+ size 33555333
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d931798eabba5f4391201204c0ef75bce9ef9c9bf0476e893805992db268c4b
+ size 33555327
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:332c057db2d95cc1c3f5468a86270d8460eb5011f5c6930b7cc8ca4eeaedf937
+ size 33555333
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a7d2f5f32e7d108962561d1056a947dd1a8e5f8564d0fa7efa07bb917fe958b
+ size 33555333
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c796e03a2572973bae0cfb751ac1f4e134b2200262663acc41be19894d86a1b3
+ size 90178428
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5cfb3bed6657c60ca3fe318712ddacb503b3b9979d88d3f86438c5494a73f31f
+ size 90178422
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23a6a7feeb744e9bb818cdd7ecc5e3b787199002778bf2749f7e0b8b618acf19
+ size 90178428
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e91bf2a5e6bb1ea9c56c06c40e1e5adf29e458d85162f305e6d87169aad77da
+ size 9090
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38d31bfde725920c6ba7b952f72d3803fcee3b8d7ec256d7462b4d1c0f215c31
+ size 9117
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8073ee1946c35e1a67bed64e38e2eb2e9110d85b635b9ea75fd5f992b869e5e7
+ size 33555333
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e5ace7548ba431723655afccfdda220da34ad927fd3a23669285f8a8cc1f5cc
+ size 33555333
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef005ddd95d8dd01d64fb8fe4dfc7c5b343439aefd4425623e05657e0930e8db
+ size 33555333
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9f655f5e03826487f1f7b0254d7b7c1ab9a039c9d777b8d1fbc39893b62d6893
+ size 33555327
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc12a081bdc152039d687257ab054e8c92a2b497e31e5d1ea4acf7d0c4a39a08
+ size 33555333
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed8297daa92bfdfd993ccdc6ec0a81382b4ca80a6303e7cacaf39963428a6088
+ size 90178428
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac384ec8f6311ad346caee5a1b2508394d57a8ae77987f111e71356b2a96e4d0
+ size 90178422
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4a0a1b66741f231788a696b5fbfabf61c91ac3241ca725718ac906a34b06d58
+ size 90178428
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af1ae0e75143ebf2582cd222aab5765335a0fecaa4aafe5470463da26fac9fbe
+ size 9090
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d3e6a57c7a682c45673bfbb5719659acc70d84cfb03c97bc916c277989adba9
+ size 9117
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53b8a9168be795c6442b86bb41a4f3ca0b8b456b37b24347ce7a396a58067cfb
+ size 33555333
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56b80dc76ea8d674dc8da50dc4f023cd0a4fe6ca5103179acd12694570c5d6f0
+ size 33555333
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5e417684c6a4072e4c3830752655d0cc24364f3691ad076453bcc79f2a02beec
+ size 33555333
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f3844092ee13aa0678346022cec1070e77ea9a7b3737e5ab1dd8752653088c0
+ size 33555333
checkpoint/pytorch_model.bin/p14.model.layers.1.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:290d028ca0d32afd8d98e5d6a7757bd63df14baab527f9d5ac24a3024508f851
+ size 90178422