kctung committed
Commit
72de4d9
1 Parent(s): cf03871

Upload folder using huggingface_hub
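The commit message points to huggingface_hub's folder-upload API. A minimal sketch of how such a push is typically done with `HfApi.upload_folder`; the local folder path and `repo_id` below are illustrative placeholders, not values taken from this commit:

```python
from huggingface_hub import HfApi

api = HfApi()

# Upload a local folder (here containing checkpoint/ and compiled/) in a single commit.
# folder_path and repo_id are placeholders for the actual local directory and Hub repo.
api.upload_folder(
    folder_path="./llama-2-13b-chat-neuron",
    repo_id="kctung/llama-2-13b-chat-neuron",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```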

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full list.
Files changed (50)
  1. .gitattributes +10 -0
  2. checkpoint/config.json +28 -0
  3. checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
  4. checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  5. checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  6. checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  7. checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  8. checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  9. checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  10. checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  11. checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  12. checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  13. checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  14. checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  15. checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  16. checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  17. checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  18. checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  19. checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  20. checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  21. checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  22. checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  23. checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  24. checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  25. checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  26. checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  27. checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  28. checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  29. checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  30. checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  31. checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  32. checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  33. checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  34. checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  35. checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  36. checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  37. checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
  38. checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
  39. checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
  40. checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
  41. checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
  42. checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
  43. checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
  44. checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
  45. checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
  46. checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
  47. checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
  48. checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
  49. checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
  50. checkpoint/pytorch_model.bin/p14.model.layers.1.mlp.gate_proj.weight +3 -0
.gitattributes CHANGED
@@ -33,3 +33,13 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/14892c3697fe75fcf5ba.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/4324bbbf2efe3b020cb0.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/6713ac5e3508807f155d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/7930f68549763720b2a6.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/80db0227b4687a604b6d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/a0500faabeaec34a49a2.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/a9afbefd753d1e7532db.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b6d8960b990399119584.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b9fd84fff0c75faa8a25.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/f19b229ae92965a53136.neff filter=lfs diff=lfs merge=lfs -text
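The ten added lines tell Git LFS to store the compiled Neuron artifacts (`compiled/*.neff`, AWS Neuron executable files) as LFS objects rather than in-tree blobs. If such patterns needed to be registered by hand before a commit, a small sketch could append them to `.gitattributes`; `track_with_lfs` below is a hypothetical helper, not part of huggingface_hub or git:

```python
from pathlib import Path

def track_with_lfs(repo_dir: str, patterns: list[str]) -> None:
    """Append Git LFS tracking rules to .gitattributes, skipping ones already present."""
    gitattributes = Path(repo_dir) / ".gitattributes"
    existing = gitattributes.read_text().splitlines() if gitattributes.exists() else []
    with gitattributes.open("a") as f:
        for pattern in patterns:
            rule = f"{pattern} filter=lfs diff=lfs merge=lfs -text"
            if rule not in existing:
                f.write(rule + "\n")

# Equivalent in spirit to the ten compiled/<hash>.neff rules added by this commit.
track_with_lfs(".", ["compiled/*.neff"])
```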
checkpoint/config.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "_name_or_path": "NousResearch/Llama-2-13b-chat-hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 13824,
+ "max_position_embeddings": 4096,
+ "model_type": "llama",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 40,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.35.0",
+ "use_cache": true,
+ "vocab_size": 32000
+ }
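The new config describes a standard 40-layer Llama-2-13B chat model (hidden size 5120, 40 attention heads, float32 weights). Given a local clone of this repository, it can be inspected with transformers' `AutoConfig`; the relative path is an assumption about where the clone lives:

```python
from transformers import AutoConfig

# Reads checkpoint/config.json from a local clone of this repository.
config = AutoConfig.from_pretrained("checkpoint")
print(config.model_type, config.hidden_size, config.num_hidden_layers)  # llama 5120 40
```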
checkpoint/pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:accc6d0b7e8ac18844510c952c638af18d0928c1f979af789d331fe83a76804b
+ size 33029
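`pytorch_model.bin` here is a directory, not a single file: every parameter is stored as its own tensor file, and `key_to_filename.json` maps state-dict keys to those files (the split layout produced by tools such as transformers-neuronx's `save_pretrained_split`). A reassembly sketch, assuming each listed file is a single tensor written with `torch.save`:

```python
import json
import os

import torch

ckpt_dir = "checkpoint/pytorch_model.bin"  # directory of per-parameter files

# key_to_filename.json maps state-dict keys to the per-tensor files in this directory.
with open(os.path.join(ckpt_dir, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)

# Load every tensor on CPU and rebuild a conventional state dict.
state_dict = {
    key: torch.load(os.path.join(ckpt_dir, filename), map_location="cpu")
    for key, filename in key_to_filename.items()
}
print(f"{len(state_dict)} tensors loaded")
```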
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a70f42406e1e8a70eab9801768882204bc61fa42b1c00086a75396864eb2a30d
+ size 655360789
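Each of these per-parameter files shows up in the diff only as a Git LFS pointer: three `key value` lines recording the spec version, the SHA-256 of the stored blob, and its size in bytes. A tiny parser for that format, applied to the embed_tokens pointer above:

```python
def parse_lfs_pointer(text: str) -> dict:
    # A Git LFS pointer is three "key value" lines: version, oid, size.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:a70f42406e1e8a70eab9801768882204bc61fa42b1c00086a75396864eb2a30d
size 655360789"""
print(parse_lfs_pointer(pointer))
```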
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3e23f82dd586b7f380291efd305c75358c18d3d03276672b2b9c67a9ba0daafe
+ size 104858492
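The pointer sizes are consistent with float32 tensors of the shapes implied by checkpoint/config.json, plus a few hundred bytes of torch serialization overhead; a quick arithmetic check:

```python
# Shapes come from checkpoint/config.json; products are raw float32 byte counts.
hidden, intermediate, vocab = 5120, 13824, 32000

print(vocab * hidden * 4)         # 655_360_000 -> embed_tokens pointer reports 655_360_789
print(hidden * hidden * 4)        # 104_857_600 -> q/k/v/o_proj pointers report ~104_858_49x
print(intermediate * hidden * 4)  # 283_115_520 -> gate/up/down_proj pointers report ~283_116_4xx
```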
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2705124d92bdb57d34ba3b244d7a3922d6f1566718371fdafc49ed2cdae381a3
+ size 104858495
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86da01376c45fa705be6d408ea34d1baff0b7d3bcd95d6bd776a33358722b7e4
+ size 104858501
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6aced1a333bb27e7eb558bb1025a96b8294157dacdd03601bb5a349b7f7f3f89
+ size 104858501
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d9a9bed6e328c60ec42d254113aabd7990a233074d5bbd95d29df2868ed8d02
+ size 104858501
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7208aa99c4eac6130c6377462d5476cbf1ee64db224cb8ddd230d51cb5ee31ea
+ size 104858501
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a87c5e3db3f12f3a185a8e469f496aefcf3c8361b2bd0c619124b5d7e3b73bde
+ size 283116412
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7a98efb1e6288dc51cc22c2a73ce0ff6ab0ae20b940e2a61ead265e11070639
+ size 283116406
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46a33463dc5ab3c8e76c1a3f70489f84a3143678afb02e9e56feed5af2b10586
+ size 283116412
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:874fe79b1df337b75ec15fe481087968fffe92583ecbd0e4fc94239532777a31
+ size 21378
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43dde238c2485384443ce7f85a46011101bf21b5646f3549800d862acdbf7018
+ size 21405
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8681c5dcae4e93670fb4ed924849524d55911abc47a64b7323104f5a7450667b
+ size 104858501
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ba0a0d31b1818250f02a7ec79116d779cdf9d98952f39888b8ac5f4bafccf6d
+ size 104858495
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6bbe2e3c1bd5c9a2736f4117986d9f839febc10673964cdaca83cad6948a3ef8
+ size 104858501
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5e8163d49489b4ce1ef2e0c4f79b59bdbcc8d0c2d43304ee7f8accf19fadf45
+ size 104858501
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4312408a0ef75b560d9c395e700f50aca1e21bbbbed2e91d7d10606052ef810e
+ size 104858501
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb68737e2512a2fa6842bddd5a60e803dc6f72ddaa3696ad95f430a5366a40fa
+ size 283116412
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29f423a60a5ace1084a09dfb664a5e3dc186a91cce35c9037a4f0959427c37a1
+ size 283116406
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:823d34f25312ad5af52ba8bb75c3742c328e23f027ff7a1ac51bcc4f63fe7b82
+ size 283116412
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:204ceaa2e372cd4d3f459f62f77af6b9c3f3ab5acc633b6a3e02aeba3da8fd07
+ size 21378
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29b1b0f8c51e932a4a99b931ff947b53dcce7dd1219fd815ff55952185c2ca51
+ size 21405
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c9d43e983cc973fdd0c75d1199916274aeb85f60bcc9dbe6d2b0b67c8521a52
+ size 104858501
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:325d749de784539b03a2050be7d39bbed96f1b7876ed179a21861ac54c1367df
+ size 104858501
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae9817015666add006c74981572f75266504e78b10411a40e6497465162c35e5
+ size 104858495
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:626058f8e26db4858209d8e04dbb6d0e6f6ac48df444cbab1b6ae172344afc08
+ size 104858501
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2862e2d3c120b00f1fd3cfecbc18bc61594d8e6d66177c2cbaf13870f52c99e
+ size 104858501
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7aa4582181e02904e885b20a6f17ac1561cc96e614ce497dd469969841ecebb
+ size 283116412
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b809624284a3e4b6a50c193e3f8588f0d0938373324419c3657097d0fc43e970
+ size 283116406
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:785239d54941559308867bc5dce1f3c8d608e260b7bdaf3faa8855d7bbaf17ce
+ size 283116412
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae668da63467e37f52a94c18996901f53efcb7a7d866a7f1fca3321b1aaf966c
+ size 21378
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54a58a73a2b16e09e04ab0ead5b212276df6d3769166c44ee3692aca268d94cc
+ size 21405
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a47b96d9f145e29123f20503d77ec20eb7504a208aebb63b9d1fbc8b2f323247
+ size 104858501
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f40713cbac23ad06ea58a91ef24394aee38b92dff3cda5113f1b7c56e790dc3a
+ size 104858501
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02ff6f108905abd62a48cc419750c409bfe450a742080954450563f0c22d981f
+ size 104858501
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd14f0f65224420a4bed7fd3c17933ef72f3afeea07a28ad24eb78c9e2356147
+ size 104858495
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ee826b85af05086409a846f2de00b92d627ad84c1ace7a43a2b08d22366743e
+ size 104858501
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c01688d231ecac41c557ea19e4604a5c6d5891b5dbd2e7dede2f98a835270f4a
+ size 283116412
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:014d03ec8be090ee181b1465b4a8b3aa969a4c7f349f3361a9ecd25d9ea2ffff
+ size 283116406
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11def60c9a0ac1c33508d9cb3ebeaafa3364f485eb734572e7aa1633cf0f94a7
+ size 283116412
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:480d421f83359eb10b1c3a55a2585b35a81d2285075e6a7ddf42ba816126d093
+ size 21378
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c72755052ec9e5930343d8516ace43c05573430c559245221c3ac2e90f3818bd
+ size 21405
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7adc636044dec99dbdb38fe4f8995d368a94ea2b94d0f7f72e0ad8acb8c31d24
+ size 104858501
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5517320ed8bd64faf402848a0e527eb0e64817b8a3d4e056cc4ff8dfa646bb09
+ size 104858501
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5764cdc50985d2aa15d27ab5d30b0b2974d7ef5801e03ec8351f6a342e7c16a
+ size 104858501
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb7cc83947212024d44b9dd9c17b666bc8206e2a197baab48b79774954ddd6af
+ size 104858501
checkpoint/pytorch_model.bin/p14.model.layers.1.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c42482573723dc0147bcef88c4f73c795b8d194daea3bfcd8b8919fa8518c4eb
+ size 283116406