Upload folder using huggingface_hub

#1
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full list.
Files changed (50)
  1. .gitattributes +8 -0
  2. checkpoint/config.json +29 -0
  3. checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
  4. checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  5. checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  6. checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  7. checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  8. checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  9. checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  10. checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  11. checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  12. checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  13. checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  14. checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  15. checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  16. checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  17. checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  18. checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  19. checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  20. checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  21. checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  22. checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  23. checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  24. checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  25. checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  26. checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  27. checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  28. checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  29. checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  30. checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  31. checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  32. checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  33. checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  34. checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  35. checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  36. checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  37. checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
  38. checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
  39. checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
  40. checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
  41. checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
  42. checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
  43. checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
  44. checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
  45. checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
  46. checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
  47. checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
  48. checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
  49. checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
  50. checkpoint/pytorch_model.bin/p14.model.layers.1.mlp.gate_proj.weight +3 -0
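The commit title says the folder was pushed with huggingface_hub. For reference, below is a minimal sketch of how such an upload is typically done with HfApi.upload_folder; the local path and repository id are hypothetical placeholders, not values taken from this commit.

# Sketch of the upload flow named in the commit title
# ("Upload folder using huggingface_hub").
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./llama-2-13b-neuron",  # hypothetical local folder containing checkpoint/ and compiled/
    repo_id="user/llama-2-13b-neuron",   # hypothetical target model repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)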
.gitattributes CHANGED
@@ -33,3 +33,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/043ef3de2405203f7bd3.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/0a7ed4c30b45af937a34.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/808b62be7f2953e68ae6.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/8eb82f172ecf7451bd42.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/9086e4272d7b57eb390e.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b7d11aba4f1cba2bfcd3.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/de2d44d1c147da9b9efc.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/fc06285e6b30438b6252.neff filter=lfs diff=lfs merge=lfs -text
checkpoint/config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "NousResearch/Llama-2-13b-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 13824,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 40,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.36.2",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
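The added checkpoint/config.json is a standard transformers Llama config (Llama-2-13b: hidden_size 5120, 40 layers, 40 attention heads). A minimal sketch of reading it back, assuming the repository has been downloaded locally so that ./checkpoint/config.json exists:

# Read the config shown in the diff above.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint")
print(config.model_type, config.hidden_size, config.num_hidden_layers)  # llama 5120 40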
checkpoint/pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:accc6d0b7e8ac18844510c952c638af18d0928c1f979af789d331fe83a76804b
+ size 33029
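Every file under checkpoint/pytorch_model.bin/ is stored through Git LFS, so each diff below shows only the three-line pointer (version, oid, size) rather than the tensor data. A small sketch of parsing such a pointer file, assuming it is read from a plain clone where the pointers have not been smudged into real files:

# Parse a Git LFS pointer file like the ones in this commit
# (three lines: version, oid sha256:<hash>, size <bytes>).
def parse_lfs_pointer(path):
    info = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            info[key] = value
    return info

# Hypothetical local path:
# parse_lfs_pointer("checkpoint/pytorch_model.bin/key_to_filename.json")
# -> {"version": "https://git-lfs.github.com/spec/v1",
#     "oid": "sha256:accc6d0b...", "size": "33029"}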
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:124df459758e2c509b7996bc062bc1371bd0d3d17cca9372a2d014179a2cf1d5
+ size 327680789
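The checkpoint is saved as one file per parameter (p0.model.embed_tokens.weight, p1.model.layers.0.self_attn.q_proj.weight, ...) inside a pytorch_model.bin/ directory, with key_to_filename.json presumably mapping state-dict keys to those files. A hedged sketch of how such a split checkpoint could be reassembled into a single state dict, assuming that mapping and that each per-parameter file loads with torch.load; this is an assumption about the layout, not something stated in the repository:

# Rebuild a state dict from the split checkpoint layout shown in this commit.
import json, os
import torch

ckpt_dir = "checkpoint/pytorch_model.bin"
with open(os.path.join(ckpt_dir, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)  # assumed: {state_dict_key: per-tensor filename}

state_dict = {
    key: torch.load(os.path.join(ckpt_dir, filename), map_location="cpu")
    for key, filename in key_to_filename.items()
}
print(len(state_dict), "tensors loaded")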
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:986bc1471da92591e6fa6c371bb82770cf893bd62924b9f46927b50e3c56c8af
+ size 52429692
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:441d69dd83af4c82d2a782a199ae3692ff9b1d3453911d5f4d3bd9b6780f8e4a
+ size 52429695
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e3c663193f8b001345f84aba579f26409043dab13244fca3b8849c9c93ddb6fb
+ size 52429701
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fca728db6d113a66b8e1b465a658e740bafe452491cb79d63054645c197672e
+ size 52429701
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2bd82d3444ba9909d99f14736e9cce584593f4e908ac69bd8d4154c6e322d443
+ size 52429701
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2ccb6f06814cd843665730e2731e94e5cbd6932d353ca4c7dcbbcac01f8fdfb
+ size 52429701
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c62266905be5ef9567a695cbff8d1c23ad71fc677dd6d113530b05cdef05bac
+ size 141558652
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b59c311a1278fe74fa75863060cdaa9295c9584f2318ba1cdc11a15e7090acf4
+ size 141558646
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6fdbfe957e212725e7c6e497f681b27c378fa22fd688e6a9a43341bb89e90f2d
+ size 141558652
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00dd95e9d149840ab472bd1e28300fa4e2bdc1ee992f0f2d632f9eee7ce7530a
+ size 11138
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70e85033e8c6de3a1a04bb0e350e4252f600e6064f2e1e77d977c1ff82d50b57
+ size 11165
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02b7e239e43d681911f57de8e6b5f9b20f8c4ed5062a428f28ce71be6320b5ac
+ size 52429701
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e43420c4a27f71df4346e38b2385c1b39d12982acc65f8559c290f2cae072de
+ size 52429695
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:159fac4a74a541e008bd19844145b114cc939b86066dfa20fdcef952ce6db57c
+ size 52429701
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3e266bd8be1e0e5b033d95b91106eea2e184a4878f5cff86b7ae441f852c0dd0
+ size 52429701
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b9fa850b81cd506c786e4be972a1667f342087452e0bbc340944d9dad349d4b
+ size 52429701
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd5c8ff106fa396f205ecf65a8357e2508ddd001cc9e9fa944159878f895eccb
+ size 141558652
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:707867c3afb47d0ecb5cbb4ff33c3937bbd7a92e63b3b3032e48c7e07ea2f416
+ size 141558646
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50ddaab56f0949a69f334e8aa53fa302d01f247a029b2a93bc5920c6269e6468
+ size 141558652
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcd6da5c2993a3a85e157f38b649ca97eca90db3c7280678531a62bf64202907
+ size 11138
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99e5e66c2c19fdf13efb8d7e4bd057beb663c6565566ebe36682b35b4ef9f0ff
+ size 11165
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4efcb1c69a787081c89bb44b7ec44448e24858b57b246fba32ba7a293faa544e
+ size 52429701
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5af29933ba17aea7d8dc7fea60fad99ae0048de40b9c3b39a71758654266567c
+ size 52429701
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6974218e060172bcf43aac2a6e4eb2b3aeb893fb49a19d49addf7e0621713871
+ size 52429695
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c79c756a536be3a63f46b59706bf41e218f8f97f37709ada00489b72fe565099
+ size 52429701
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c706695e50461518555a3a50fdbf8cf96f33e42a0db46e8a3cf37ddefe29516
+ size 52429701
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:237823b099afd7401a5300f6e726a0afa536dd7606557ff14d23ea0938388a5b
+ size 141558652
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37f8baeb107e8b362c5ff4312fbfb77098eb7998dfd7a8806561ffc50d02324d
+ size 141558646
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef34b3f1aa0fde0fff0c38a5bba2bb168135268b2abeadbbca73ed4ca14f15f9
+ size 141558652
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4655dfc82c68649254517d9ad935ca9100cca42fcdbed736f235b20003207b77
+ size 11138
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0ba5d3578d2a7e327029e97ae6cabec46658c10852d21542b114ce66245ddf6
+ size 11165
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b3090db0b4f087eb37239fe14ffdf3544dcca60276a8a53e746548a9f717369
+ size 52429701
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73009b3cad4e4dd4a339004b1f071488ac0b11c0540c15ffe8901ad4172f7fd8
+ size 52429701
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65a2a70182456d4652d39500725f0f6de411fcdf17a2cb7f8c15cbedfe923b81
+ size 52429701
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16860d4d3dd820ef338c66a4c3777f5d06249ad4a4a6ffbf16e44bde34faf2fb
+ size 52429695
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1259a469d4fff8ff9e233c17fb34c665fa5a301a44f3e6d3fab78a1119fc2fc0
+ size 52429701
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d1ca754db1ab611166e904dc1dabac7e5cfc24b05c15d90fa0a29135144af75
+ size 141558652
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ee42be4ce1f4d461d5141291cedad3a99e97b75319c9a7b0ad780d3292b81c0
+ size 141558646
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23366fedaceff0d3fe538bc797306f8709e955d20c6a020f6e8af926a8d1913f
+ size 141558652
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e65ac39eaa872f81114238626777754800b9affa044c154f7804c68b071747c8
+ size 11138
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65d2d6b71b00287983be27742143dc7338b653109d8b05fe4816d5edc2f7fb61
+ size 11165
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e46266b3e2c20fb0949cf1dc30f799710493594cdd3c370cb3d41d804d76c70
+ size 52429701
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0d36e985af8c4efc1ea1da57485da380526b32984f50bb5c26398b6c3fafced
+ size 52429701
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3088355eb4fdac26ae45d3dc03b325dc1afbaf3bb1c45ca91d581ee20e8d52a5
+ size 52429701
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abf90dcce13319ac7f54bf839d60d1693dc72d5101f2daca9c781f807bcc7f20
+ size 52429701
checkpoint/pytorch_model.bin/p14.model.layers.1.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:addcb7e44384f325d16da4c59f76cf85a78069d90b45d7b58e81f53134c13fcf
+ size 141558646