Diogo-V committed
Commit 4a68d7f
1 Parent(s): 08d5992

Upload learned parameters for llama3 in bit 8

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. params/llama3/8/norm_nlr/comb_13/init/lm_head/_0.pt +3 -0
  2. params/llama3/8/norm_nlr/comb_13/init/lm_head/_1.pt +3 -0
  3. params/llama3/8/norm_nlr/comb_13/init/lm_head/_s.pt +3 -0
  4. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.down_proj/_0.pt +3 -0
  5. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.down_proj/_1.pt +3 -0
  6. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  7. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.gate_proj/_0.pt +3 -0
  8. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.gate_proj/_1.pt +3 -0
  9. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  10. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.up_proj/_0.pt +3 -0
  11. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.up_proj/_1.pt +3 -0
  12. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  13. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.k_proj/_0.pt +3 -0
  14. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.k_proj/_1.pt +3 -0
  15. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  16. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.o_proj/_0.pt +3 -0
  17. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.o_proj/_1.pt +3 -0
  18. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  19. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj/_0.pt +3 -0
  20. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj/_1.pt +3 -0
  21. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  22. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.v_proj/_0.pt +3 -0
  23. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.v_proj/_1.pt +3 -0
  24. params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  25. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.down_proj/_0.pt +3 -0
  26. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.down_proj/_1.pt +3 -0
  27. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  28. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.gate_proj/_0.pt +3 -0
  29. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.gate_proj/_1.pt +3 -0
  30. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  31. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.up_proj/_0.pt +3 -0
  32. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.up_proj/_1.pt +3 -0
  33. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  34. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.k_proj/_0.pt +3 -0
  35. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.k_proj/_1.pt +3 -0
  36. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  37. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.o_proj/_0.pt +3 -0
  38. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.o_proj/_1.pt +3 -0
  39. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  40. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.q_proj/_0.pt +3 -0
  41. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.q_proj/_1.pt +3 -0
  42. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  43. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.v_proj/_0.pt +3 -0
  44. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.v_proj/_1.pt +3 -0
  45. params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  46. params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.down_proj/_0.pt +3 -0
  47. params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.down_proj/_1.pt +3 -0
  48. params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  49. params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.gate_proj/_0.pt +3 -0
  50. params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.gate_proj/_1.pt +3 -0
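Each per-module directory listed above holds three small tensors (_0.pt, _1.pt, _s.pt); the diffs below show only their Git LFS pointer metadata (version, oid, size), not the tensor data itself. The following is a minimal, illustrative sketch of loading one set with PyTorch, assuming the repository has been cloned and "git lfs pull" has fetched the real binaries behind the pointers; the chosen path and variable names are examples, and the roles of _0, _1, and _s are not documented in this commit.

# Illustrative only -- not part of the commit. Assumes the repo was cloned
# and "git lfs pull" fetched the real binaries behind the LFS pointers.
import torch

base = "params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj"

# Each module directory holds three tensors; their exact roles are not
# stated in this commit, so they are loaded generically here.
p0 = torch.load(f"{base}/_0.pt", map_location="cpu")
p1 = torch.load(f"{base}/_1.pt", map_location="cpu")
ps = torch.load(f"{base}/_s.pt", map_location="cpu")

print(type(p0), getattr(p0, "shape", None))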
params/llama3/8/norm_nlr/comb_13/init/lm_head/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66ecbaa863d39e4d47eebf9a4356857a13d22db5d94a81c1d138fdc53328cec6
+ size 514115
params/llama3/8/norm_nlr/comb_13/init/lm_head/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:904337ebfa597681bfbbfa8b559ba2095597cbcf4e26241efd09a9b2855889d8
+ size 514115
params/llama3/8/norm_nlr/comb_13/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f74050ef0f9ab34776cd1a5936cbaa45fa76f5475aef4e9889c0d8854525bcfa
+ size 514115
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.down_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:679200fba0cf9ed65b9a32d946a395f67aba822eb86452b2f457a79f1d991e0f
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eaa46ca73b880e3d45c366d1d3d9d538b21235c6f696675ace1cdfbcd33e9579
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.gate_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4010e3f95de9ecdf2aa79f4c65ea39d4fca4f8adc1f32149a9326c411a6e05b
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddbc7f1d7666bc2025d04a53490c2b4b581f67040f8af64b4f8221133e99d15f
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eaa46ca73b880e3d45c366d1d3d9d538b21235c6f696675ace1cdfbcd33e9579
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.up_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da62d2ce96c91202ec62150858dd830d63db5e1dc4bc1e0791eac195b9b24f5d
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddbc7f1d7666bc2025d04a53490c2b4b581f67040f8af64b4f8221133e99d15f
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b2bf0dd42b7e184b6475179e63ae6256dc3660f9bf7985af0e984775039bad3
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.k_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c93ffb9727abae5932e182fa9a8c8ab806fcfc2d7462b17d8b3ad6e620bf958a
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:762f4060c38e25da334753dab0ba34d06e53ee87af145c5fb322da3c292a21a1
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.o_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa544c5930834d2ff10075d529e43bb8067baf8b9cf40eaa25345bc204d3d291
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a1aa3608aafecde6c30c632180c82006e209c91709343944e7767917c855e2d
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b2bf0dd42b7e184b6475179e63ae6256dc3660f9bf7985af0e984775039bad3
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.v_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:643ebae93814c18f0d966ab18cfd780a63ab8f3c9f31b43aafa5a5a0750b7b30
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:762f4060c38e25da334753dab0ba34d06e53ee87af145c5fb322da3c292a21a1
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.down_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4892301f4116e2c75a386aa8dfd7fb5ec645f1570af4c83ee1d0b5d05d59b76a
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eaa46ca73b880e3d45c366d1d3d9d538b21235c6f696675ace1cdfbcd33e9579
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.gate_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f984cf9e9d5d0486638851d98efb859402eb8db99b123839ff210ea8943377c8
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddbc7f1d7666bc2025d04a53490c2b4b581f67040f8af64b4f8221133e99d15f
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eaa46ca73b880e3d45c366d1d3d9d538b21235c6f696675ace1cdfbcd33e9579
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.up_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56b4884878730f8ccee9e811c9c1c9a3d9f5a68e7dd58f5eaba15e61d4c07103
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddbc7f1d7666bc2025d04a53490c2b4b581f67040f8af64b4f8221133e99d15f
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b2bf0dd42b7e184b6475179e63ae6256dc3660f9bf7985af0e984775039bad3
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.k_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fece8d039f54b27c05b441dcf3fb3bc5e83ebca5123dc91ee67b03bdf6f944f
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:762f4060c38e25da334753dab0ba34d06e53ee87af145c5fb322da3c292a21a1
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.o_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c5fb8bdf840f756fb3741b520df7d724005c62df3fe91173eefadc6d760809d
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.q_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59be746e994e20ae89a23febfe8d0ec2334f77d5c09750051431fd305e11aacc
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b2bf0dd42b7e184b6475179e63ae6256dc3660f9bf7985af0e984775039bad3
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.v_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dac76cc0dc8a24d45ed6dd72e7df96595c685babe7a069ea04fd196fcdb9807d
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:762f4060c38e25da334753dab0ba34d06e53ee87af145c5fb322da3c292a21a1
+ size 5187
params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a1f89734062f512c80f275148f142ae2c35b2cfbba68b998d7a61ce7f806082
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.down_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fba2574942416108980a55c48758319a54e6d2ed734a22fcc8959f0b2740cd09
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2456aba13a6dfcd0f3a7003ed980a431d40fe17c33bae558f44badae8de9a58
+ size 17475
params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eaa46ca73b880e3d45c366d1d3d9d538b21235c6f696675ace1cdfbcd33e9579
+ size 58435
params/llama3/8/norm_nlr/comb_13/init/model.layers.10.mlp.gate_proj/_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2553af7a3ab9c143ad189c859ff04620cc7933dd9161ed4c9b959937565f5f43
+ size 58435