Diogo-V committed
Commit 324b404
1 Parent(s): c0f773d

Upload learned parameters for llama3 in 8-bit
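
Each file added below is a Git LFS pointer to one learned quantization tensor: every quantized projection module gets a scale (scale.pt) and a zero-point (zp.pt). As a rough illustration of how such a pair could be consumed, here is a minimal Python sketch; the directory root, the helper name, and the affine dequantization step are assumptions for illustration and are not specified by this commit.

# Minimal sketch, assuming the layout params/<model>/<bits>/fixed/woq/init/<module>/{scale,zp}.pt
# and a standard affine weight-only quantization convention. The helper name and the
# dequantization formula are illustrative assumptions, not defined by this repository.
import torch

def load_qparams(root: str, module: str):
    """Load the learned scale and zero-point tensors for one projection module."""
    scale = torch.load(f"{root}/{module}/scale.pt", map_location="cpu")
    zp = torch.load(f"{root}/{module}/zp.pt", map_location="cpu")
    return scale, zp

# Example: parameters for the layer-0 q_proj weights (llama3, 8-bit, fixed, weight-only).
scale, zp = load_qparams(
    "params/llama3/8/fixed/woq/init", "model.layers.0.self_attn.q_proj"
)

# Under the usual affine convention (assumed here), an int8 weight tensor would be
# dequantized as: w_fp = (w_int8.float() - zp) * scale

Note that the files in this commit are Git LFS pointers (version/oid/size), so the actual tensors must be fetched (for example with git lfs pull) before torch.load will succeed.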

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. params/llama3/8/fixed/woq/init/lm_head/scale.pt +3 -0
  2. params/llama3/8/fixed/woq/init/lm_head/zp.pt +3 -0
  3. params/llama3/8/fixed/woq/init/model.layers.0.mlp.down_proj/scale.pt +3 -0
  4. params/llama3/8/fixed/woq/init/model.layers.0.mlp.down_proj/zp.pt +3 -0
  5. params/llama3/8/fixed/woq/init/model.layers.0.mlp.gate_proj/scale.pt +3 -0
  6. params/llama3/8/fixed/woq/init/model.layers.0.mlp.gate_proj/zp.pt +3 -0
  7. params/llama3/8/fixed/woq/init/model.layers.0.mlp.up_proj/scale.pt +3 -0
  8. params/llama3/8/fixed/woq/init/model.layers.0.mlp.up_proj/zp.pt +3 -0
  9. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.k_proj/scale.pt +3 -0
  10. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.k_proj/zp.pt +3 -0
  11. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.o_proj/scale.pt +3 -0
  12. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.o_proj/zp.pt +3 -0
  13. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.q_proj/scale.pt +3 -0
  14. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.q_proj/zp.pt +3 -0
  15. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.v_proj/scale.pt +3 -0
  16. params/llama3/8/fixed/woq/init/model.layers.0.self_attn.v_proj/zp.pt +3 -0
  17. params/llama3/8/fixed/woq/init/model.layers.1.mlp.down_proj/scale.pt +3 -0
  18. params/llama3/8/fixed/woq/init/model.layers.1.mlp.down_proj/zp.pt +3 -0
  19. params/llama3/8/fixed/woq/init/model.layers.1.mlp.gate_proj/scale.pt +3 -0
  20. params/llama3/8/fixed/woq/init/model.layers.1.mlp.gate_proj/zp.pt +3 -0
  21. params/llama3/8/fixed/woq/init/model.layers.1.mlp.up_proj/scale.pt +3 -0
  22. params/llama3/8/fixed/woq/init/model.layers.1.mlp.up_proj/zp.pt +3 -0
  23. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.k_proj/scale.pt +3 -0
  24. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.k_proj/zp.pt +3 -0
  25. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.o_proj/scale.pt +3 -0
  26. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.o_proj/zp.pt +3 -0
  27. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.q_proj/scale.pt +3 -0
  28. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.q_proj/zp.pt +3 -0
  29. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.v_proj/scale.pt +3 -0
  30. params/llama3/8/fixed/woq/init/model.layers.1.self_attn.v_proj/zp.pt +3 -0
  31. params/llama3/8/fixed/woq/init/model.layers.10.mlp.down_proj/scale.pt +3 -0
  32. params/llama3/8/fixed/woq/init/model.layers.10.mlp.down_proj/zp.pt +3 -0
  33. params/llama3/8/fixed/woq/init/model.layers.10.mlp.gate_proj/scale.pt +3 -0
  34. params/llama3/8/fixed/woq/init/model.layers.10.mlp.gate_proj/zp.pt +3 -0
  35. params/llama3/8/fixed/woq/init/model.layers.10.mlp.up_proj/scale.pt +3 -0
  36. params/llama3/8/fixed/woq/init/model.layers.10.mlp.up_proj/zp.pt +3 -0
  37. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.k_proj/scale.pt +3 -0
  38. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.k_proj/zp.pt +3 -0
  39. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.o_proj/scale.pt +3 -0
  40. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.o_proj/zp.pt +3 -0
  41. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.q_proj/scale.pt +3 -0
  42. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.q_proj/zp.pt +3 -0
  43. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.v_proj/scale.pt +3 -0
  44. params/llama3/8/fixed/woq/init/model.layers.10.self_attn.v_proj/zp.pt +3 -0
  45. params/llama3/8/fixed/woq/init/model.layers.11.mlp.down_proj/scale.pt +3 -0
  46. params/llama3/8/fixed/woq/init/model.layers.11.mlp.down_proj/zp.pt +3 -0
  47. params/llama3/8/fixed/woq/init/model.layers.11.mlp.gate_proj/scale.pt +3 -0
  48. params/llama3/8/fixed/woq/init/model.layers.11.mlp.gate_proj/zp.pt +3 -0
  49. params/llama3/8/fixed/woq/init/model.layers.11.mlp.up_proj/scale.pt +3 -0
  50. params/llama3/8/fixed/woq/init/model.layers.11.mlp.up_proj/zp.pt +3 -0
params/llama3/8/fixed/woq/init/lm_head/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb9d51b0b94d1935a400d14d8f3cbc22b0aec223a5e505bc6a3dc41348fc1d0e
+ size 514130
params/llama3/8/fixed/woq/init/lm_head/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.mlp.down_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe4683945d46ff4ed628c3b74ed720194cad1369a67b3b2b5107defe0ae6c8e8
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.0.mlp.down_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.mlp.gate_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e0db87a63cc83d107d95891da9bca895dc2cbfb5deda09306ccfcf580219096
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.0.mlp.gate_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.mlp.up_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12b98e8d5a4535c783269458b838688172481d46a2d8ad397a717f9b331b72a1
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.0.mlp.up_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.k_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83a06f4d3221982e8c28dd8b988f0d8afb25ab4bd8e34341385a9b97c7a7413d
+ size 5202
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.k_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.o_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ecfda1c9ec59d8b8dd7f7efe92b1effe4110e78a6273863776509d2fc184b75
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.o_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.q_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a4e98bfee0d092272e23972669062008c398c6bd8b1ad81344ecf525c7dd7cca
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.q_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.v_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c5d614d74cf5c6475890fff7574179d1b169e280b63008fbc97dbde60c92294
+ size 5202
params/llama3/8/fixed/woq/init/model.layers.0.self_attn.v_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.mlp.down_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31562411c5e0471d7caedc03deac3084487a9a57b0fd7dda9603f9c59cbe81c7
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.1.mlp.down_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.mlp.gate_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c916059615b9060b1418b5429cff29a201869a3e67ac582a3e9f6ab782ae0993
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.1.mlp.gate_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.mlp.up_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7cffd4e6e6377b14ee80044a3714e14cfaa05e17916cfbc3b52f87ea058bc502
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.1.mlp.up_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.k_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97fbaff4637395887e3905d60489dd61d258816d19979a21f29131df5928a99c
+ size 5202
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.k_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.o_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d27db9d27af5733a215394b42ba50f6380649df4c061b409412efd0c71f3303
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.o_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.q_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef5491dcdef8a296b97e63fcc5638d860cb86cc4c4c9665b1b575801bf6ee4c7
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.q_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.v_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44c02d7782cb3feb92852943a37eb1aa0fd09b76aa27cab53b1edd30428f5c2f
+ size 5202
params/llama3/8/fixed/woq/init/model.layers.1.self_attn.v_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.mlp.down_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a48a9480321c3a23644550d866226249781bd81d34672a4c3f6835463feafd5
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.10.mlp.down_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.mlp.gate_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e788c187e29cde7fe2037d445193a0d6cc8fa7878ea2e84951dcff215439244
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.10.mlp.gate_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.mlp.up_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:849750d6f07605af58b003ea1940b16dd61997f7abf195c7d99950b1f16657fd
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.10.mlp.up_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.k_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c36fb2f9af402757d4acd996e17d862bb4e2e7d125fb32ef47df72d6746a86a6
+ size 5202
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.k_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.o_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3e14879ff5129da1b3e822650e52cfc27c4bb44936fea1d59b702d3b09ea5fa2
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.o_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.q_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:946f1b200a5d6fe90bda40b3c7d7afb4b3ecfc2b0cb202fdf8e1ea743f4aadc9
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.q_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.v_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:030452a0f6d248a3505a8672386d64d5e3b42fbd89a90cb6b9d122a4556f1541
+ size 5202
params/llama3/8/fixed/woq/init/model.layers.10.self_attn.v_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.11.mlp.down_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:631713d454154a3948051f6c89fd4c0b089f3b7bbd3e2227d034f518623ba702
+ size 17490
params/llama3/8/fixed/woq/init/model.layers.11.mlp.down_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.11.mlp.gate_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:917313853f2bea281fac29dd09303ff5c12799b8ec59d342d683f085bc050ade
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.11.mlp.gate_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091
params/llama3/8/fixed/woq/init/model.layers.11.mlp.up_proj/scale.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8517e9344eab44d3d32c7fb4c1b8fd1114329b20f623b5de4c313d5edc7d324
+ size 58450
params/llama3/8/fixed/woq/init/model.layers.11.mlp.up_proj/zp.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b74cbd027a8e6a7253e3ee35cfc1ea4eabe7d830e85cc2a975144d18821839da
+ size 1091