aapot committed
Commit 9671331
1 Parent(s): d290503

Add 160k train step

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50):
  1. .gitattributes +1 -0
  2. .gitignore +1 -0
  3. checkpoint_160000/checkpoint +3 -0
  4. checkpoint_160000/state.param_states.decoder.decoder_norm.scale.v/.zarray +3 -0
  5. checkpoint_160000/state.param_states.decoder.decoder_norm.scale.v/0 +3 -0
  6. checkpoint_160000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  7. checkpoint_160000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  8. checkpoint_160000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  9. checkpoint_160000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/0 +3 -0
  10. checkpoint_160000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  11. checkpoint_160000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/0 +3 -0
  12. checkpoint_160000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  13. checkpoint_160000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  14. checkpoint_160000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  15. checkpoint_160000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/0 +3 -0
  16. checkpoint_160000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  17. checkpoint_160000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/0 +3 -0
  18. checkpoint_160000/state.param_states.decoder.layers_2.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  19. checkpoint_160000/state.param_states.decoder.layers_2.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  20. checkpoint_160000/state.param_states.decoder.layers_2.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  21. checkpoint_160000/state.param_states.decoder.layers_2.pre_mlp_layer_norm.scale.v/0 +3 -0
  22. checkpoint_160000/state.param_states.decoder.layers_2.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  23. checkpoint_160000/state.param_states.decoder.layers_2.pre_self_attention_layer_norm.scale.v/0 +3 -0
  24. checkpoint_160000/state.param_states.decoder.layers_3.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  25. checkpoint_160000/state.param_states.decoder.layers_3.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  26. checkpoint_160000/state.param_states.decoder.layers_3.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  27. checkpoint_160000/state.param_states.decoder.layers_3.pre_mlp_layer_norm.scale.v/0 +3 -0
  28. checkpoint_160000/state.param_states.decoder.layers_3.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  29. checkpoint_160000/state.param_states.decoder.layers_3.pre_self_attention_layer_norm.scale.v/0 +3 -0
  30. checkpoint_160000/state.param_states.decoder.layers_4.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  31. checkpoint_160000/state.param_states.decoder.layers_4.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  32. checkpoint_160000/state.param_states.decoder.layers_4.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  33. checkpoint_160000/state.param_states.decoder.layers_4.pre_mlp_layer_norm.scale.v/0 +3 -0
  34. checkpoint_160000/state.param_states.decoder.layers_4.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  35. checkpoint_160000/state.param_states.decoder.layers_4.pre_self_attention_layer_norm.scale.v/0 +3 -0
  36. checkpoint_160000/state.param_states.decoder.layers_5.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  37. checkpoint_160000/state.param_states.decoder.layers_5.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  38. checkpoint_160000/state.param_states.decoder.layers_5.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  39. checkpoint_160000/state.param_states.decoder.layers_5.pre_mlp_layer_norm.scale.v/0 +3 -0
  40. checkpoint_160000/state.param_states.decoder.layers_5.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  41. checkpoint_160000/state.param_states.decoder.layers_5.pre_self_attention_layer_norm.scale.v/0 +3 -0
  42. checkpoint_160000/state.param_states.decoder.layers_6.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  43. checkpoint_160000/state.param_states.decoder.layers_6.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  44. checkpoint_160000/state.param_states.decoder.layers_6.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  45. checkpoint_160000/state.param_states.decoder.layers_6.pre_mlp_layer_norm.scale.v/0 +3 -0
  46. checkpoint_160000/state.param_states.decoder.layers_6.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  47. checkpoint_160000/state.param_states.decoder.layers_6.pre_self_attention_layer_norm.scale.v/0 +3 -0
  48. checkpoint_160000/state.param_states.decoder.layers_7.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  49. checkpoint_160000/state.param_states.decoder.layers_7.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  50. checkpoint_160000/state.param_states.decoder.layers_7.pre_mlp_layer_norm.scale.v/.zarray +3 -0
.gitattributes CHANGED
@@ -26,3 +26,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+checkpoint*/** filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1 @@
+__pycache__/
checkpoint_160000/checkpoint ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4a791e03860d3a1d2425a53817639e59747ebe25982eb6368401979c6011b8d
+size 1037969
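
Every file added under checkpoint_160000/ is stored the same way as the pointer above: Git LFS keeps only a version line, an oid sha256 line, and a size line in the repository, while the actual checkpoint data lives in LFS storage. As a minimal sketch (assuming the repository was cloned with LFS smudging skipped, so the working-tree file still contains the pointer text; the script name in the comment is just a placeholder), such a pointer could be read in Python like this:

import sys
from pathlib import Path

def parse_lfs_pointer(path):
    """Parse a Git LFS pointer file into its version, oid, and size fields."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "version": fields["version"],
        # the oid field is recorded as "sha256:<hex digest>"
        "oid": fields["oid"].split(":", 1)[1],
        "size": int(fields["size"]),
    }

if __name__ == "__main__":
    # e.g. python parse_pointer.py checkpoint_160000/checkpoint
    pointer = parse_lfs_pointer(sys.argv[1])
    print(pointer["oid"], pointer["size"])
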
checkpoint_160000/state.param_states.decoder.decoder_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.decoder_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ea476e9b5c7cacc82fd758e8ef1d445e3b25c84079508721cb24fe09d6db733
+size 1393
checkpoint_160000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:adb236417f09f0f7070e8eeaffc347cd057a47b86e6281eb47a292944dbb193b
+size 1421
checkpoint_160000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:36c2fd703c20dca06732087efa547399b47134f6b43abd5e0a3942c1294a5170
+size 1385
checkpoint_160000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed7c0fe38f4b37af74c06d2c4b97c02f2de0ec56cec4de93816a1f8932fbc5c6
+size 1433
checkpoint_160000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a852f6534c057b3f5e5389772d52eaafbb36cdb73295a06062b6509b5ef168bc
+size 1407
checkpoint_160000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6ca8a8ba109b4125a7bf7eb1f4d82fdbec157a5d41bc126db15a25e71d6e87ad
+size 1387
checkpoint_160000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d1adf9651488ec4f9a9a4c044377892d8bc1d62994eb966547f4ffdb8fe336d8
+size 1457
checkpoint_160000/state.param_states.decoder.layers_2.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_2.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72bc4944f88adba07d7f9270c46e799e30e80f681a0721a6f4ea92a91bd19106
+size 1427
checkpoint_160000/state.param_states.decoder.layers_2.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_2.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72feea1bdb5dd62bc94d26561c5f9ec58147809ac91f3d97e16cefa8fa5982fa
+size 1393
checkpoint_160000/state.param_states.decoder.layers_2.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_2.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32637fbd0ecad37e7c63a1698de2f6c72e9fdb993d2c40a4fa62d5879aeb3eca
+size 1434
checkpoint_160000/state.param_states.decoder.layers_3.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_3.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7a53119fbfec57495fc1ec9031e8f7d063b0ed34ff9880d7dd1ca845337e432
+size 1414
checkpoint_160000/state.param_states.decoder.layers_3.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_3.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b44e47d7a4064de5d57280fa334133907678f105b7ec0131f2e5c7a1855b14b8
+size 1408
checkpoint_160000/state.param_states.decoder.layers_3.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_3.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2fbabd726edbf9989660456f258b4d9acf2876dd7389538c9ac407f698548878
+size 1413
checkpoint_160000/state.param_states.decoder.layers_4.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_4.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ccb4387f092cff92f17cb51d9287ea7b1701b55acc3b5f01524b375525c5c6e
+size 1415
checkpoint_160000/state.param_states.decoder.layers_4.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_4.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a376c419ff4a5e274fa59b71482183a4b19adf7ddc9a0b7c17b942e3c93eb6ef
+size 1379
checkpoint_160000/state.param_states.decoder.layers_4.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_4.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb34cde5488558f39dbd83320591a41800c4e8f3fe6e01fe4cbd1860f58d474a
+size 1395
checkpoint_160000/state.param_states.decoder.layers_5.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_5.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8426ba0b5dce5f57255981bd70a4c452da5ce70e7888cee4caad0385380f1ad9
+size 1391
checkpoint_160000/state.param_states.decoder.layers_5.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_5.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d7df647430be4d40108930792ab846ce9af4d236f69c7a010a275671d91d899
+size 1408
checkpoint_160000/state.param_states.decoder.layers_5.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_5.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29d40f4ac5b0ad98772b7d06ba55da60aae7d54da5243c4736664f6d66f36651
+size 1390
checkpoint_160000/state.param_states.decoder.layers_6.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_6.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:edbcafd93f6dd7fcdce3c78e6f2a0e290b1e52af33383c54730966a2239b6088
+size 1426
checkpoint_160000/state.param_states.decoder.layers_6.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_6.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d8e699ab47077e68b59c8711a953000aba4803f37b81f121726d93226ad38f5
+size 1421
checkpoint_160000/state.param_states.decoder.layers_6.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_6.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6dde2ca096ed07fe1d5c9d72a4c248e623d2b19295bc94d7e7bac24bde1e1d09
+size 1429
checkpoint_160000/state.param_states.decoder.layers_7.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168
checkpoint_160000/state.param_states.decoder.layers_7.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8c039764714f7cce9e323ded2b76740aab4b74eac9aa126ea4641c5db3f1888
+size 1406
checkpoint_160000/state.param_states.decoder.layers_7.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0abc950cac11ef626f934932fe9764223fb94e32a0a61ee59b0e5bb8cb2426eb
+size 168