aapot committed
Commit: e26543b
Parent: 51631bf

Add 520k train step and HF flax model
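The commit message mentions an exported HF Flax model alongside the new T5X checkpoint. Below is a minimal loading sketch with transformers, assuming a standard T5 Flax export; the repo id is a placeholder, not taken from this commit.

```python
# Minimal sketch, assuming this repository ships a standard T5 Flax export.
# The repo id is a placeholder -- substitute the actual model repository.
import jax
from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration

repo_id = "<org>/<model-name>"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = FlaxT5ForConditionalGeneration.from_pretrained(repo_id)

# This is a span-corruption pretraining checkpoint, not an instruction-tuned model,
# so downstream use normally starts with fine-tuning. Print the parameter count as
# a quick sanity check that the weights loaded.
print(sum(p.size for p in jax.tree_util.tree_leaves(model.params)))
```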

This view is limited to 50 files because the commit contains too many changes.
Files changed (50):
  1. base_nl36_pretrain.gin +1 -1
  2. checkpoint_520000/checkpoint +3 -0
  3. checkpoint_520000/state.param_states.decoder.decoder_norm.scale.v/.zarray +3 -0
  4. checkpoint_520000/state.param_states.decoder.decoder_norm.scale.v/0 +3 -0
  5. checkpoint_520000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  6. checkpoint_520000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  7. checkpoint_520000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  8. checkpoint_520000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/0 +3 -0
  9. checkpoint_520000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  10. checkpoint_520000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/0 +3 -0
  11. checkpoint_520000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  12. checkpoint_520000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  13. checkpoint_520000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  14. checkpoint_520000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/0 +3 -0
  15. checkpoint_520000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  16. checkpoint_520000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/0 +3 -0
  17. checkpoint_520000/state.param_states.decoder.layers_10.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  18. checkpoint_520000/state.param_states.decoder.layers_10.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  19. checkpoint_520000/state.param_states.decoder.layers_10.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  20. checkpoint_520000/state.param_states.decoder.layers_10.pre_mlp_layer_norm.scale.v/0 +3 -0
  21. checkpoint_520000/state.param_states.decoder.layers_10.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  22. checkpoint_520000/state.param_states.decoder.layers_10.pre_self_attention_layer_norm.scale.v/0 +3 -0
  23. checkpoint_520000/state.param_states.decoder.layers_11.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  24. checkpoint_520000/state.param_states.decoder.layers_11.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  25. checkpoint_520000/state.param_states.decoder.layers_11.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  26. checkpoint_520000/state.param_states.decoder.layers_11.pre_mlp_layer_norm.scale.v/0 +3 -0
  27. checkpoint_520000/state.param_states.decoder.layers_11.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  28. checkpoint_520000/state.param_states.decoder.layers_11.pre_self_attention_layer_norm.scale.v/0 +3 -0
  29. checkpoint_520000/state.param_states.decoder.layers_12.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  30. checkpoint_520000/state.param_states.decoder.layers_12.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  31. checkpoint_520000/state.param_states.decoder.layers_12.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  32. checkpoint_520000/state.param_states.decoder.layers_12.pre_mlp_layer_norm.scale.v/0 +3 -0
  33. checkpoint_520000/state.param_states.decoder.layers_12.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  34. checkpoint_520000/state.param_states.decoder.layers_12.pre_self_attention_layer_norm.scale.v/0 +3 -0
  35. checkpoint_520000/state.param_states.decoder.layers_13.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  36. checkpoint_520000/state.param_states.decoder.layers_13.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  37. checkpoint_520000/state.param_states.decoder.layers_13.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  38. checkpoint_520000/state.param_states.decoder.layers_13.pre_mlp_layer_norm.scale.v/0 +3 -0
  39. checkpoint_520000/state.param_states.decoder.layers_13.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  40. checkpoint_520000/state.param_states.decoder.layers_13.pre_self_attention_layer_norm.scale.v/0 +3 -0
  41. checkpoint_520000/state.param_states.decoder.layers_14.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  42. checkpoint_520000/state.param_states.decoder.layers_14.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  43. checkpoint_520000/state.param_states.decoder.layers_14.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  44. checkpoint_520000/state.param_states.decoder.layers_14.pre_mlp_layer_norm.scale.v/0 +3 -0
  45. checkpoint_520000/state.param_states.decoder.layers_14.pre_self_attention_layer_norm.scale.v/.zarray +3 -0
  46. checkpoint_520000/state.param_states.decoder.layers_14.pre_self_attention_layer_norm.scale.v/0 +3 -0
  47. checkpoint_520000/state.param_states.decoder.layers_15.pre_cross_attention_layer_norm.scale.v/.zarray +3 -0
  48. checkpoint_520000/state.param_states.decoder.layers_15.pre_cross_attention_layer_norm.scale.v/0 +3 -0
  49. checkpoint_520000/state.param_states.decoder.layers_15.pre_mlp_layer_norm.scale.v/.zarray +3 -0
  50. checkpoint_520000/state.param_states.decoder.layers_15.pre_mlp_layer_norm.scale.v/0 +3 -0
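Only the first 50 changed files are listed; the bulk of the commit is the sharded T5X checkpoint under checkpoint_520000/. If only the Flax export is needed, it can be fetched selectively. A sketch assuming a recent huggingface_hub; the repo id is a placeholder and the patterns are illustrative.

```python
# Sketch: download only the Flax model / tokenizer / config files and skip the
# large checkpoint_520000/* T5X state. Repo id and patterns are illustrative.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="<org>/<model-name>",  # placeholder
    allow_patterns=["*.json", "*.msgpack", "*.model", "*.txt", "*.gin"],
)
print(local_dir)
```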
base_nl36_pretrain.gin CHANGED
@@ -19,6 +19,6 @@ utils.SaveCheckpointConfig:
  MIXTURE_OR_TASK_NAME = "pretrain_finnish"
  USE_CACHED_TASKS = False
  TASK_FEATURE_LENGTHS = {"inputs": 512, "targets": 512}
- TRAIN_STEPS = 500000
+ TRAIN_STEPS = 1000000
  DROPOUT_RATE = 0.0
  BATCH_SIZE = 64
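The only functional change in the gin file is extending TRAIN_STEPS from 500000 to 1000000; this commit snapshots the run at step 520000. A rough estimate of data seen so far, using only values visible in this config (an upper bound, since it assumes every batch is packed to the full 512+512 feature lengths):

```python
# Back-of-the-envelope from base_nl36_pretrain.gin values; upper bound because it
# assumes fully packed 512-token inputs and targets at every step.
BATCH_SIZE = 64
TOKENS_PER_EXAMPLE = 512 + 512        # TASK_FEATURE_LENGTHS inputs + targets
CHECKPOINT_STEP = 520_000
NEW_TRAIN_STEPS = 1_000_000

tokens = BATCH_SIZE * TOKENS_PER_EXAMPLE * CHECKPOINT_STEP
print(f"<= {tokens / 1e9:.1f}B tokens seen by step {CHECKPOINT_STEP:,} "
      f"({CHECKPOINT_STEP / NEW_TRAIN_STEPS:.0%} of the extended schedule)")
```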
checkpoint_520000/checkpoint ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab821c793570b61ed00fe7146a994c50026f5621bb12c0beab8c7ae9c99ac341
+ size 6520637
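Everything added under checkpoint_520000/ is stored as a Git LFS pointer (the version/oid/size triplet shown above), not the binary payload itself. A small sketch for parsing such a pointer and verifying a locally fetched blob against it; paths are illustrative.

```python
# Sketch: parse a Git LFS pointer file (version / oid sha256:<hex> / size <bytes>)
# and verify a downloaded blob against it. Paths are illustrative.
import hashlib
from pathlib import Path

def read_pointer(path: str) -> dict:
    # Each non-empty line is "<key> <value>".
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines() if line)
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def verify_blob(blob_path: str, pointer: dict) -> bool:
    data = Path(blob_path).read_bytes()
    return len(data) == pointer["size"] and hashlib.sha256(data).hexdigest() == pointer["oid"]
```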
checkpoint_520000/state.param_states.decoder.decoder_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.decoder_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ef52dd79173aa8b96481afd3ddb75c92517f5874d5b0316357ca594e06e0679
+ size 2667
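Each *.scale.v directory in the checkpoint is a zarr-format array: a .zarray metadata file plus numbered chunk files (here a single chunk, 0). After `git lfs pull`, the metadata and data can be inspected directly. A sketch assuming the `zarr` package is available; the path is one of the directories added in this commit.

```python
# Sketch: inspect one of the checkpoint's zarr arrays after `git lfs pull`.
# Assumes the `zarr` package; T5X writes these arrays via TensorStore, and plain
# zarr can usually read them back.
import json
import zarr

path = "checkpoint_520000/state.param_states.decoder.decoder_norm.scale.v"

with open(f"{path}/.zarray") as f:
    print(json.load(f))          # shape, dtype, chunking, compressor

arr = zarr.open(path, mode="r")  # read-only view of this parameter's optimizer state
print(arr.shape, arr.dtype)
```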
checkpoint_520000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_0.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28dbc829cfcbafbcf93c4863b574706ccd3fd9cbbc7848222e8b8e436897dc07
+ size 2838
checkpoint_520000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_0.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c1353a42c408644d20a83052f1d0bf305f615df2f2b137e0af4956e75eed2ce
+ size 2871
checkpoint_520000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2000fdeb6057ead3c4453a3cf6a158eb7d6c4c1606efd98b55583de4e4dae206
+ size 2856
checkpoint_520000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db476c2e3e7cc8e173fd80fea9b7ec7b2df7ee6c858855eec964fb50d8406844
+ size 2851
checkpoint_520000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_1.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50990409881510833553c733bb9e051f915d952fc9680500584f5dae3310ce1a
+ size 2863
checkpoint_520000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_1.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07db68ef0cb70ff99de775928fc1d65a1def121a95d401fc3362e36210e43a58
+ size 2872
checkpoint_520000/state.param_states.decoder.layers_10.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_10.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:801e57944a2eafd280b62fc99f2cc55b50d3b14f7bd80ac83e4ec1762895938d
+ size 2851
checkpoint_520000/state.param_states.decoder.layers_10.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_10.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:49f2485fbec45785649ba9be30ebbee3de94f2ec11fb583912baf085c828131f
+ size 2864
checkpoint_520000/state.param_states.decoder.layers_10.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_10.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:119d2e4daa722e1cfdc832115d695e88edef594db49a6d8128325d7804ff0370
+ size 2856
checkpoint_520000/state.param_states.decoder.layers_11.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_11.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdf4d87949b7f53452161d6cd22aeffd074479092007e89ea5764190de6313c3
+ size 2821
checkpoint_520000/state.param_states.decoder.layers_11.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_11.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfd9cd63e3625de1e36496e9a1d1e49e67f3eda9c1819fb8b900355bd581c23f
+ size 2851
checkpoint_520000/state.param_states.decoder.layers_11.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_11.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b763cb9518f32a335646330731bf51fb1c8b9fb2e1ee27e027febdf6d1c2e054
+ size 2886
checkpoint_520000/state.param_states.decoder.layers_12.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_12.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:792d1ec1611eeda71204a1b52367f7ad34760c16116dba1b1b68fe8c21fedd37
+ size 2840
checkpoint_520000/state.param_states.decoder.layers_12.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_12.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a884db93338f3258c011cb4143ef03c8f1386b09aa19f2627271f679a1282e4
+ size 2865
checkpoint_520000/state.param_states.decoder.layers_12.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_12.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08b48d95c712b3d870a6058087ed9ad6b2437eebab4dad8f5a99717b3cebc746
+ size 2847
checkpoint_520000/state.param_states.decoder.layers_13.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_13.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0818ad04c31af18a654b4173254c48ca5909e77b35c7eed0a7bc365284d87d50
+ size 2862
checkpoint_520000/state.param_states.decoder.layers_13.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_13.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0061ba445280bb97967e972905a978734fa111e78cabf85fbfe28315ef84297
+ size 2872
checkpoint_520000/state.param_states.decoder.layers_13.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_13.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c345640010097e0003956b87861460d7050144c4c1a0dad3f575730692c6903
+ size 2849
checkpoint_520000/state.param_states.decoder.layers_14.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_14.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9eda22c11e9e42fc8a752845e5bc433cb4af78de60a2a123b628ef7da1bf8720
+ size 2786
checkpoint_520000/state.param_states.decoder.layers_14.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_14.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d03a21cb7ca16fbe0cbc83f63d60d57103fefea38bc3c3edb039bdb4fb540046
+ size 2838
checkpoint_520000/state.param_states.decoder.layers_14.pre_self_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_14.pre_self_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1385b16a7acf90bd8b7fbff4d728d8022a0082a7e3a954e07d048f3c8dc9f9c
+ size 2895
checkpoint_520000/state.param_states.decoder.layers_15.pre_cross_attention_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_15.pre_cross_attention_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd4f3fb1eed00e93e67e269e13f9d8a5ee8b8fb35d16722f45276a55ffc11621
+ size 2777
checkpoint_520000/state.param_states.decoder.layers_15.pre_mlp_layer_norm.scale.v/.zarray ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:daaea0b8b1a0dcfac0b0d9f58d65a2da57a63bbfb167fa23901e6a9fc9155af8
+ size 168
checkpoint_520000/state.param_states.decoder.layers_15.pre_mlp_layer_norm.scale.v/0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60546ac08f486413e463988fcaca37bdf269394abe8992e789c24d7075b0d68c
+ size 2826