peterdavidfagan committed
Commit
a438342
1 Parent(s): 79c40a7

decdc90770b1adc1bc439a7b74c63a91fdf79cadbb3091abd8237c9b48f30053

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. .gitattributes +6 -0
  2. default/checkpoint +1 -0
  3. default/metrics._reduction_counter.value/.zarray +1 -0
  4. default/metrics._reduction_counter.value/0 +0 -0
  5. default/metrics.loss.count/.zarray +1 -0
  6. default/metrics.loss.count/0 +0 -0
  7. default/metrics.loss.total/.zarray +1 -0
  8. default/metrics.loss.total/0 +0 -0
  9. default/opt_state.1.0.count/.zarray +1 -0
  10. default/opt_state.1.0.count/0 +0 -0
  11. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/.zarray +1 -0
  12. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/0.0 +0 -0
  13. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/.zarray +1 -0
  14. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/0.0 +0 -0
  15. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/.zarray +1 -0
  16. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/0.0 +0 -0
  17. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/.zarray +1 -0
  18. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/0.0 +0 -0
  19. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/.zarray +1 -0
  20. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/0.0 +0 -0
  21. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/.zarray +1 -0
  22. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 +3 -0
  23. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/.zarray +1 -0
  24. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/0.0 +0 -0
  25. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/.zarray +1 -0
  26. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 +3 -0
  27. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/.zarray +1 -0
  28. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/0.0.0 +0 -0
  29. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/.zarray +1 -0
  30. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 +3 -0
  31. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/.zarray +1 -0
  32. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/0.0 +0 -0
  33. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/.zarray +1 -0
  34. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 +3 -0
  35. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/.zarray +1 -0
  36. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/0.0.0 +0 -0
  37. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/.zarray +1 -0
  38. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 +3 -0
  39. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/.zarray +1 -0
  40. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/0.0.0 +0 -0
  41. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/.zarray +1 -0
  42. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 +3 -0
  43. default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/.zarray +1 -0
  44. default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/0.0.0 +0 -0
  45. default/opt_state.1.0.mu.continuous_action_head.Dense_0.bias/.zarray +1 -0
  46. default/opt_state.1.0.mu.continuous_action_head.Dense_0.bias/0 +0 -0
  47. default/opt_state.1.0.mu.continuous_action_head.Dense_0.kernel/.zarray +1 -0
  48. default/opt_state.1.0.mu.continuous_action_head.Dense_0.kernel/0.0 +0 -0
  49. default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/.zarray +1 -0
  50. default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/0 +0 -0
.gitattributes CHANGED
@@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
default/checkpoint ADDED
@@ -0,0 +1 @@
+ [1-line binary msgpack payload; the raw bytes render only as replacement characters. It appears to be the checkpoint's structure file: a msgpack-encoded tree whose array leaves have been swapped for PLACEHOLDER:// references to the per-array zarr directories committed alongside it. Its top-level keys are categorical_train_step, continuous_train_step, diffusion_train_step, metrics (_reduction_counter.value, loss.count, loss.total), opt_state (1.0.count plus mu and nu moment trees, consistent with an Adam-style optimizer, covering attention_blocks.ScanEncoder1DBlock_0, attention_blocks.posembed_input, continuous_action_head, image_encoder, readout_encoder and a T5-style text_encoder with encoder blocks 0-11, plus 1.2.count), params (the same module tree), rngs (diffusion, dropout, params, patch_encoding), step, and text_tokenize_fn.]
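The structure file is ordinary msgpack, so it can be inspected directly once the repo is cloned. A minimal sketch, assuming a local checkout and that the dump uses Flax's standard msgpack encoding (if it relies on custom extensions, Flax/Orbax's own restore utilities would be needed instead):

from flax import serialization

# Decode the msgpack tree; large array leaves appear as "PLACEHOLDER://<path>"
# strings pointing at the zarr directories added in this commit.
with open("default/checkpoint", "rb") as f:
    tree = serialization.msgpack_restore(f.read())

print(sorted(tree))  # metrics, opt_state, params, rngs, step, ...
print(tree["params"]["continuous_action_head"]["Dense_0"]["kernel"])  # a PLACEHOLDER:// string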
default/metrics._reduction_counter.value/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics._reduction_counter.value/0 ADDED
Binary file (13 Bytes).
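Each metric and parameter lives in its own directory holding a zarr v2 ".zarray" metadata file plus binary chunk files (the 13-byte chunks here are zstd-compressed scalars). The metadata is plain JSON and can be read without any zarr tooling; a minimal sketch, assuming a local checkout:

import json
import numpy as np

# ".zarray" declares dtype, shape, chunk layout and compressor for the chunks next to it.
meta = json.loads(open("default/metrics._reduction_counter.value/.zarray").read())
print(np.dtype(meta["dtype"]))        # int32 (stored little-endian, "<i4")
print(meta["shape"], meta["chunks"])  # [] and [] -> a 0-d (scalar) array
print(meta["compressor"])             # {'id': 'zstd', 'level': 1}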
 
default/metrics.loss.count/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics.loss.count/0 ADDED
Binary file (13 Bytes).
 
default/metrics.loss.total/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics.loss.total/0 ADDED
Binary file (13 Bytes).
 
default/opt_state.1.0.count/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/opt_state.1.0.count/0 ADDED
Binary file (13 Bytes).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/0.0 ADDED
Binary file (34.1 kB).
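The non-scalar entries are regular zarr arrays and can be read back with zarr-python (or tensorstore). The leading 12 in these optimizer-moment shapes presumably comes from 12 encoder layers stacked by Flax's scan, given the ScanEncoder1DBlock_0 module name; that is an inference from the names, not something stated in the commit. A minimal sketch, assuming a local checkout with zarr and numcodecs installed:

import zarr

# The .zarray above declares shape [12, 768], float32, one zstd-compressed chunk ("0.0").
mu_bias = zarr.open(
    "default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias",
    mode="r",
)
print(mu_bias.shape, mu_bias.dtype)  # (12, 768) float32
values = mu_bias[:]                  # numpy array holding the first-moment estimates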
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/0.0 ADDED
Binary file (34.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/0.0 ADDED
Binary file (34 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/0.0 ADDED
Binary file (34.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,3072],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,3072],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/0.0 ADDED
Binary file (137 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,3072],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,3072],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:550762c8e85c42b7c34d9f9e5758298f613be016f86825fba6473fcf125aca2b
+ size 105071752
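Only the six large kernel chunks listed in the .gitattributes additions go through Git LFS, so what is actually committed for them is this three-line pointer: the spec version, the SHA-256 of the payload, and the payload size in bytes; the data itself is fetched on git lfs pull. A rough size check for this chunk, using the shape declared in its .zarray above:

# (12, 768, 3072) float32 elements -> 113,246,208 raw bytes; the 105,071,752-byte
# LFS object is consistent with the zstd level-1 compression declared in .zarray.
raw_bytes = 12 * 768 * 3072 * 4
print(raw_bytes)                # 113246208
print(raw_bytes - 105_071_752)  # 8174456 bytes saved by compression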
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/0.0 ADDED
Binary file (21.7 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,3072,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,3072,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:251df3376e2557d4ae235d17460ff648770ea5d96cb5cf04b7f4d47b2d9f4f29
+ size 104935763
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/0.0.0 ADDED
Binary file (34.4 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0433c986195b192bb1c90a1d3a1135596d79d2a8fb7610bb97bebffed16bbf10
+ size 26210485
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/0.0 ADDED
Binary file (21.9 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3eb2f7bd158dd7cc2837d32a8213ea43193a1aab005cad769803ab12b9a6f85
+ size 26202353
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/.zarray ADDED
@@ -0,0 +1 @@
 
 
1
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/0.0.0 ADDED
Binary file (34.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35e6c5cd62b71f2b764855e1682f1dc56eedcbb5a31818f711945df34800e746
+ size 26216435
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/0.0.0 ADDED
Binary file (34.1 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2554ac75f75e2df2e23734418b61d2a951494ccf6785b06c1654c35364075944
+ size 26184149
default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1,74,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[1,74,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/0.0.0 ADDED
Binary file (211 kB).
 
default/opt_state.1.0.mu.continuous_action_head.Dense_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[7],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[7],"zarr_format":2}
default/opt_state.1.0.mu.continuous_action_head.Dense_0.bias/0 ADDED
Binary file (37 Bytes).
 
default/opt_state.1.0.mu.continuous_action_head.Dense_0.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[768,7],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[768,7],"zarr_format":2}
default/opt_state.1.0.mu.continuous_action_head.Dense_0.kernel/0.0 ADDED
Binary file (19.8 kB).
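For orientation, the shapes above ([7] bias, [768, 7] kernel, here as optimizer moments mirroring params.continuous_action_head.Dense_0) imply a single Dense layer mapping 768-dim features to a 7-dimensional continuous action; what those 7 dimensions encode is not stated in this commit. A minimal numpy sketch of that mapping with placeholder values:

import numpy as np

feats = np.zeros(768, dtype=np.float32)        # hypothetical readout features
kernel = np.zeros((768, 7), dtype=np.float32)  # stands in for continuous_action_head.Dense_0.kernel
bias = np.zeros(7, dtype=np.float32)           # stands in for continuous_action_head.Dense_0.bias
action = feats @ kernel + bias
print(action.shape)                            # (7,)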
 
default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[64],"zarr_format":2}
default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/0 ADDED
Binary file (265 Bytes).