peterdavidfagan committed
Commit 720a9e3
1 Parent(s): 3caf13a

5607941d6369e28c63912c54d72e028198c06d52703bf97db3f87d0698a3b8fe

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +6 -0
  2. default/checkpoint +1 -0
  3. default/metrics._reduction_counter.value/.zarray +1 -0
  4. default/metrics._reduction_counter.value/0 +0 -0
  5. default/metrics.loss.count/.zarray +1 -0
  6. default/metrics.loss.count/0 +0 -0
  7. default/metrics.loss.total/.zarray +1 -0
  8. default/metrics.loss.total/0 +0 -0
  9. default/opt_state.1.0.count/.zarray +1 -0
  10. default/opt_state.1.0.count/0 +0 -0
  11. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/.zarray +1 -0
  12. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/0.0 +0 -0
  13. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/.zarray +1 -0
  14. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/0.0 +0 -0
  15. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/.zarray +1 -0
  16. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/0.0 +0 -0
  17. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/.zarray +1 -0
  18. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/0.0 +0 -0
  19. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/.zarray +1 -0
  20. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/0.0 +0 -0
  21. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/.zarray +1 -0
  22. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 +3 -0
  23. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/.zarray +1 -0
  24. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/0.0 +0 -0
  25. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/.zarray +1 -0
  26. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 +3 -0
  27. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/.zarray +1 -0
  28. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/0.0.0 +0 -0
  29. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/.zarray +1 -0
  30. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 +3 -0
  31. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/.zarray +1 -0
  32. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/0.0 +0 -0
  33. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/.zarray +1 -0
  34. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 +3 -0
  35. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/.zarray +1 -0
  36. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/0.0.0 +0 -0
  37. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/.zarray +1 -0
  38. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 +3 -0
  39. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/.zarray +1 -0
  40. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/0.0.0 +0 -0
  41. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/.zarray +1 -0
  42. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 +3 -0
  43. default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/.zarray +1 -0
  44. default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/0.0.0 +0 -0
  45. default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/.zarray +1 -0
  46. default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/0 +0 -0
  47. default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/.zarray +1 -0
  48. default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/0.0 +0 -0
  49. default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/.zarray +1 -0
  50. default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/0 +0 -0
.gitattributes CHANGED
@@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
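The six added rules route the largest optimizer-moment chunk files (the ~26–105 MB kernel chunks whose LFS pointers appear further down) through Git LFS, so a plain clone only contains three-line pointer stubs for them. A minimal sketch of downloading the repository with those files resolved, assuming the `huggingface_hub` package; the repo id below is a placeholder, since this diff does not name the repository:

```python
# Hedged sketch: download a snapshot with LFS-backed files materialized.
# The repo_id is a placeholder -- substitute the actual Hub repository name.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(repo_id="peterdavidfagan/<repo-name>")
# A specific commit (e.g. the one shown here) can be pinned via the `revision` argument.
print(local_dir)  # local path containing default/checkpoint and the default/* array directories
```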
default/checkpoint ADDED
@@ -0,0 +1 @@
+ [Binary msgpack tree, rendered as mojibake in this view. It is the checkpoint's structural index: top-level entries for categorical_train_step, continuous_train_step, diffusion_train_step, metrics.{_reduction_counter.value, loss.count, loss.total}, opt_state.1.0.{count, mu.*, nu.*}, opt_state.1.2.count, params.*, rngs.{diffusion, dropout, params, patch_encoding}, step, and text_tokenize_fn, where each stored leaf is mapped to a PLACEHOLDER://<flattened.key.path> reference naming the matching array directory in this commit. The parameter tree spans attention_blocks.ScanEncoder1DBlock_0 (LayerNorm, MLP and multi-head attention weights), posembed_input, categorical_action_head, image_encoder.embedding_function (Conv, Dense and GroupNorm layers plus row/column position embeddings), readout_encoder, and a 12-block T5-style text_encoder (SelfAttention k/o/q/v, DenseReluDense wi/wo, layer norms, relative_attention_bias, final_layer_norm, shared embedding).]
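The `default/checkpoint` aggregate file plus the per-leaf array directories added below match the layout written by Orbax/Flax PyTree checkpointing (a msgpack tree whose `PLACEHOLDER://` entries point at zarr-backed leaves). A minimal restore sketch under that assumption; the local path is hypothetical and the exact `orbax-checkpoint` version is not specified by this commit:

```python
# Hedged sketch: restore the tree that default/checkpoint indexes.
# Assumes a local clone with LFS files pulled and the orbax-checkpoint package installed.
import orbax.checkpoint as ocp

checkpointer = ocp.PyTreeCheckpointer()
state = checkpointer.restore("path/to/local/clone/default")  # hypothetical path

# The restored mapping mirrors the tree above: metrics, opt_state, params, rngs, step, ...
print(sorted(state.keys()))
print(state["step"])  # the saved training step
```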
default/metrics._reduction_counter.value/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics._reduction_counter.value/0 ADDED
Binary file (13 Bytes).
 
default/metrics.loss.count/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics.loss.count/0 ADDED
Binary file (13 Bytes).
 
default/metrics.loss.total/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics.loss.total/0 ADDED
Binary file (13 Bytes).
 
default/opt_state.1.0.count/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/opt_state.1.0.count/0 ADDED
Binary file (13 Bytes).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/0.0 ADDED
Binary file (34.1 kB).
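Each leaf is stored as a zarr v2 array: the `.zarray` file holds the chunk/dtype metadata shown above and the neighbouring `0`/`0.0` files hold the zstd-compressed chunk bytes. A minimal sketch of inspecting one leaf directly, assuming a local clone and the `zarr` (plus `numcodecs`) package:

```python
# Hedged sketch: open a single checkpoint leaf as a plain zarr array.
import zarr

arr = zarr.open(
    "default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias",
    mode="r",
)
print(arr.shape, arr.dtype)  # (12, 768) float32 per the .zarray above; 12 is likely the scanned-layer axis
print(arr[0, :5])            # first few first-moment (mu) values for layer 0
```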
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/0.0 ADDED
Binary file (34.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/0.0 ADDED
Binary file (34.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/0.0 ADDED
Binary file (34.3 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,3072],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,3072],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/0.0 ADDED
Binary file (137 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,3072],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,3072],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ab9c46d92d57770008bde7290df6e51acb48ace3c9ed6cb1899540f85c8a9c0
+ size 105014374
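The committed content for this chunk is a Git LFS pointer rather than the array bytes: `oid sha256:...` is the SHA-256 digest of the real ~105 MB object and `size` is its byte length. After `git lfs pull` (or an LFS-aware download) the fetched file can be checked against the pointer; a small verification sketch, with the working directory assumed to be the clone root:

```python
# Hedged sketch: verify a pulled LFS object against its pointer's oid and size.
import hashlib
from pathlib import Path

path = Path(
    "default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0"
    ".MLPBlock_0.Dense_0.kernel/0.0.0"
)
digest = hashlib.sha256(path.read_bytes()).hexdigest()
print(digest == "7ab9c46d92d57770008bde7290df6e51acb48ace3c9ed6cb1899540f85c8a9c0")
print(path.stat().st_size == 105014374)
```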
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/0.0 ADDED
Binary file (22.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,3072,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,3072,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b767c24a5059549de2d6828a30b421db96faa2982aa2dc987ec44fd699f83796
+ size 105047183
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/0.0.0 ADDED
Binary file (34.3 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c06054557722b5f7d388d473edd7d6cbed0381c50ded743c3c3932902855ea3c
+ size 26234609
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/0.0 ADDED
Binary file (23.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e2e87fdafd1d0354b9b50b4cfbaab8ce8ae26ce8a7ef3f59483f4e61db9bacf
+ size 26233188
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/0.0.0 ADDED
Binary file (34.2 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:40b99903cd2559482e62f4662ef6302efc1786c6aa031265942345f9932a0ba9
+ size 26241526
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/0.0.0 ADDED
Binary file (34.1 kB).
 
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d05b7a4ba7d168bb05bf252405ce3bc9ea6ec31ba26aca605a0810b5888abf0
+ size 26223309
default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1,80,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[1,80,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/0.0.0 ADDED
Binary file (230 kB).
 
default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[256],"zarr_format":2}
default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/0 ADDED
Binary file (1.04 kB).
 
default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[768,256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[768,256],"zarr_format":2}
default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/0.0 ADDED
Binary file (745 kB).
 
default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[64],"zarr_format":2}
default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/0 ADDED
Binary file (265 Bytes).