peterdavidfagan committed on
Commit ea2ef9c
1 parent: 3caf13a

Upload folder using huggingface_hub (#1)


- 5607941d6369e28c63912c54d72e028198c06d52703bf97db3f87d0698a3b8fe (720a9e3304c17a0df6029e3af1fa6a02f1b3942b)
- 20ea2c70bbca6035da2fb0edbdfa8ae7de517e94c415bd2d4a975bae89706098 (74bcd2230c3638a759c2fcf0348be3bae4314758)
- 7fae11f440b7adff3e35de256a0b723b2d83bd120f11b58068f55cd05e3d8278 (7e01556a46d3ef27eb1aeb049c6f3ad83911780a)
- b4b5c69298ff0fe3f8bf5f4b5968d2ac1e046f2afb3232daa96777ccf4a15488 (9208dbe76b89960120bb576734d3cec4704f2743)
- caa3832e76ecb2e3120e3271f7b8214847858bf8a14c179b847fff5a97c42457 (17e4d014fda69d8cc96accbeacb25c5d16d1370d)
- ca925b788c4b548c343a28b197262d95894fd64fac276222fa81acfee8a151c7 (c2da55612bbb6ad031ddf2708be37f98bac36d58)
- 1e942d424660ce20045dc26ae23bb14d632a2f594b4fe30f01da4e0a32a8ecb6 (3d4a6d9463ab9d0cbfb5b1ea8d437824a2f200b7)
- 884b5e3c328bacce2a39b7142171b8806676ffd6842d224c1d3419cfad86d6be (4a140104a84ea3129c5b62a212bd883690829bee)
- 799766e61f3559a807431c5332acb9eeeacd8989ed48fb797f55d4549ee8b97a (0edce8f34b01dafeca348a8ce4c72218e488741d)
- f48111962c74a3518a95c2035a9fb6d0d7dc3a9f805c0ccf2ce5c5d5b66ffd7b (16e67333d40f80f410bbf9519511429fb520876a)
- e2446e8ccb2230cfd6b4924f8517cd21cb960e8f1260394a419517033c01b18e (c4ada2dd5269d48c76115406f6e4e34d27dda114)
- 712ae42a8b8f8fdc941bd38acad79331f71e9ba08164be78d9086f2853f72fb8 (9addaaa2690fd90501d410c151ea75ae51774bd6)
- 3d9d6a6536706862ee4243953cbcf7e0d7244d4a2fc97cea1db2d47348ce3b30 (b3dcb356795c9441b56fd5b459bc3321280a1121)
- 51b834a5aa4bc79eef15699b2d6c3091dc561c4bf06b8cd06a44ef9dd7afbec6 (234a67bc6a51251253d68f67ca622712a0dc2785)
- a412e7eb9ddb5eeed5906c58c2098ada10e6537c9b64dfa2a31d715b87a1fb3c (ce4973c899dc86cd2056cc46e6d8dc9f36c91445)
- a004576c3755e5bcbd5475bd813538134f2bca1242aaa99f6bcbf332abb0645d (b7a13d1c03edbbd097e4168e9a09fea13cd6db22)
- 7ac9dcfee8ba177a2d24a45358feb73bdd6c73593b5e9b20ef7d9078a79c07f7 (88d538838704d47d4accd948ffc6ed5ee8b8b520)
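The hash list above appears to be the per-part summary that huggingface_hub adds to the commit description when a folder is uploaded in several LFS steps. A minimal sketch of an equivalent upload, assuming a local checkpoint directory and a hypothetical repo id:

```python
# Minimal sketch of pushing a checkpoint folder with huggingface_hub.
# The folder path and repo id below are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login`
api.upload_folder(
    folder_path="outputs/default",            # local checkpoint directory (assumed)
    path_in_repo="default",                   # mirror the layout shown in this diff
    repo_id="your-username/your-model-repo",  # hypothetical target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```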

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full list.
Files changed (50)
  1. .gitattributes +94 -0
  2. default/checkpoint +1 -0
  3. default/metrics._reduction_counter.value/.zarray +1 -0
  4. default/metrics._reduction_counter.value/0 +0 -0
  5. default/metrics.loss.count/.zarray +1 -0
  6. default/metrics.loss.count/0 +0 -0
  7. default/metrics.loss.total/.zarray +1 -0
  8. default/metrics.loss.total/0 +0 -0
  9. default/opt_state.1.0.count/.zarray +1 -0
  10. default/opt_state.1.0.count/0 +0 -0
  11. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/.zarray +1 -0
  12. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/0.0 +0 -0
  13. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/.zarray +1 -0
  14. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/0.0 +0 -0
  15. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/.zarray +1 -0
  16. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/0.0 +0 -0
  17. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/.zarray +1 -0
  18. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/0.0 +0 -0
  19. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/.zarray +1 -0
  20. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/0.0 +0 -0
  21. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/.zarray +1 -0
  22. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 +3 -0
  23. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/.zarray +1 -0
  24. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/0.0 +0 -0
  25. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/.zarray +1 -0
  26. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 +3 -0
  27. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/.zarray +1 -0
  28. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/0.0.0 +0 -0
  29. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/.zarray +1 -0
  30. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 +3 -0
  31. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/.zarray +1 -0
  32. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/0.0 +0 -0
  33. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/.zarray +1 -0
  34. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 +3 -0
  35. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/.zarray +1 -0
  36. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/0.0.0 +0 -0
  37. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/.zarray +1 -0
  38. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 +3 -0
  39. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/.zarray +1 -0
  40. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/0.0.0 +0 -0
  41. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/.zarray +1 -0
  42. default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 +3 -0
  43. default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/.zarray +1 -0
  44. default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/0.0.0 +0 -0
  45. default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/.zarray +1 -0
  46. default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/0 +0 -0
  47. default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/.zarray +1 -0
  48. default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/0.0 +0 -0
  49. default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/.zarray +1 -0
  50. default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/0 +0 -0
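Each of the entries above follows the zarr v2 array layout: the `.zarray` file holds JSON metadata (shape, chunks, dtype), and companion files such as `0.0` or `0.0.0.0` are the raw chunks. A minimal sketch of inspecting one such leaf after downloading the repo locally (the path is taken from the list above; the `zarr` usage at the end is optional):

```python
# Minimal sketch: inspect one zarr array leaf from the checkpoint layout above.
# Assumes the repository has been downloaded to the current directory.
import json
from pathlib import Path

leaf = Path("default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel")
meta = json.loads((leaf / ".zarray").read_text())
print(meta["shape"], meta["chunks"], meta["dtype"])

# With the `zarr` package installed, the same directory opens as an array:
# import zarr
# arr = zarr.open(str(leaf), mode="r")
# print(arr.shape, arr.dtype)
```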
.gitattributes CHANGED
@@ -33,3 +33,97 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
37
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
38
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
39
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
40
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
41
+ default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
42
+ default/opt_state.1.0.mu.image_encoder.embedding_function.Dense_0.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
43
+ default/opt_state.1.0.nu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
44
+ default/opt_state.1.0.nu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
45
+ default/opt_state.1.0.nu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
46
+ default/opt_state.1.0.nu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
47
+ default/opt_state.1.0.nu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
48
+ default/opt_state.1.0.nu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
49
+ default/opt_state.1.0.nu.image_encoder.embedding_function.Dense_0.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
50
+ default/params.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
51
+ default/params.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 filter=lfs diff=lfs merge=lfs -text
52
+ default/params.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
53
+ default/params.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
54
+ default/params.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
55
+ default/params.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 filter=lfs diff=lfs merge=lfs -text
56
+ default/params.image_encoder.embedding_function.Dense_0.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
57
+ default/params.text_encoder.model.encoder.block.0.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
58
+ default/params.text_encoder.model.encoder.block.0.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
59
+ default/params.text_encoder.model.encoder.block.0.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
60
+ default/params.text_encoder.model.encoder.block.0.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
61
+ default/params.text_encoder.model.encoder.block.0.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
62
+ default/params.text_encoder.model.encoder.block.0.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
63
+ default/params.text_encoder.model.encoder.block.1.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
64
+ default/params.text_encoder.model.encoder.block.1.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
65
+ default/params.text_encoder.model.encoder.block.1.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
66
+ default/params.text_encoder.model.encoder.block.1.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
67
+ default/params.text_encoder.model.encoder.block.1.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
68
+ default/params.text_encoder.model.encoder.block.1.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
69
+ default/params.text_encoder.model.encoder.block.10.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
70
+ default/params.text_encoder.model.encoder.block.10.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
71
+ default/params.text_encoder.model.encoder.block.10.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
72
+ default/params.text_encoder.model.encoder.block.10.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
73
+ default/params.text_encoder.model.encoder.block.10.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
74
+ default/params.text_encoder.model.encoder.block.10.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
75
+ default/params.text_encoder.model.encoder.block.11.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
76
+ default/params.text_encoder.model.encoder.block.11.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
77
+ default/params.text_encoder.model.encoder.block.11.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
78
+ default/params.text_encoder.model.encoder.block.11.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
79
+ default/params.text_encoder.model.encoder.block.11.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
80
+ default/params.text_encoder.model.encoder.block.11.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
81
+ default/params.text_encoder.model.encoder.block.2.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
82
+ default/params.text_encoder.model.encoder.block.2.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
83
+ default/params.text_encoder.model.encoder.block.2.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
84
+ default/params.text_encoder.model.encoder.block.2.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
85
+ default/params.text_encoder.model.encoder.block.2.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
86
+ default/params.text_encoder.model.encoder.block.2.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
87
+ default/params.text_encoder.model.encoder.block.3.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
88
+ default/params.text_encoder.model.encoder.block.3.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
89
+ default/params.text_encoder.model.encoder.block.3.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
90
+ default/params.text_encoder.model.encoder.block.3.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
91
+ default/params.text_encoder.model.encoder.block.3.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
92
+ default/params.text_encoder.model.encoder.block.3.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
93
+ default/params.text_encoder.model.encoder.block.4.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
94
+ default/params.text_encoder.model.encoder.block.4.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
95
+ default/params.text_encoder.model.encoder.block.4.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
96
+ default/params.text_encoder.model.encoder.block.4.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
97
+ default/params.text_encoder.model.encoder.block.4.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
98
+ default/params.text_encoder.model.encoder.block.4.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
99
+ default/params.text_encoder.model.encoder.block.5.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
100
+ default/params.text_encoder.model.encoder.block.5.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
101
+ default/params.text_encoder.model.encoder.block.5.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
102
+ default/params.text_encoder.model.encoder.block.5.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
103
+ default/params.text_encoder.model.encoder.block.5.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
104
+ default/params.text_encoder.model.encoder.block.5.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
105
+ default/params.text_encoder.model.encoder.block.6.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
106
+ default/params.text_encoder.model.encoder.block.6.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
107
+ default/params.text_encoder.model.encoder.block.6.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
108
+ default/params.text_encoder.model.encoder.block.6.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
109
+ default/params.text_encoder.model.encoder.block.6.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
110
+ default/params.text_encoder.model.encoder.block.6.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
111
+ default/params.text_encoder.model.encoder.block.7.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
112
+ default/params.text_encoder.model.encoder.block.7.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
113
+ default/params.text_encoder.model.encoder.block.7.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
114
+ default/params.text_encoder.model.encoder.block.7.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
115
+ default/params.text_encoder.model.encoder.block.7.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
116
+ default/params.text_encoder.model.encoder.block.7.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
117
+ default/params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
118
+ default/params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
119
+ default/params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
120
+ default/params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
121
+ default/params.text_encoder.model.encoder.block.8.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
122
+ default/params.text_encoder.model.encoder.block.8.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
123
+ default/params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.k.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
124
+ default/params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.o.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
125
+ default/params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.q.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
126
+ default/params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.v.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
127
+ default/params.text_encoder.model.encoder.block.9.layer.1.DenseReluDense.wi.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
128
+ default/params.text_encoder.model.encoder.block.9.layer.1.DenseReluDense.wo.kernel/0.0 filter=lfs diff=lfs merge=lfs -text
129
+ default/params.text_encoder.model.shared.embedding/0.0 filter=lfs diff=lfs merge=lfs -text
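The entries added to .gitattributes above mark the large kernel and embedding chunks so that Git LFS stores them as pointers. A rough sketch of listing which local files such patterns would route to LFS, using plain fnmatch matching rather than git's full attribute resolution:

```python
# Rough sketch: list files matched by the LFS patterns in .gitattributes.
# Uses fnmatch-style matching only; git's own attribute resolution is more precise.
from fnmatch import fnmatch
from pathlib import Path

patterns = [
    line.split()[0]
    for line in Path(".gitattributes").read_text().splitlines()
    if "filter=lfs" in line
]

for path in sorted(Path(".").rglob("*")):
    if path.is_file() and any(fnmatch(str(path), pat) for pat in patterns):
        print("LFS:", path)
```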
default/checkpoint ADDED
@@ -0,0 +1 @@
1
+ [binary msgpack payload: a checkpoint index mapping every params / opt_state / metrics leaf (attention blocks, image encoder, text encoder, action heads) to a PLACEHOLDER:// reference resolved against the zarr arrays listed above; non-text bytes and the truncated remainder are omitted here]
ck.8.layer.0.SelfAttention.k.kernel�o��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.o.kernel�q��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.q.kernel�v��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.0.SelfAttention.v.kernel�layer_norm��weight�QPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.0.layer_norm.weight�1��DenseReluDense��wi��kernel�XPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.1.DenseReluDense.wi.kernel�wo��kernel�XPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.1.DenseReluDense.wo.kernel�layer_norm��weight�QPLACEHOLDER://params.text_encoder.model.encoder.block.8.layer.1.layer_norm.weight�9��layer��0��SelfAttention��k��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.k.kernel�o��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.o.kernel�q��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.q.kernel�v��kernel�VPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.0.SelfAttention.v.kernel�layer_norm��weight�QPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.0.layer_norm.weight�1��DenseReluDense��wi��kernel�XPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.1.DenseReluDense.wi.kernel�wo��kernel�XPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.1.DenseReluDense.wo.kernel�layer_norm��weight�QPLACEHOLDER://params.text_encoder.model.encoder.block.9.layer.1.layer_norm.weight�final_layer_norm��weight�GPLACEHOLDER://params.text_encoder.model.encoder.final_layer_norm.weight�shared��embedding�8PLACEHOLDER://params.text_encoder.model.shared.embedding�rngs��diffusion�PLACEHOLDER://rngs.diffusion�dropout�PLACEHOLDER://rngs.dropout�params�PLACEHOLDER://rngs.params�patch_encoding�!PLACEHOLDER://rngs.patch_encoding�step�PLACEHOLDER://step�text_tokenize_fn�
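The checkpoint metadata above, together with the array directories added below, corresponds to a Zarr-backed Orbax/Flax checkpoint. The following is a minimal, untested sketch of restoring it as a single pytree with orbax-checkpoint; the local path "checkpoint/", the use of PyTreeCheckpointer, and the top-level key names are assumptions inferred from the file listing, not documented in this repository:

import orbax.checkpoint as ocp

# Restore the whole checkpoint tree from the Orbax item directory "default/"
# (hypothetical local path; assumes the repository was downloaded beforehand).
checkpointer = ocp.PyTreeCheckpointer()
restored = checkpointer.restore("checkpoint/default")
# Expected top-level entries, judging from the array names below:
# metrics, opt_state, params, rngs, step.
print(sorted(restored.keys()))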
default/metrics._reduction_counter.value/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics._reduction_counter.value/0 ADDED
Binary file (13 Bytes)
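Each .zarray file added below is standard Zarr v2 array metadata (zstd level-1 compressor, C order, little-endian dtypes), so individual arrays can be inspected without restoring the whole tree. A minimal, untested sketch, again assuming a hypothetical local checkpoint/ clone:

import zarr

# Open one scalar array directly from its Zarr directory (path is hypothetical).
arr = zarr.open("checkpoint/default/metrics._reduction_counter.value", mode="r")
print(arr.shape, arr.dtype)  # () int32, matching the .zarray metadata above
print(arr[...])              # the stored scalar value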
default/metrics.loss.count/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics.loss.count/0 ADDED
Binary file (13 Bytes)
default/metrics.loss.total/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/metrics.loss.total/0 ADDED
Binary file (13 Bytes)
default/opt_state.1.0.count/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<i4","fill_value":null,"filters":null,"order":"C","shape":[],"zarr_format":2}
default/opt_state.1.0.count/0 ADDED
Binary file (13 Bytes)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.bias/0.0 ADDED
Binary file (34.1 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_0.scale/0.0 ADDED
Binary file (34.2 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.bias/0.0 ADDED
Binary file (34.2 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.LayerNorm_1.scale/0.0 ADDED
Binary file (34.3 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,3072],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,3072],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.bias/0.0 ADDED
Binary file (137 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,3072],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,3072],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_0.kernel/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ab9c46d92d57770008bde7290df6e51acb48ace3c9ed6cb1899540f85c8a9c0
+ size 105014374
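The large kernel chunks, such as the one above, appear in the diff only as Git LFS pointers (spec version, sha256 oid, byte size). As a rough sanity check, and assuming the chunk holds the full [12, 768, 3072] float32 array described by its .zarray metadata, the expected uncompressed size is:

import numpy as np

# 12 * 768 * 3072 float32 elements * 4 bytes = 113,246,208 bytes (~113 MB) raw;
# the LFS pointer above reports 105,014,374 bytes after zstd level-1 compression.
print(int(np.prod([12, 768, 3072])) * np.dtype("<f4").itemsize)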
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.bias/0.0 ADDED
Binary file (22.2 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,3072,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,3072,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MLPBlock_0.Dense_1.kernel/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b767c24a5059549de2d6828a30b421db96faa2982aa2dc987ec44fd699f83796
+ size 105047183
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.bias/0.0.0 ADDED
Binary file (34.3 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.key.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c06054557722b5f7d388d473edd7d6cbed0381c50ded743c3c3932902855ea3c
+ size 26234609
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.bias/0.0 ADDED
Binary file (23.2 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.out.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e2e87fdafd1d0354b9b50b4cfbaab8ce8ae26ce8a7ef3f59483f4e61db9bacf
+ size 26233188
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.bias/0.0.0 ADDED
Binary file (34.2 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.query.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:40b99903cd2559482e62f4662ef6302efc1786c6aa031265942345f9932a0ba9
+ size 26241526
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.bias/0.0.0 ADDED
Binary file (34.1 kB)
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[12,768,12,64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[12,768,12,64],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.ScanEncoder1DBlock_0.MultiHeadDotProductAttention_0.value.kernel/0.0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d05b7a4ba7d168bb05bf252405ce3bc9ea6ec31ba26aca605a0810b5888abf0
+ size 26223309
default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1,80,768],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[1,80,768],"zarr_format":2}
default/opt_state.1.0.mu.attention_blocks.posembed_input.pos_embedding/0.0.0 ADDED
Binary file (230 kB)
default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[256],"zarr_format":2}
default/opt_state.1.0.mu.categorical_action_head.Dense_0.bias/0 ADDED
Binary file (1.04 kB)
default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[768,256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[768,256],"zarr_format":2}
default/opt_state.1.0.mu.categorical_action_head.Dense_0.kernel/0.0 ADDED
Binary file (745 kB)
default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[64],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"<f4","fill_value":null,"filters":null,"order":"C","shape":[64],"zarr_format":2}
default/opt_state.1.0.mu.image_encoder.embedding_function.Conv_0.bias/0 ADDED
Binary file (265 Bytes)