HoangHa committed on
Commit 962e79b
1 Parent(s): 24fd987

Saving weights and logs of epoch 6

epoch_6/config.json CHANGED
@@ -12,7 +12,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
-  "max_position_embeddings": 130,
+  "max_position_embeddings": 66,
   "model_type": "roberta",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
epoch_6/flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b56cb15cbbd290fd226e58f75bd44aceff1b1d46a1f0fa171ba2efbee407f7d8
-size 352639294
+oid sha256:1f9496ba2a7047e1588bf1ed92173ecb43ace53f476b45a84f08e3cc9d991b9b
+size 352442685
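The new weight file is 196,609 bytes smaller than the old one, which is consistent with dropping 64 position-embedding rows of 768 float32 values each (64 × 768 × 4 = 196,608 bytes), assuming the hidden size of 768 implied by intermediate_size 3072. A minimal sketch of loading the committed Flax weights; FlaxRobertaModel (the bare encoder) is an assumption here, since the commit does not say which task head was trained:

```python
from transformers import FlaxRobertaModel

# Load config.json and flax_model.msgpack from the committed epoch_6
# directory (fetched via git-lfs so the real weights, not the LFS
# pointer file, are on disk).
model = FlaxRobertaModel.from_pretrained("epoch_6")

print(model.config.max_position_embeddings)  # 66
```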