patrickvonplaten committed
Commit f89cf10
1 Parent(s): 82478a1
bert/config.json CHANGED
@@ -19,7 +19,7 @@
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
- "transformers_version": "4.18.0.dev0",
+ "transformers_version": "4.24.0.dev0",
  "type_vocab_size": 16,
  "use_cache": true,
  "vocab_size": 1124
bert/flax_model-00001-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76be8b528d0075f7aae98d6fa57a6d3c83ae480a8469e668d7b0af968995ac71
+ size 1
bert/flax_model-00002-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53ab17c36edd297243da9ce5ebd3b1c270cb7d92b8ee717ac2ef6c965443c756
+ size 143937
bert/flax_model-00003-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a5d1aab363fc72b29fdc072cb1b4702bb15348829564686ea8647c5cedbd8a5
+ size 65605
bert/flax_model-00004-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1757bde9596ecd2cf3d867791e752a5305cbd3e05a3aab1a67f6c27e646d4c8e
+ size 15291
bert/flax_model-00005-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b374dd41ca7854d79d516a45a8271426611918a91405669010eea3eebdc6a7fa
+ size 19113
bert/flax_model-00006-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce0bf5e3dd35e797d1ec7f2e8a049f7d73bd9ff55c89430829498fdba5247272
+ size 18139
bert/flax_model-00007-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fedaf3a242b497b7801ce8b4ce9da8303032c5836ac1119558074f404315ebbb
+ size 18108
bert/flax_model-00008-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b9865f42a2443621f8565ead277d36ac0e55e54aa29736c0a05dd4ac6ab0b1f
+ size 19113
bert/flax_model-00009-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67716745c51f065915b61b25595facd0a03c71b7455955fc8f0df617de1259ab
+ size 18139
bert/flax_model-00010-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b504e5ca83ed9d2be4c3e1c25219084147e04fc5ccbf673ec329ee590c91a79f
+ size 18108
bert/flax_model-00011-of-00011.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28d4863290b2928c669e71fcb88649639b9e2ccee4f22c356576f50a10b791c2
+ size 19101
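Each flax_model-XXXXX-of-00011.msgpack entry above is a Git LFS pointer, not the shard itself: three key/value lines giving the pointer spec version, the SHA-256 of the payload, and its size in bytes. A small sketch of reading one such pointer, assuming the repo is cloned without `git lfs pull` so the pointer text is still what sits on disk (the path is illustrative):

```python
from pathlib import Path

def parse_lfs_pointer(path):
    """Parse a Git LFS pointer file into a dict of its "key value" lines."""
    pointer = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        pointer[key] = value
    return pointer

# Hypothetical local path; once `git lfs pull` runs, the file is replaced
# by the real msgpack shard and this parser no longer applies.
info = parse_lfs_pointer("bert/flax_model-00002-of-00011.msgpack")
print(info["oid"], info["size"])  # e.g. sha256:53ab17c3... 143937
```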
bert/flax_model.msgpack.index.json ADDED
@@ -0,0 +1,94 @@
+ {
+   "metadata": {
+     "total_size": 351716
+   },
+   "weight_map": {
+     "embeddings/LayerNorm/bias": "flax_model-00004-of-00011.msgpack",
+     "embeddings/LayerNorm/scale": "flax_model-00004-of-00011.msgpack",
+     "embeddings/position_embeddings/embedding": "flax_model-00003-of-00011.msgpack",
+     "embeddings/token_type_embeddings/embedding": "flax_model-00004-of-00011.msgpack",
+     "embeddings/word_embeddings/embedding": "flax_model-00002-of-00011.msgpack",
+     "encoder/layer/0/attention/output/LayerNorm/bias": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/attention/output/LayerNorm/scale": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/attention/output/dense/bias": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/attention/output/dense/kernel": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/attention/self/key/bias": "flax_model-00004-of-00011.msgpack",
+     "encoder/layer/0/attention/self/key/kernel": "flax_model-00004-of-00011.msgpack",
+     "encoder/layer/0/attention/self/query/bias": "flax_model-00004-of-00011.msgpack",
+     "encoder/layer/0/attention/self/query/kernel": "flax_model-00004-of-00011.msgpack",
+     "encoder/layer/0/attention/self/value/bias": "flax_model-00004-of-00011.msgpack",
+     "encoder/layer/0/attention/self/value/kernel": "flax_model-00004-of-00011.msgpack",
+     "encoder/layer/0/intermediate/dense/bias": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/intermediate/dense/kernel": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/output/LayerNorm/bias": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/output/LayerNorm/scale": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/output/dense/bias": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/0/output/dense/kernel": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/1/attention/output/LayerNorm/bias": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/output/LayerNorm/scale": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/output/dense/bias": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/output/dense/kernel": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/self/key/bias": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/self/key/kernel": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/self/query/bias": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/1/attention/self/query/kernel": "flax_model-00005-of-00011.msgpack",
+     "encoder/layer/1/attention/self/value/bias": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/attention/self/value/kernel": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/intermediate/dense/bias": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/intermediate/dense/kernel": "flax_model-00006-of-00011.msgpack",
+     "encoder/layer/1/output/LayerNorm/bias": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/1/output/LayerNorm/scale": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/1/output/dense/bias": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/1/output/dense/kernel": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/attention/output/LayerNorm/bias": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/attention/output/LayerNorm/scale": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/attention/output/dense/bias": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/attention/output/dense/kernel": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/attention/self/key/bias": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/attention/self/key/kernel": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/attention/self/query/bias": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/attention/self/query/kernel": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/attention/self/value/bias": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/attention/self/value/kernel": "flax_model-00007-of-00011.msgpack",
+     "encoder/layer/2/intermediate/dense/bias": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/intermediate/dense/kernel": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/output/LayerNorm/bias": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/output/LayerNorm/scale": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/output/dense/bias": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/2/output/dense/kernel": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/3/attention/output/LayerNorm/bias": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/output/LayerNorm/scale": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/output/dense/bias": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/output/dense/kernel": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/self/key/bias": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/self/key/kernel": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/self/query/bias": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/3/attention/self/query/kernel": "flax_model-00008-of-00011.msgpack",
+     "encoder/layer/3/attention/self/value/bias": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/attention/self/value/kernel": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/intermediate/dense/bias": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/intermediate/dense/kernel": "flax_model-00009-of-00011.msgpack",
+     "encoder/layer/3/output/LayerNorm/bias": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/3/output/LayerNorm/scale": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/3/output/dense/bias": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/3/output/dense/kernel": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/attention/output/LayerNorm/bias": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/attention/output/LayerNorm/scale": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/attention/output/dense/bias": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/attention/output/dense/kernel": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/attention/self/key/bias": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/attention/self/key/kernel": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/attention/self/query/bias": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/attention/self/query/kernel": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/attention/self/value/bias": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/attention/self/value/kernel": "flax_model-00010-of-00011.msgpack",
+     "encoder/layer/4/intermediate/dense/bias": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/intermediate/dense/kernel": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/output/LayerNorm/bias": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/output/LayerNorm/scale": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/output/dense/bias": "flax_model-00011-of-00011.msgpack",
+     "encoder/layer/4/output/dense/kernel": "flax_model-00011-of-00011.msgpack",
+     "pooler/dense/bias": "flax_model-00011-of-00011.msgpack",
+     "pooler/dense/kernel": "flax_model-00011-of-00011.msgpack"
+   }
+ }
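The index maps every flattened parameter name to the shard file that holds it, and `metadata.total_size` records the combined byte size of all shards. A rough sketch of stitching the shards back together by hand, assuming a local checkout with the LFS payloads materialised and that each shard deserialises to a nested Flax state dict whose "/"-joined keys match the weight_map entries:

```python
import json
from pathlib import Path

from flax.serialization import msgpack_restore
from flax.traverse_util import flatten_dict, unflatten_dict

# Hypothetical local checkout of the "bert" folder from this repo; the shards
# must be real msgpack files here (e.g. after `git lfs pull`), not LFS pointers.
folder = Path("bert")
index = json.loads((folder / "flax_model.msgpack.index.json").read_text())

# Restore each referenced shard once and flatten it so its keys line up with
# the "weight_map" entries above.
shards = {
    name: flatten_dict(msgpack_restore((folder / name).read_bytes()), sep="/")
    for name in set(index["weight_map"].values())
}

# Pick every parameter out of the shard the index points to, then rebuild the
# nested parameter dict a Flax model expects.
flat_params = {
    param: shards[shard][param] for param, shard in index["weight_map"].items()
}
params = unflatten_dict(flat_params, sep="/")
print(f"{len(flat_params)} tensors restored")
```

In practice a recent transformers release should resolve this index on its own, so calling `FlaxBertModel.from_pretrained` on the folder ought to load the same parameters without the manual merge.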