张瀚灵 committed
Commit 12d5e7e · 1 parent: 1e76ed1

init commit

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.
added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   "  ": 50286,
+   "   ": 50285,
+   "    ": 50284,
+   "     ": 50283,
+   "      ": 50282,
+   "       ": 50281,
+   "        ": 50280,
+   "         ": 50279,
+   "          ": 50278,
+   "           ": 50277,
+   "            ": 50276,
+   "             ": 50275,
+   "              ": 50274,
+   "               ": 50273,
+   "                ": 50272,
+   "                 ": 50271,
+   "                  ": 50270,
+   "                   ": 50269,
+   "                    ": 50268,
+   "                     ": 50267,
+   "                      ": 50266,
+   "                       ": 50265,
+   "                        ": 50264,
+   "                         ": 50263,
+   "                          ": 50262,
+   "                           ": 50261,
+   "                            ": 50260,
+   "                             ": 50259,
+   "                              ": 50258,
+   "                               ": 50257
+ }
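The 38 added tokens above extend the base GPT-2 vocabulary (IDs 0-50256) with runs of 2-9 tabs and 2-31 spaces, so common code indentation encodes as a single token (the model config below pads vocab_size to 51200). A minimal sketch of how a tokenizer picks these up, assuming the commit's tokenizer files are gathered in a local ./model directory (the path is illustrative, not part of the commit):

# Sketch: check that indentation runs hit the added whitespace tokens.
# Assumes vocab.json, merges.txt, added_tokens.json, tokenizer.json and
# tokenizer_config.json from this commit live in ./model (hypothetical path).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./model")

print(tok.encode("\t\t\treturn x")[0])   # expected 50293, the three-tab token
print(tok.encode("        x = 1")[0])    # expected 50280, the eight-space token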
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
mlc-chat-config.json ADDED
@@ -0,0 +1,364 @@
+ {
+   "version": "0.1.0",
+   "model_type": "asvd_phi",
+   "quantization": "q0f16",
+   "model_config": {
+     "vocab_size": 51200,
+     "hidden_size": 2560,
+     "intermediate_size": 10240,
+     "num_hidden_layers": 32,
+     "num_attention_heads": 32,
+     "layer_norm_eps": 1e-05,
+     "position_embedding_base": 10000.0,
+     "partial_rotary_factor": 0.4,
+     "num_key_value_heads": 32,
+     "context_window_size": 2048,
+     "prefill_chunk_size": 128,
+     "head_dim": 80,
+     "truncation_ranks": {
+       "model.layers.0.mlp.fc1": [1475, 163],
+       "model.layers.0.mlp.fc2": [1290, 143],
+       "model.layers.0.self_attn.dense": [922, 102],
+       "model.layers.0.self_attn.k_proj": [922, 102],
+       "model.layers.0.self_attn.q_proj": [807, 89],
+       "model.layers.1.mlp.fc1": [1475, 163],
+       "model.layers.1.self_attn.dense": [807, 89],
+       "model.layers.1.self_attn.k_proj": [692, 76],
+       "model.layers.1.self_attn.v_proj": [576, 64],
+       "model.layers.10.self_attn.q_proj": [692, 76],
+       "model.layers.11.self_attn.dense": [461, 51],
+       "model.layers.11.self_attn.v_proj": [692, 76],
+       "model.layers.12.self_attn.dense": [1037, 115],
+       "model.layers.12.self_attn.q_proj": [807, 89],
+       "model.layers.12.self_attn.v_proj": [922, 102],
+       "model.layers.13.self_attn.dense": [807, 89],
+       "model.layers.13.self_attn.q_proj": [576, 64],
+       "model.layers.14.self_attn.dense": [1037, 115],
+       "model.layers.14.self_attn.q_proj": [807, 89],
+       "model.layers.15.self_attn.dense": [1037, 115],
+       "model.layers.15.self_attn.q_proj": [807, 89],
+       "model.layers.16.self_attn.dense": [922, 102],
+       "model.layers.16.self_attn.k_proj": [922, 102],
+       "model.layers.17.self_attn.dense": [1037, 115],
+       "model.layers.17.self_attn.q_proj": [807, 89],
+       "model.layers.18.self_attn.dense": [922, 102],
+       "model.layers.19.self_attn.k_proj": [922, 102],
+       "model.layers.19.self_attn.v_proj": [922, 102],
+       "model.layers.2.mlp.fc1": [1475, 163],
+       "model.layers.2.self_attn.dense": [807, 89],
+       "model.layers.2.self_attn.k_proj": [1037, 115],
+       "model.layers.2.self_attn.q_proj": [461, 51],
+       "model.layers.20.self_attn.dense": [807, 89],
+       "model.layers.20.self_attn.q_proj": [1037, 115],
+       "model.layers.21.self_attn.dense": [692, 76],
+       "model.layers.21.self_attn.q_proj": [1037, 115],
+       "model.layers.22.self_attn.dense": [1037, 115],
+       "model.layers.22.self_attn.q_proj": [807, 89],
+       "model.layers.23.self_attn.dense": [922, 102],
+       "model.layers.23.self_attn.q_proj": [922, 102],
+       "model.layers.23.self_attn.v_proj": [692, 76],
+       "model.layers.24.self_attn.dense": [461, 51],
+       "model.layers.24.self_attn.q_proj": [922, 102],
+       "model.layers.25.self_attn.k_proj": [576, 64],
+       "model.layers.26.self_attn.k_proj": [922, 102],
+       "model.layers.26.self_attn.q_proj": [1037, 115],
+       "model.layers.27.self_attn.dense": [692, 76],
+       "model.layers.27.self_attn.k_proj": [807, 89],
+       "model.layers.28.self_attn.dense": [576, 64],
+       "model.layers.28.self_attn.q_proj": [922, 102],
+       "model.layers.29.self_attn.q_proj": [922, 102],
+       "model.layers.29.self_attn.v_proj": [807, 89],
+       "model.layers.3.self_attn.dense": [922, 102],
+       "model.layers.3.self_attn.k_proj": [1037, 115],
+       "model.layers.30.self_attn.dense": [922, 102],
+       "model.layers.30.self_attn.q_proj": [807, 89],
+       "model.layers.31.self_attn.dense": [1037, 115],
+       "model.layers.31.self_attn.q_proj": [1037, 115],
+       "model.layers.4.self_attn.dense": [807, 89],
+       "model.layers.4.self_attn.k_proj": [1037, 115],
+       "model.layers.4.self_attn.q_proj": [692, 76],
+       "model.layers.5.self_attn.k_proj": [1037, 115],
+       "model.layers.5.self_attn.q_proj": [807, 89],
+       "model.layers.6.self_attn.dense": [807, 89],
+       "model.layers.6.self_attn.q_proj": [922, 102],
+       "model.layers.7.self_attn.q_proj": [692, 76],
+       "model.layers.8.self_attn.dense": [1037, 115],
+       "model.layers.8.self_attn.k_proj": [692, 76],
+       "model.layers.8.self_attn.q_proj": [1037, 115],
+       "model.layers.9.self_attn.dense": [692, 76],
+       "model.layers.9.self_attn.q_proj": [576, 64]
+     },
+     "tensor_parallel_shards": 1,
+     "max_batch_size": 128
+   },
+   "vocab_size": 51200,
+   "context_window_size": 2048,
+   "sliding_window_size": -1,
+   "prefill_chunk_size": 128,
+   "attention_sink_size": -1,
+   "tensor_parallel_shards": 1,
+   "pipeline_parallel_stages": 1,
+   "temperature": 1.0,
+   "presence_penalty": 0.0,
+   "frequency_penalty": 0.0,
+   "repetition_penalty": 1.0,
+   "top_p": 1.0,
+   "tokenizer_files": [
+     "tokenizer.json",
+     "vocab.json",
+     "merges.txt",
+     "added_tokens.json",
+     "tokenizer_config.json"
+   ],
+   "tokenizer_info": {
+     "token_postproc_method": "byte_level",
+     "prepend_space_in_encode": false,
+     "strip_space_in_decode": false
+   },
+   "conv_template": {
+     "name": "LM",
+     "system_template": "{system_message}",
+     "system_message": "",
+     "system_prefix_token_ids": [1],
+     "add_role_after_system_message": true,
+     "roles": {
+       "user": "",
+       "assistant": ""
+     },
+     "role_templates": {
+       "user": "{user_message}",
+       "assistant": "{assistant_message}",
+       "tool": "{tool_message}"
+     },
+     "messages": [],
+     "seps": [""],
+     "role_content_sep": "",
+     "role_empty_sep": "",
+     "stop_str": [],
+     "stop_token_ids": [2],
+     "function_string": "",
+     "use_function_calling": false
+   },
+   "pad_token_id": 0,
+   "bos_token_id": 50256,
+   "eos_token_id": 50256
+ }
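The model_type asvd_phi together with the truncation_ranks table marks this as an ASVD-style (activation-aware SVD) low-rank compression of Phi-2: each listed linear layer is stored as a rank-k factorization W ≈ U·V, cutting its weights from out·in to k·(out+in). (Note that stop_token_ids [2] and system_prefix_token_ids [1] come from the generic "LM" conversation template, while the tokenizer's own eos_token_id is 50256.) A rough sketch of the parameter savings the table implies, assuming the first number of each pair is the retained rank (the commit does not document what the two numbers mean):

# Sketch: estimate parameter savings implied by truncation_ranks.
# Assumption (not documented in the commit): the first entry of each
# [a, b] pair is the retained SVD rank k; a rank-k factorization of an
# (out x in) linear layer stores k*(out + in) weights instead of out*in.
import json

cfg = json.load(open("mlc-chat-config.json"))["model_config"]
h, inter = cfg["hidden_size"], cfg["intermediate_size"]   # 2560, 10240

# Layer shapes (out, in) for the Phi-2 module names that appear above.
shapes = {
    "fc1": (inter, h), "fc2": (h, inter),
    "q_proj": (h, h), "k_proj": (h, h), "v_proj": (h, h), "dense": (h, h),
}

dense_total = svd_total = 0
for name, (k, _) in cfg["truncation_ranks"].items():
    out, inp = shapes[name.rsplit(".", 1)[-1]]
    dense_total += out * inp
    svd_total += k * (out + inp)

print(f"original: {dense_total/1e6:.1f}M params, factorized: {svd_total/1e6:.1f}M params")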
ndarray-cache.json ADDED
The diff for this file is too large to render. See raw diff
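ndarray-cache.json is the index the runtime uses to locate tensors inside the params_shard_*.bin files below. A small sketch of reading it, assuming it follows the usual MLC/TVM NDArray-cache layout (a top-level "records" list with one entry per shard file, each holding a nested "records" list of tensors); the field names are assumptions, since the file's diff is not rendered here:

# Sketch: list a few tensors per weight shard from ndarray-cache.json.
# Field names ("records", "dataPath", "nbytes", "name", "shape", "dtype")
# assume the standard MLC/TVM NDArray-cache schema; the actual file is
# not shown in this commit view.
import json

cache = json.load(open("ndarray-cache.json"))
for shard in cache["records"][:2]:        # first two shard files
    print(shard["dataPath"], shard["nbytes"], "bytes")
    for rec in shard["records"][:3]:      # first few tensors in the shard
        print("  ", rec["name"], rec["shape"], rec["dtype"])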
 
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76772b626f4941182e17194c49c813293a6c930e99f41c4266bbf320331364bc
+ size 262144000
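Each params_shard_*.bin in this commit is a Git LFS pointer rather than the weights themselves: the three lines give the LFS spec version, the SHA-256 of the real payload, and its size in bytes (262144000 = 250 MiB for this shard). A small sketch for verifying a downloaded shard against its pointer, using the values from params_shard_0.bin above:

# Sketch: verify a downloaded weight shard against its Git LFS pointer
# (spec: https://git-lfs.github.com/spec/v1).
import hashlib, os

def verify_shard(path: str, oid_hex: str, size: int) -> bool:
    if os.path.getsize(path) != size:          # cheap size check first
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == oid_hex

# Values copied from the params_shard_0.bin pointer above.
print(verify_shard(
    "params_shard_0.bin",
    "76772b626f4941182e17194c49c813293a6c930e99f41c4266bbf320331364bc",
    262144000,
))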
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeed8f9939e08ec774f4b20528a5dcae4145adcd9d6d733b7eb878593f42558c
+ size 52428800
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16183de51d729da63f07cedf57d7d38447200758e6937fb475b5bf8aec50994f
+ size 52428800
params_shard_100.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93fb492e0e0ff5b6bfa72fd65b6664291a8d936555449f209daaa2956ab8df1d
+ size 52428800
params_shard_101.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d776d394839d5005822818435f05792a5f809831fc313bd90a3aa5d65007b4d7
+ size 52428800
params_shard_102.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1fa323830a21bc65e1afe9d5cb9b8de9b43f20a5d6bb402bfda45dfa67abfb54
+ size 21611520
params_shard_103.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9df31839a15b95359b0edaf7824d85b862300279655028d7436081ea7ae45dd9
+ size 29501440
params_shard_104.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76a816336979b2eeeac405f248dc84b96758ac26332027fbe5638a9a343097e2
+ size 52428800
params_shard_105.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:370b614a4beecb577d66804d6f567d002a15de7324a0919969379b56b751b5c4
+ size 52428800
params_shard_106.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b84c00a1b1bd7284ce34018fec9ceef136b55ffdf3e6475649a35804718a29b
+ size 26920960
params_shard_107.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e8e2f77488a99d3fe64f4bd723974d20f3446daeeaf001b7558ccbf78c22c19
+ size 23603200
params_shard_108.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3aeccd2080c6b511a8d0680cd8f001d1fa2a41b034e6119709d50d5b330ed12
+ size 52428800
params_shard_109.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a25b7b6656cc7f0a209fa03657592e8261f978b0b306fda4df6e4eb250c555e7
+ size 52428800
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c0b18a42582a54820c88cf8e3043793d7dbb4027f7bb3c22cb382533215ba1c
+ size 52428800
params_shard_110.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3bcd3fb89ff881fb3015441c39abb8c461540ec939a92db1731eecfb02909c58
+ size 26260480
params_shard_111.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e79a45e42f03563c4641ff230f59bd2c0fc12386b53d3030b2b6a841ea5aafc
+ size 20981760
params_shard_112.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:286dd1c2d7d9e153a38cefb894b53902500e015281fb6a0633e279d4472c236d
+ size 52428800
params_shard_113.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b5ed4b65683ccdb7889d33db459e0b4d3730a208c89db1ad8341df6291c845e
+ size 52428800
params_shard_114.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2292240d858315ab0f39e89511d05edcf8d96d516692789a6210a31aee37c417
+ size 32814080
params_shard_115.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5f8e1961ad40c75662eb949f40e402f4779728225c1e241dfb68e01b55b36e2
+ size 52428800
params_shard_116.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e57b7cbef72448dcc4a0c5d0dd6e6601eb59071537314c8485d1f2920e8e27d
+ size 52428800
params_shard_117.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30ade731a0d9a731594b224b362e6e76887a8d04f71cf9997dc5aa40f355d253
+ size 32824320
params_shard_118.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:376906f61bf403d59b67e033ca11cf41eaa51f93a9917e40fa25c653263484a7
+ size 32778240
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fb8aa4a34ebc1a8bb239ab95baf1c9a99c8086e716ced8969d25966ad1dda15
+ size 32819200
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1dda264a34ba2a65adaca8744b812892498818da9e0b67b61039b73084ff9a1
+ size 20981760
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c02ae6e577c9fb237a89852f1caebd1a4e0b2df46c22fb4b647bec411df60002
+ size 52428800
params_shard_15.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a22bc57bf53f9dadea0bf6b81b1bac140afc6e4a17426f5053c5b1311b25518e
+ size 52428800
params_shard_16.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4089bfceefa8c4a62127142bb5df044a0ccc583c815639652b9945e25589007b
+ size 31508480
params_shard_17.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ffde4961a1fa907fe4995b5ac415087b1d65cbb3cef25102841050bb62f0de3
+ size 52428800
params_shard_18.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b7dcfc0a1de379ee30df31eeda004bdec8d05529b388bff4165eabab0043c4cd
+ size 52428800
params_shard_19.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc76c900df2a7729f968e89390b0db71941c6e580f7d1ccbb5be240fca6859bb
+ size 32819200
params_shard_2.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:308d082548ba0bac75d949a779225c5aded8cbd091d4f297e7d9e9beb4f01fc2
+ size 262144000
params_shard_20.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa5d994175a60d5c9de27963076ec3626f31791e75e5f43adbf9b69091e6243f
+ size 52428800
params_shard_21.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46efc839fe87a3cae558933e3046009a90c11da7fc93344b32c58ae5ff0e4556
+ size 52428800
params_shard_22.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b92df04d07dab90fe0181aa3482e87ee4d5f044eec0eedbc1d51d3a1c31c191d
+ size 32814080
params_shard_23.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbee17aef27542ac668cc323759b75c354ae07666b00af7ff23c038870ce9286
+ size 28856320
params_shard_24.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:691c1b56e352707c351e51119be5278bc38c2d8128fdf4ab2abd7537a7a07c60
+ size 52428800
params_shard_25.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c36c7522722420bff4c38abcedb95c124b2e6f626f182154809ded56d224901f
+ size 52428800
params_shard_26.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:056b49043ccc291a16eba0506b5bacf9e4f6e7c64786fd4d58ceccb95f79f15a
+ size 24949760
params_shard_27.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f806ca92e1cdec9a383caf6e0f1f9d3f08210c12ef1ad4dcb2ef51368ba9195a
+ size 22292480
params_shard_28.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:061983a4beaf49e6d215c216c6b413ac2171b43790363c5ec7d9b3699b1a2bc3
+ size 52428800
params_shard_29.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3a365a5a5fe47a56fb8ff1f2c777fc867ec171f7046087a971893a0ae3186bf
+ size 52428800
params_shard_3.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:497b234ee8017c00becca5c6871b4fb2ad199a67b554697da8962f6a45fb1ade
+ size 30366720
params_shard_30.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27059127c20b97f45c9b4f6f60ad2496a427b13dc93b11c30cadbb5232db6a17
+ size 24949760
params_shard_31.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6edb4bf3b3be0ff1c41f98f89ebaa26272d7a3d30f7bb9c2987ba49717c1760a
+ size 22292480
params_shard_32.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef556acd0cfbf9564f5c50139682b82eeb2040a5a54aed1034383b6c01b719bf
+ size 52428800