zbw committed
Commit 7522051 · 1 Parent(s): 3bb2038

LC-Rec instruments.
added_tokens.json ADDED
@@ -0,0 +1,932 @@
+ {
+ "<a_100>": 32000,
+ "<a_102>": 32001,
+ "<a_107>": 32002,
+ "<a_109>": 32003,
+ "<a_111>": 32004,
+ "<a_112>": 32005,
+ "<a_113>": 32006,
+ "<a_114>": 32007,
+ "<a_115>": 32008,
+ "<a_118>": 32009,
+ "<a_11>": 32010,
+ "<a_121>": 32011,
+ "<a_122>": 32012,
+ "<a_124>": 32013,
+ "<a_127>": 32014,
+ "<a_128>": 32015,
+ "<a_129>": 32016,
+ "<a_12>": 32017,
+ "<a_131>": 32018,
+ "<a_132>": 32019,
+ "<a_133>": 32020,
+ "<a_134>": 32021,
+ "<a_136>": 32022,
+ "<a_137>": 32023,
+ "<a_138>": 32024,
+ "<a_13>": 32025,
+ "<a_140>": 32026,
+ "<a_142>": 32027,
+ "<a_144>": 32028,
+ "<a_146>": 32029,
+ "<a_147>": 32030,
+ "<a_148>": 32031,
+ "<a_14>": 32032,
+ "<a_150>": 32033,
+ "<a_152>": 32034,
+ "<a_157>": 32035,
+ "<a_159>": 32036,
+ "<a_15>": 32037,
+ "<a_165>": 32038,
+ "<a_166>": 32039,
+ "<a_167>": 32040,
+ "<a_168>": 32041,
+ "<a_169>": 32042,
+ "<a_16>": 32043,
+ "<a_170>": 32044,
+ "<a_171>": 32045,
+ "<a_172>": 32046,
+ "<a_176>": 32047,
+ "<a_178>": 32048,
+ "<a_179>": 32049,
+ "<a_17>": 32050,
+ "<a_181>": 32051,
+ "<a_182>": 32052,
+ "<a_183>": 32053,
+ "<a_185>": 32054,
+ "<a_186>": 32055,
+ "<a_187>": 32056,
+ "<a_189>": 32057,
+ "<a_190>": 32058,
+ "<a_191>": 32059,
+ "<a_193>": 32060,
+ "<a_194>": 32061,
+ "<a_195>": 32062,
+ "<a_196>": 32063,
+ "<a_197>": 32064,
+ "<a_198>": 32065,
+ "<a_199>": 32066,
+ "<a_1>": 32067,
+ "<a_201>": 32068,
+ "<a_202>": 32069,
+ "<a_206>": 32070,
+ "<a_207>": 32071,
+ "<a_208>": 32072,
+ "<a_209>": 32073,
+ "<a_20>": 32074,
+ "<a_210>": 32075,
+ "<a_211>": 32076,
+ "<a_212>": 32077,
+ "<a_213>": 32078,
+ "<a_214>": 32079,
+ "<a_215>": 32080,
+ "<a_216>": 32081,
+ "<a_218>": 32082,
+ "<a_219>": 32083,
+ "<a_21>": 32084,
+ "<a_221>": 32085,
+ "<a_225>": 32086,
+ "<a_227>": 32087,
+ "<a_228>": 32088,
+ "<a_22>": 32089,
+ "<a_231>": 32090,
+ "<a_232>": 32091,
+ "<a_234>": 32092,
+ "<a_235>": 32093,
+ "<a_238>": 32094,
+ "<a_239>": 32095,
+ "<a_23>": 32096,
+ "<a_244>": 32097,
+ "<a_246>": 32098,
+ "<a_247>": 32099,
+ "<a_248>": 32100,
+ "<a_249>": 32101,
+ "<a_24>": 32102,
+ "<a_250>": 32103,
+ "<a_251>": 32104,
+ "<a_252>": 32105,
+ "<a_254>": 32106,
+ "<a_255>": 32107,
+ "<a_26>": 32108,
+ "<a_28>": 32109,
+ "<a_2>": 32110,
+ "<a_30>": 32111,
+ "<a_31>": 32112,
+ "<a_32>": 32113,
+ "<a_35>": 32114,
+ "<a_36>": 32115,
+ "<a_37>": 32116,
+ "<a_39>": 32117,
+ "<a_40>": 32118,
+ "<a_41>": 32119,
+ "<a_46>": 32120,
+ "<a_47>": 32121,
+ "<a_48>": 32122,
+ "<a_49>": 32123,
+ "<a_4>": 32124,
+ "<a_50>": 32125,
+ "<a_51>": 32126,
+ "<a_53>": 32127,
+ "<a_54>": 32128,
+ "<a_55>": 32129,
+ "<a_56>": 32130,
+ "<a_58>": 32131,
+ "<a_59>": 32132,
+ "<a_60>": 32133,
+ "<a_61>": 32134,
+ "<a_62>": 32135,
+ "<a_63>": 32136,
+ "<a_64>": 32137,
+ "<a_6>": 32138,
+ "<a_70>": 32139,
+ "<a_72>": 32140,
+ "<a_73>": 32141,
+ "<a_74>": 32142,
+ "<a_75>": 32143,
+ "<a_76>": 32144,
+ "<a_79>": 32145,
+ "<a_80>": 32146,
+ "<a_81>": 32147,
+ "<a_84>": 32148,
+ "<a_85>": 32149,
+ "<a_87>": 32150,
+ "<a_88>": 32151,
+ "<a_89>": 32152,
+ "<a_8>": 32153,
+ "<a_90>": 32154,
+ "<a_91>": 32155,
+ "<a_93>": 32156,
+ "<a_94>": 32157,
+ "<a_95>": 32158,
+ "<a_96>": 32159,
+ "<a_99>": 32160,
+ "<a_9>": 32161,
+ "<b_0>": 32162,
+ "<b_100>": 32163,
+ "<b_101>": 32164,
+ "<b_102>": 32165,
+ "<b_103>": 32166,
+ "<b_104>": 32167,
+ "<b_105>": 32168,
+ "<b_106>": 32169,
+ "<b_107>": 32170,
+ "<b_108>": 32171,
+ "<b_109>": 32172,
+ "<b_10>": 32173,
+ "<b_110>": 32174,
+ "<b_111>": 32175,
+ "<b_112>": 32176,
+ "<b_113>": 32177,
+ "<b_114>": 32178,
+ "<b_115>": 32179,
+ "<b_116>": 32180,
+ "<b_117>": 32181,
+ "<b_118>": 32182,
+ "<b_119>": 32183,
+ "<b_11>": 32184,
+ "<b_120>": 32185,
+ "<b_121>": 32186,
+ "<b_122>": 32187,
+ "<b_123>": 32188,
+ "<b_124>": 32189,
+ "<b_125>": 32190,
+ "<b_126>": 32191,
+ "<b_127>": 32192,
+ "<b_128>": 32193,
+ "<b_129>": 32194,
+ "<b_12>": 32195,
+ "<b_130>": 32196,
+ "<b_131>": 32197,
+ "<b_132>": 32198,
+ "<b_133>": 32199,
+ "<b_134>": 32200,
+ "<b_135>": 32201,
+ "<b_136>": 32202,
+ "<b_137>": 32203,
+ "<b_138>": 32204,
+ "<b_139>": 32205,
+ "<b_13>": 32206,
+ "<b_140>": 32207,
+ "<b_141>": 32208,
+ "<b_142>": 32209,
+ "<b_143>": 32210,
+ "<b_144>": 32211,
+ "<b_145>": 32212,
+ "<b_146>": 32213,
+ "<b_147>": 32214,
+ "<b_148>": 32215,
+ "<b_149>": 32216,
+ "<b_14>": 32217,
+ "<b_150>": 32218,
+ "<b_151>": 32219,
+ "<b_152>": 32220,
+ "<b_153>": 32221,
+ "<b_154>": 32222,
+ "<b_155>": 32223,
+ "<b_156>": 32224,
+ "<b_157>": 32225,
+ "<b_158>": 32226,
+ "<b_159>": 32227,
+ "<b_15>": 32228,
+ "<b_160>": 32229,
+ "<b_161>": 32230,
+ "<b_162>": 32231,
+ "<b_163>": 32232,
+ "<b_164>": 32233,
+ "<b_165>": 32234,
+ "<b_166>": 32235,
+ "<b_167>": 32236,
+ "<b_168>": 32237,
+ "<b_169>": 32238,
+ "<b_16>": 32239,
+ "<b_170>": 32240,
+ "<b_171>": 32241,
+ "<b_172>": 32242,
+ "<b_173>": 32243,
+ "<b_174>": 32244,
+ "<b_175>": 32245,
+ "<b_176>": 32246,
+ "<b_177>": 32247,
+ "<b_178>": 32248,
+ "<b_179>": 32249,
+ "<b_17>": 32250,
+ "<b_180>": 32251,
+ "<b_181>": 32252,
+ "<b_182>": 32253,
+ "<b_183>": 32254,
+ "<b_184>": 32255,
+ "<b_185>": 32256,
+ "<b_186>": 32257,
+ "<b_187>": 32258,
+ "<b_188>": 32259,
+ "<b_189>": 32260,
+ "<b_18>": 32261,
+ "<b_190>": 32262,
+ "<b_191>": 32263,
+ "<b_192>": 32264,
+ "<b_193>": 32265,
+ "<b_194>": 32266,
+ "<b_195>": 32267,
+ "<b_196>": 32268,
+ "<b_197>": 32269,
+ "<b_198>": 32270,
+ "<b_199>": 32271,
+ "<b_19>": 32272,
+ "<b_1>": 32273,
+ "<b_200>": 32274,
+ "<b_201>": 32275,
+ "<b_202>": 32276,
+ "<b_203>": 32277,
+ "<b_204>": 32278,
+ "<b_205>": 32279,
+ "<b_206>": 32280,
+ "<b_207>": 32281,
+ "<b_208>": 32282,
+ "<b_209>": 32283,
+ "<b_20>": 32284,
+ "<b_210>": 32285,
+ "<b_211>": 32286,
+ "<b_212>": 32287,
+ "<b_213>": 32288,
+ "<b_214>": 32289,
+ "<b_215>": 32290,
+ "<b_216>": 32291,
+ "<b_217>": 32292,
+ "<b_218>": 32293,
+ "<b_219>": 32294,
+ "<b_21>": 32295,
+ "<b_220>": 32296,
+ "<b_221>": 32297,
+ "<b_222>": 32298,
+ "<b_223>": 32299,
+ "<b_224>": 32300,
+ "<b_225>": 32301,
+ "<b_226>": 32302,
+ "<b_227>": 32303,
+ "<b_228>": 32304,
+ "<b_229>": 32305,
+ "<b_22>": 32306,
+ "<b_230>": 32307,
+ "<b_231>": 32308,
+ "<b_232>": 32309,
+ "<b_233>": 32310,
+ "<b_234>": 32311,
+ "<b_235>": 32312,
+ "<b_236>": 32313,
+ "<b_237>": 32314,
+ "<b_238>": 32315,
+ "<b_239>": 32316,
+ "<b_23>": 32317,
+ "<b_240>": 32318,
+ "<b_241>": 32319,
+ "<b_242>": 32320,
+ "<b_243>": 32321,
+ "<b_244>": 32322,
+ "<b_245>": 32323,
+ "<b_246>": 32324,
+ "<b_247>": 32325,
+ "<b_248>": 32326,
+ "<b_249>": 32327,
+ "<b_24>": 32328,
+ "<b_250>": 32329,
+ "<b_251>": 32330,
+ "<b_252>": 32331,
+ "<b_253>": 32332,
+ "<b_254>": 32333,
+ "<b_255>": 32334,
+ "<b_25>": 32335,
+ "<b_26>": 32336,
+ "<b_27>": 32337,
+ "<b_28>": 32338,
+ "<b_29>": 32339,
+ "<b_2>": 32340,
+ "<b_30>": 32341,
+ "<b_31>": 32342,
+ "<b_32>": 32343,
+ "<b_33>": 32344,
+ "<b_34>": 32345,
+ "<b_35>": 32346,
+ "<b_36>": 32347,
+ "<b_37>": 32348,
+ "<b_38>": 32349,
+ "<b_39>": 32350,
+ "<b_3>": 32351,
+ "<b_40>": 32352,
+ "<b_41>": 32353,
+ "<b_42>": 32354,
+ "<b_43>": 32355,
+ "<b_44>": 32356,
+ "<b_45>": 32357,
+ "<b_46>": 32358,
+ "<b_47>": 32359,
+ "<b_48>": 32360,
+ "<b_49>": 32361,
+ "<b_4>": 32362,
+ "<b_50>": 32363,
+ "<b_51>": 32364,
+ "<b_52>": 32365,
+ "<b_53>": 32366,
+ "<b_54>": 32367,
+ "<b_55>": 32368,
+ "<b_56>": 32369,
+ "<b_57>": 32370,
+ "<b_58>": 32371,
+ "<b_59>": 32372,
+ "<b_5>": 32373,
+ "<b_60>": 32374,
+ "<b_61>": 32375,
+ "<b_62>": 32376,
+ "<b_63>": 32377,
+ "<b_64>": 32378,
+ "<b_65>": 32379,
+ "<b_66>": 32380,
+ "<b_67>": 32381,
+ "<b_68>": 32382,
+ "<b_69>": 32383,
+ "<b_6>": 32384,
+ "<b_70>": 32385,
+ "<b_71>": 32386,
+ "<b_72>": 32387,
+ "<b_73>": 32388,
+ "<b_74>": 32389,
+ "<b_75>": 32390,
+ "<b_76>": 32391,
+ "<b_77>": 32392,
+ "<b_78>": 32393,
+ "<b_79>": 32394,
+ "<b_7>": 32395,
+ "<b_80>": 32396,
+ "<b_81>": 32397,
+ "<b_82>": 32398,
+ "<b_83>": 32399,
+ "<b_84>": 32400,
+ "<b_85>": 32401,
+ "<b_86>": 32402,
+ "<b_87>": 32403,
+ "<b_88>": 32404,
+ "<b_89>": 32405,
+ "<b_8>": 32406,
+ "<b_90>": 32407,
+ "<b_91>": 32408,
+ "<b_92>": 32409,
+ "<b_93>": 32410,
+ "<b_94>": 32411,
+ "<b_95>": 32412,
+ "<b_96>": 32413,
+ "<b_97>": 32414,
+ "<b_98>": 32415,
+ "<b_99>": 32416,
+ "<b_9>": 32417,
+ "<c_0>": 32418,
+ "<c_100>": 32419,
+ "<c_101>": 32420,
+ "<c_102>": 32421,
+ "<c_103>": 32422,
+ "<c_104>": 32423,
+ "<c_105>": 32424,
+ "<c_106>": 32425,
+ "<c_107>": 32426,
+ "<c_108>": 32427,
+ "<c_109>": 32428,
+ "<c_10>": 32429,
+ "<c_110>": 32430,
+ "<c_111>": 32431,
+ "<c_112>": 32432,
+ "<c_113>": 32433,
+ "<c_114>": 32434,
+ "<c_115>": 32435,
+ "<c_116>": 32436,
+ "<c_117>": 32437,
+ "<c_118>": 32438,
+ "<c_119>": 32439,
+ "<c_11>": 32440,
+ "<c_120>": 32441,
+ "<c_121>": 32442,
+ "<c_122>": 32443,
+ "<c_123>": 32444,
+ "<c_124>": 32445,
+ "<c_125>": 32446,
+ "<c_126>": 32447,
+ "<c_127>": 32448,
+ "<c_128>": 32449,
+ "<c_129>": 32450,
+ "<c_12>": 32451,
+ "<c_130>": 32452,
+ "<c_131>": 32453,
+ "<c_132>": 32454,
+ "<c_133>": 32455,
+ "<c_134>": 32456,
+ "<c_135>": 32457,
+ "<c_136>": 32458,
+ "<c_137>": 32459,
+ "<c_138>": 32460,
+ "<c_139>": 32461,
+ "<c_13>": 32462,
+ "<c_140>": 32463,
+ "<c_141>": 32464,
+ "<c_142>": 32465,
+ "<c_143>": 32466,
+ "<c_144>": 32467,
+ "<c_145>": 32468,
+ "<c_146>": 32469,
+ "<c_147>": 32470,
+ "<c_148>": 32471,
+ "<c_149>": 32472,
+ "<c_14>": 32473,
+ "<c_150>": 32474,
+ "<c_151>": 32475,
+ "<c_152>": 32476,
+ "<c_153>": 32477,
+ "<c_154>": 32478,
+ "<c_155>": 32479,
+ "<c_156>": 32480,
+ "<c_157>": 32481,
+ "<c_158>": 32482,
+ "<c_159>": 32483,
+ "<c_15>": 32484,
+ "<c_160>": 32485,
+ "<c_161>": 32486,
+ "<c_162>": 32487,
+ "<c_163>": 32488,
+ "<c_164>": 32489,
+ "<c_165>": 32490,
+ "<c_166>": 32491,
+ "<c_167>": 32492,
+ "<c_168>": 32493,
+ "<c_169>": 32494,
+ "<c_16>": 32495,
+ "<c_170>": 32496,
+ "<c_171>": 32497,
+ "<c_172>": 32498,
+ "<c_173>": 32499,
+ "<c_174>": 32500,
+ "<c_175>": 32501,
+ "<c_176>": 32502,
+ "<c_177>": 32503,
+ "<c_178>": 32504,
+ "<c_179>": 32505,
+ "<c_17>": 32506,
+ "<c_180>": 32507,
+ "<c_181>": 32508,
+ "<c_182>": 32509,
+ "<c_183>": 32510,
+ "<c_184>": 32511,
+ "<c_185>": 32512,
+ "<c_186>": 32513,
+ "<c_187>": 32514,
+ "<c_188>": 32515,
+ "<c_189>": 32516,
+ "<c_18>": 32517,
+ "<c_190>": 32518,
+ "<c_191>": 32519,
+ "<c_192>": 32520,
+ "<c_193>": 32521,
+ "<c_194>": 32522,
+ "<c_195>": 32523,
+ "<c_196>": 32524,
+ "<c_197>": 32525,
+ "<c_198>": 32526,
+ "<c_199>": 32527,
+ "<c_19>": 32528,
+ "<c_1>": 32529,
+ "<c_200>": 32530,
+ "<c_201>": 32531,
+ "<c_202>": 32532,
+ "<c_203>": 32533,
+ "<c_204>": 32534,
+ "<c_205>": 32535,
+ "<c_206>": 32536,
+ "<c_207>": 32537,
+ "<c_208>": 32538,
+ "<c_209>": 32539,
+ "<c_20>": 32540,
+ "<c_210>": 32541,
+ "<c_211>": 32542,
+ "<c_212>": 32543,
+ "<c_213>": 32544,
+ "<c_214>": 32545,
+ "<c_215>": 32546,
+ "<c_216>": 32547,
+ "<c_217>": 32548,
+ "<c_218>": 32549,
+ "<c_219>": 32550,
+ "<c_21>": 32551,
+ "<c_220>": 32552,
+ "<c_221>": 32553,
+ "<c_222>": 32554,
+ "<c_223>": 32555,
+ "<c_224>": 32556,
+ "<c_225>": 32557,
+ "<c_226>": 32558,
+ "<c_227>": 32559,
+ "<c_228>": 32560,
+ "<c_229>": 32561,
+ "<c_22>": 32562,
+ "<c_230>": 32563,
+ "<c_231>": 32564,
+ "<c_232>": 32565,
+ "<c_233>": 32566,
+ "<c_234>": 32567,
+ "<c_235>": 32568,
+ "<c_236>": 32569,
+ "<c_237>": 32570,
+ "<c_238>": 32571,
+ "<c_239>": 32572,
+ "<c_23>": 32573,
+ "<c_240>": 32574,
+ "<c_241>": 32575,
+ "<c_242>": 32576,
+ "<c_243>": 32577,
+ "<c_244>": 32578,
+ "<c_245>": 32579,
+ "<c_246>": 32580,
+ "<c_247>": 32581,
+ "<c_248>": 32582,
+ "<c_249>": 32583,
+ "<c_24>": 32584,
+ "<c_250>": 32585,
+ "<c_251>": 32586,
+ "<c_252>": 32587,
+ "<c_253>": 32588,
+ "<c_254>": 32589,
+ "<c_255>": 32590,
+ "<c_25>": 32591,
+ "<c_26>": 32592,
+ "<c_27>": 32593,
+ "<c_28>": 32594,
+ "<c_29>": 32595,
+ "<c_2>": 32596,
+ "<c_30>": 32597,
+ "<c_31>": 32598,
+ "<c_32>": 32599,
+ "<c_33>": 32600,
+ "<c_34>": 32601,
+ "<c_35>": 32602,
+ "<c_36>": 32603,
+ "<c_37>": 32604,
+ "<c_38>": 32605,
+ "<c_39>": 32606,
+ "<c_3>": 32607,
+ "<c_40>": 32608,
+ "<c_41>": 32609,
+ "<c_42>": 32610,
+ "<c_43>": 32611,
+ "<c_44>": 32612,
+ "<c_45>": 32613,
+ "<c_46>": 32614,
+ "<c_47>": 32615,
+ "<c_48>": 32616,
+ "<c_49>": 32617,
+ "<c_4>": 32618,
+ "<c_50>": 32619,
+ "<c_51>": 32620,
+ "<c_52>": 32621,
+ "<c_53>": 32622,
+ "<c_54>": 32623,
+ "<c_55>": 32624,
+ "<c_56>": 32625,
+ "<c_57>": 32626,
+ "<c_58>": 32627,
+ "<c_59>": 32628,
+ "<c_5>": 32629,
+ "<c_60>": 32630,
+ "<c_61>": 32631,
+ "<c_62>": 32632,
+ "<c_63>": 32633,
+ "<c_64>": 32634,
+ "<c_65>": 32635,
+ "<c_66>": 32636,
+ "<c_67>": 32637,
+ "<c_68>": 32638,
+ "<c_69>": 32639,
+ "<c_6>": 32640,
+ "<c_70>": 32641,
+ "<c_71>": 32642,
+ "<c_72>": 32643,
+ "<c_73>": 32644,
+ "<c_74>": 32645,
+ "<c_75>": 32646,
+ "<c_76>": 32647,
+ "<c_77>": 32648,
+ "<c_78>": 32649,
+ "<c_79>": 32650,
+ "<c_7>": 32651,
+ "<c_80>": 32652,
+ "<c_81>": 32653,
+ "<c_82>": 32654,
+ "<c_83>": 32655,
+ "<c_84>": 32656,
+ "<c_85>": 32657,
+ "<c_86>": 32658,
+ "<c_87>": 32659,
+ "<c_88>": 32660,
+ "<c_89>": 32661,
+ "<c_8>": 32662,
+ "<c_90>": 32663,
+ "<c_91>": 32664,
+ "<c_92>": 32665,
+ "<c_93>": 32666,
+ "<c_94>": 32667,
+ "<c_95>": 32668,
+ "<c_96>": 32669,
+ "<c_97>": 32670,
+ "<c_98>": 32671,
+ "<c_99>": 32672,
+ "<c_9>": 32673,
+ "<d_0>": 32674,
+ "<d_100>": 32675,
+ "<d_101>": 32676,
+ "<d_102>": 32677,
+ "<d_103>": 32678,
+ "<d_104>": 32679,
+ "<d_105>": 32680,
+ "<d_106>": 32681,
+ "<d_107>": 32682,
+ "<d_108>": 32683,
+ "<d_109>": 32684,
+ "<d_10>": 32685,
+ "<d_110>": 32686,
+ "<d_111>": 32687,
+ "<d_112>": 32688,
+ "<d_113>": 32689,
+ "<d_114>": 32690,
+ "<d_115>": 32691,
+ "<d_116>": 32692,
+ "<d_117>": 32693,
+ "<d_118>": 32694,
+ "<d_119>": 32695,
+ "<d_11>": 32696,
+ "<d_120>": 32697,
+ "<d_121>": 32698,
+ "<d_122>": 32699,
+ "<d_123>": 32700,
+ "<d_124>": 32701,
+ "<d_125>": 32702,
+ "<d_126>": 32703,
+ "<d_127>": 32704,
+ "<d_128>": 32705,
+ "<d_129>": 32706,
+ "<d_12>": 32707,
+ "<d_130>": 32708,
+ "<d_131>": 32709,
+ "<d_132>": 32710,
+ "<d_133>": 32711,
+ "<d_134>": 32712,
+ "<d_135>": 32713,
+ "<d_136>": 32714,
+ "<d_137>": 32715,
+ "<d_138>": 32716,
+ "<d_139>": 32717,
+ "<d_13>": 32718,
+ "<d_140>": 32719,
+ "<d_141>": 32720,
+ "<d_142>": 32721,
+ "<d_143>": 32722,
+ "<d_144>": 32723,
+ "<d_145>": 32724,
+ "<d_146>": 32725,
+ "<d_147>": 32726,
+ "<d_148>": 32727,
+ "<d_149>": 32728,
+ "<d_14>": 32729,
+ "<d_150>": 32730,
+ "<d_151>": 32731,
+ "<d_152>": 32732,
+ "<d_153>": 32733,
+ "<d_154>": 32734,
+ "<d_155>": 32735,
+ "<d_156>": 32736,
+ "<d_157>": 32737,
+ "<d_158>": 32738,
+ "<d_159>": 32739,
+ "<d_15>": 32740,
+ "<d_160>": 32741,
+ "<d_161>": 32742,
+ "<d_162>": 32743,
+ "<d_163>": 32744,
+ "<d_164>": 32745,
+ "<d_165>": 32746,
+ "<d_166>": 32747,
+ "<d_167>": 32748,
+ "<d_168>": 32749,
+ "<d_169>": 32750,
+ "<d_16>": 32751,
+ "<d_170>": 32752,
+ "<d_171>": 32753,
+ "<d_172>": 32754,
+ "<d_173>": 32755,
+ "<d_174>": 32756,
+ "<d_175>": 32757,
+ "<d_176>": 32758,
+ "<d_177>": 32759,
+ "<d_178>": 32760,
+ "<d_179>": 32761,
+ "<d_17>": 32762,
+ "<d_180>": 32763,
+ "<d_181>": 32764,
+ "<d_182>": 32765,
+ "<d_183>": 32766,
+ "<d_184>": 32767,
+ "<d_185>": 32768,
+ "<d_186>": 32769,
+ "<d_187>": 32770,
+ "<d_188>": 32771,
+ "<d_189>": 32772,
+ "<d_18>": 32773,
+ "<d_190>": 32774,
+ "<d_191>": 32775,
+ "<d_192>": 32776,
+ "<d_193>": 32777,
+ "<d_194>": 32778,
+ "<d_195>": 32779,
+ "<d_196>": 32780,
+ "<d_197>": 32781,
+ "<d_198>": 32782,
+ "<d_199>": 32783,
+ "<d_19>": 32784,
+ "<d_1>": 32785,
+ "<d_200>": 32786,
+ "<d_201>": 32787,
+ "<d_202>": 32788,
+ "<d_203>": 32789,
+ "<d_204>": 32790,
+ "<d_205>": 32791,
+ "<d_206>": 32792,
+ "<d_207>": 32793,
+ "<d_208>": 32794,
+ "<d_209>": 32795,
+ "<d_20>": 32796,
+ "<d_210>": 32797,
+ "<d_211>": 32798,
+ "<d_212>": 32799,
+ "<d_213>": 32800,
+ "<d_214>": 32801,
+ "<d_215>": 32802,
+ "<d_216>": 32803,
+ "<d_217>": 32804,
+ "<d_218>": 32805,
+ "<d_219>": 32806,
+ "<d_21>": 32807,
+ "<d_220>": 32808,
+ "<d_221>": 32809,
+ "<d_222>": 32810,
+ "<d_223>": 32811,
+ "<d_224>": 32812,
+ "<d_225>": 32813,
+ "<d_226>": 32814,
+ "<d_227>": 32815,
+ "<d_228>": 32816,
+ "<d_229>": 32817,
+ "<d_22>": 32818,
+ "<d_230>": 32819,
+ "<d_231>": 32820,
+ "<d_232>": 32821,
+ "<d_233>": 32822,
+ "<d_234>": 32823,
+ "<d_235>": 32824,
+ "<d_236>": 32825,
+ "<d_237>": 32826,
+ "<d_238>": 32827,
+ "<d_239>": 32828,
+ "<d_23>": 32829,
+ "<d_240>": 32830,
+ "<d_241>": 32831,
+ "<d_242>": 32832,
+ "<d_243>": 32833,
+ "<d_244>": 32834,
+ "<d_245>": 32835,
+ "<d_246>": 32836,
+ "<d_247>": 32837,
+ "<d_248>": 32838,
+ "<d_249>": 32839,
+ "<d_24>": 32840,
+ "<d_250>": 32841,
+ "<d_251>": 32842,
+ "<d_252>": 32843,
+ "<d_253>": 32844,
+ "<d_254>": 32845,
+ "<d_255>": 32846,
+ "<d_25>": 32847,
+ "<d_26>": 32848,
+ "<d_27>": 32849,
+ "<d_28>": 32850,
+ "<d_29>": 32851,
+ "<d_2>": 32852,
+ "<d_30>": 32853,
+ "<d_31>": 32854,
+ "<d_32>": 32855,
+ "<d_33>": 32856,
+ "<d_34>": 32857,
+ "<d_35>": 32858,
+ "<d_36>": 32859,
+ "<d_37>": 32860,
+ "<d_38>": 32861,
+ "<d_39>": 32862,
+ "<d_3>": 32863,
+ "<d_40>": 32864,
+ "<d_41>": 32865,
+ "<d_42>": 32866,
+ "<d_43>": 32867,
+ "<d_44>": 32868,
+ "<d_45>": 32869,
+ "<d_46>": 32870,
+ "<d_47>": 32871,
+ "<d_48>": 32872,
+ "<d_49>": 32873,
+ "<d_4>": 32874,
+ "<d_50>": 32875,
+ "<d_51>": 32876,
+ "<d_52>": 32877,
+ "<d_53>": 32878,
+ "<d_54>": 32879,
+ "<d_55>": 32880,
+ "<d_56>": 32881,
+ "<d_57>": 32882,
+ "<d_58>": 32883,
+ "<d_59>": 32884,
+ "<d_5>": 32885,
+ "<d_60>": 32886,
+ "<d_61>": 32887,
+ "<d_62>": 32888,
+ "<d_63>": 32889,
+ "<d_64>": 32890,
+ "<d_65>": 32891,
+ "<d_66>": 32892,
+ "<d_67>": 32893,
+ "<d_68>": 32894,
+ "<d_69>": 32895,
+ "<d_6>": 32896,
+ "<d_70>": 32897,
+ "<d_71>": 32898,
+ "<d_72>": 32899,
+ "<d_73>": 32900,
+ "<d_74>": 32901,
+ "<d_75>": 32902,
+ "<d_76>": 32903,
+ "<d_77>": 32904,
+ "<d_78>": 32905,
+ "<d_79>": 32906,
+ "<d_7>": 32907,
+ "<d_80>": 32908,
+ "<d_81>": 32909,
+ "<d_82>": 32910,
+ "<d_83>": 32911,
+ "<d_84>": 32912,
+ "<d_85>": 32913,
+ "<d_86>": 32914,
+ "<d_87>": 32915,
+ "<d_88>": 32916,
+ "<d_89>": 32917,
+ "<d_8>": 32918,
+ "<d_90>": 32919,
+ "<d_91>": 32920,
+ "<d_92>": 32921,
+ "<d_93>": 32922,
+ "<d_94>": 32923,
+ "<d_95>": 32924,
+ "<d_96>": 32925,
+ "<d_97>": 32926,
+ "<d_98>": 32927,
+ "<d_99>": 32928,
+ "<d_9>": 32929
+ }
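These 930 entries extend the 32,000-token base LLaMA vocabulary with LC-Rec's item-index tokens — four code levels `<a_*>`, `<b_*>`, `<c_*>`, `<d_*>` mapped onto the contiguous ID range 32000–32929 (keys appear in lexicographic order, which is why `<a_11>` follows `<a_118>`). As a minimal sketch of how these tokens resolve once the checkpoint is loaded — `path/to/checkpoint` is a placeholder for a local clone of this repo, not a real path:

```python
from transformers import AutoTokenizer

# from_pretrained picks up added_tokens.json automatically;
# "path/to/checkpoint" stands in for a local clone of this repository.
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")

# First token of each code level, per the mapping above.
print(tokenizer.convert_tokens_to_ids(["<a_100>", "<b_0>", "<c_0>", "<d_0>"]))
# -> [32000, 32162, 32418, 32674]
```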
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "_name_or_path": "/home/bwzheng/myckpt/delta/Instruments-fp16/",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "max_sequence_length": 2048,
+ "model_type": "llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.31.0",
+ "use_cache": true,
+ "vocab_size": 32930
+ }
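This is a LLaMA-7B-shaped configuration (32 layers, 32 heads, hidden size 4096), and its `vocab_size` of 32930 is exactly the 32,000 base tokens plus the 930 added tokens above. A hedged loading sketch, with the same placeholder path as before:

```python
import torch
from transformers import AutoModelForCausalLM

# fp16 weights, assembled shard by shard via pytorch_model.bin.index.json.
model = AutoModelForCausalLM.from_pretrained(
    "path/to/checkpoint", torch_dtype=torch.float16
)
assert model.config.vocab_size == 32930  # 32000 base + 930 added tokens
```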
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "transformers_version": "4.31.0"
+ }
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86da71adcddc636ab33dee29d9374ef18e718b931f9d423dd3974fae1541144c
+ size 9984253118
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4338eee9cfb4f9852f37050fff07c9cb2379cd05f078190a5ff9141bb8506995
+ size 3507934099
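Both `.bin` entries are Git LFS pointer files, not the weights themselves; the actual fp16 shards (~9.98 GB and ~3.51 GB) are fetched on checkout. A small sketch — assuming the shards sit in the working directory — for checking a download against the pointer's `oid sha256`:

```python
import hashlib

def file_sha256(path: str, chunk: int = 1 << 20) -> str:
    """Stream a large file through SHA-256 without reading it whole."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

# Should equal the oid recorded in the corresponding LFS pointer above.
print(file_sha256("pytorch_model-00001-of-00002.bin"))
```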
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,330 @@
+ {
+ "metadata": {
+ "total_size": 13492076544
+ },
+ "weight_map": {
+ "lm_head.weight": "pytorch_model-00002-of-00002.bin",
+ "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.30.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00002.bin",
+ "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00002.bin",
+ "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+ }
+ }
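The index maps every parameter name to the shard that stores it — layers 0–23 plus the input embeddings live in shard 1; layers 24–31, `lm_head`, and the final norm live in shard 2 — with `metadata.total_size` recording the combined tensor payload in bytes. A sketch of resolving one tensor by hand rather than through `from_pretrained`, assuming the shard files sit in the current directory:

```python
import json
import torch

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

# Look up which shard holds a tensor, then load only that shard.
name = "model.layers.24.mlp.down_proj.weight"
shard = index["weight_map"][name]  # "pytorch_model-00002-of-00002.bin"
state = torch.load(shard, map_location="cpu")
print(name, tuple(state[name].shape))
```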
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "bos_token": {
+ "__type": "AddedToken",
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "clean_up_tokenization_spaces": false,
+ "eos_token": {
+ "__type": "AddedToken",
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "legacy": true,
+ "model_max_length": 2048,
+ "pad_token": null,
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": {
+ "__type": "AddedToken",
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
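Note that `tokenizer_config.json` leaves `pad_token` null while `special_tokens_map.json` maps it to `<unk>` (ID 0), matching `pad_token_id` in `config.json`; for batched use it is safest to pin the pad token explicitly. A final end-to-end sketch under the same placeholder-path assumption:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")

# special_tokens_map.json assigns <unk> (ID 0) as pad; set it
# explicitly in case the tokenizer comes back without one.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.unk_token

batch = tokenizer(["<a_100> <b_0>", "<a_100>"], padding=True, return_tensors="pt")
print(batch["input_ids"].shape)
```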