alishudi committed (verified)
Commit 58fdbd5 · 1 Parent(s): 693d285

Upload folder using huggingface_hub
added_tokens.json ADDED
@@ -0,0 +1,1007 @@
1
+ {
2
+ "</coord>": 33004,
3
+ "<coord>": 33003,
4
+ "<end_of_utterance>": 32002,
5
+ "<fake_token_around_image>": 32000,
6
+ "<image>": 32001,
7
+ "<|0|>": 32003,
8
+ "<|100|>": 32103,
9
+ "<|101|>": 32104,
10
+ "<|102|>": 32105,
11
+ "<|103|>": 32106,
12
+ "<|104|>": 32107,
13
+ "<|105|>": 32108,
14
+ "<|106|>": 32109,
15
+ "<|107|>": 32110,
16
+ "<|108|>": 32111,
17
+ "<|109|>": 32112,
18
+ "<|10|>": 32013,
19
+ "<|110|>": 32113,
20
+ "<|111|>": 32114,
21
+ "<|112|>": 32115,
22
+ "<|113|>": 32116,
23
+ "<|114|>": 32117,
24
+ "<|115|>": 32118,
25
+ "<|116|>": 32119,
26
+ "<|117|>": 32120,
27
+ "<|118|>": 32121,
28
+ "<|119|>": 32122,
29
+ "<|11|>": 32014,
30
+ "<|120|>": 32123,
31
+ "<|121|>": 32124,
32
+ "<|122|>": 32125,
33
+ "<|123|>": 32126,
34
+ "<|124|>": 32127,
35
+ "<|125|>": 32128,
36
+ "<|126|>": 32129,
37
+ "<|127|>": 32130,
38
+ "<|128|>": 32131,
39
+ "<|129|>": 32132,
40
+ "<|12|>": 32015,
41
+ "<|130|>": 32133,
42
+ "<|131|>": 32134,
43
+ "<|132|>": 32135,
44
+ "<|133|>": 32136,
45
+ "<|134|>": 32137,
46
+ "<|135|>": 32138,
47
+ "<|136|>": 32139,
48
+ "<|137|>": 32140,
49
+ "<|138|>": 32141,
50
+ "<|139|>": 32142,
51
+ "<|13|>": 32016,
52
+ "<|140|>": 32143,
53
+ "<|141|>": 32144,
54
+ "<|142|>": 32145,
55
+ "<|143|>": 32146,
56
+ "<|144|>": 32147,
57
+ "<|145|>": 32148,
58
+ "<|146|>": 32149,
59
+ "<|147|>": 32150,
60
+ "<|148|>": 32151,
61
+ "<|149|>": 32152,
62
+ "<|14|>": 32017,
63
+ "<|150|>": 32153,
64
+ "<|151|>": 32154,
65
+ "<|152|>": 32155,
66
+ "<|153|>": 32156,
67
+ "<|154|>": 32157,
68
+ "<|155|>": 32158,
69
+ "<|156|>": 32159,
70
+ "<|157|>": 32160,
71
+ "<|158|>": 32161,
72
+ "<|159|>": 32162,
73
+ "<|15|>": 32018,
74
+ "<|160|>": 32163,
75
+ "<|161|>": 32164,
76
+ "<|162|>": 32165,
77
+ "<|163|>": 32166,
78
+ "<|164|>": 32167,
79
+ "<|165|>": 32168,
80
+ "<|166|>": 32169,
81
+ "<|167|>": 32170,
82
+ "<|168|>": 32171,
83
+ "<|169|>": 32172,
84
+ "<|16|>": 32019,
85
+ "<|170|>": 32173,
86
+ "<|171|>": 32174,
87
+ "<|172|>": 32175,
88
+ "<|173|>": 32176,
89
+ "<|174|>": 32177,
90
+ "<|175|>": 32178,
91
+ "<|176|>": 32179,
92
+ "<|177|>": 32180,
93
+ "<|178|>": 32181,
94
+ "<|179|>": 32182,
95
+ "<|17|>": 32020,
96
+ "<|180|>": 32183,
97
+ "<|181|>": 32184,
98
+ "<|182|>": 32185,
99
+ "<|183|>": 32186,
100
+ "<|184|>": 32187,
101
+ "<|185|>": 32188,
102
+ "<|186|>": 32189,
103
+ "<|187|>": 32190,
104
+ "<|188|>": 32191,
105
+ "<|189|>": 32192,
106
+ "<|18|>": 32021,
107
+ "<|190|>": 32193,
108
+ "<|191|>": 32194,
109
+ "<|192|>": 32195,
110
+ "<|193|>": 32196,
111
+ "<|194|>": 32197,
112
+ "<|195|>": 32198,
113
+ "<|196|>": 32199,
114
+ "<|197|>": 32200,
115
+ "<|198|>": 32201,
116
+ "<|199|>": 32202,
117
+ "<|19|>": 32022,
118
+ "<|1|>": 32004,
119
+ "<|200|>": 32203,
120
+ "<|201|>": 32204,
121
+ "<|202|>": 32205,
122
+ "<|203|>": 32206,
123
+ "<|204|>": 32207,
124
+ "<|205|>": 32208,
125
+ "<|206|>": 32209,
126
+ "<|207|>": 32210,
127
+ "<|208|>": 32211,
128
+ "<|209|>": 32212,
129
+ "<|20|>": 32023,
130
+ "<|210|>": 32213,
131
+ "<|211|>": 32214,
132
+ "<|212|>": 32215,
133
+ "<|213|>": 32216,
134
+ "<|214|>": 32217,
135
+ "<|215|>": 32218,
136
+ "<|216|>": 32219,
137
+ "<|217|>": 32220,
138
+ "<|218|>": 32221,
139
+ "<|219|>": 32222,
140
+ "<|21|>": 32024,
141
+ "<|220|>": 32223,
142
+ "<|221|>": 32224,
143
+ "<|222|>": 32225,
144
+ "<|223|>": 32226,
145
+ "<|224|>": 32227,
146
+ "<|225|>": 32228,
147
+ "<|226|>": 32229,
148
+ "<|227|>": 32230,
149
+ "<|228|>": 32231,
150
+ "<|229|>": 32232,
151
+ "<|22|>": 32025,
152
+ "<|230|>": 32233,
153
+ "<|231|>": 32234,
154
+ "<|232|>": 32235,
155
+ "<|233|>": 32236,
156
+ "<|234|>": 32237,
157
+ "<|235|>": 32238,
158
+ "<|236|>": 32239,
159
+ "<|237|>": 32240,
160
+ "<|238|>": 32241,
161
+ "<|239|>": 32242,
162
+ "<|23|>": 32026,
163
+ "<|240|>": 32243,
164
+ "<|241|>": 32244,
165
+ "<|242|>": 32245,
166
+ "<|243|>": 32246,
167
+ "<|244|>": 32247,
168
+ "<|245|>": 32248,
169
+ "<|246|>": 32249,
170
+ "<|247|>": 32250,
171
+ "<|248|>": 32251,
172
+ "<|249|>": 32252,
173
+ "<|24|>": 32027,
174
+ "<|250|>": 32253,
175
+ "<|251|>": 32254,
176
+ "<|252|>": 32255,
177
+ "<|253|>": 32256,
178
+ "<|254|>": 32257,
179
+ "<|255|>": 32258,
180
+ "<|256|>": 32259,
181
+ "<|257|>": 32260,
182
+ "<|258|>": 32261,
183
+ "<|259|>": 32262,
184
+ "<|25|>": 32028,
185
+ "<|260|>": 32263,
186
+ "<|261|>": 32264,
187
+ "<|262|>": 32265,
188
+ "<|263|>": 32266,
189
+ "<|264|>": 32267,
190
+ "<|265|>": 32268,
191
+ "<|266|>": 32269,
192
+ "<|267|>": 32270,
193
+ "<|268|>": 32271,
194
+ "<|269|>": 32272,
195
+ "<|26|>": 32029,
196
+ "<|270|>": 32273,
197
+ "<|271|>": 32274,
198
+ "<|272|>": 32275,
199
+ "<|273|>": 32276,
200
+ "<|274|>": 32277,
201
+ "<|275|>": 32278,
202
+ "<|276|>": 32279,
203
+ "<|277|>": 32280,
204
+ "<|278|>": 32281,
205
+ "<|279|>": 32282,
206
+ "<|27|>": 32030,
207
+ "<|280|>": 32283,
208
+ "<|281|>": 32284,
209
+ "<|282|>": 32285,
210
+ "<|283|>": 32286,
211
+ "<|284|>": 32287,
212
+ "<|285|>": 32288,
213
+ "<|286|>": 32289,
214
+ "<|287|>": 32290,
215
+ "<|288|>": 32291,
216
+ "<|289|>": 32292,
217
+ "<|28|>": 32031,
218
+ "<|290|>": 32293,
219
+ "<|291|>": 32294,
220
+ "<|292|>": 32295,
221
+ "<|293|>": 32296,
222
+ "<|294|>": 32297,
223
+ "<|295|>": 32298,
224
+ "<|296|>": 32299,
225
+ "<|297|>": 32300,
226
+ "<|298|>": 32301,
227
+ "<|299|>": 32302,
228
+ "<|29|>": 32032,
229
+ "<|2|>": 32005,
230
+ "<|300|>": 32303,
231
+ "<|301|>": 32304,
232
+ "<|302|>": 32305,
233
+ "<|303|>": 32306,
234
+ "<|304|>": 32307,
235
+ "<|305|>": 32308,
236
+ "<|306|>": 32309,
237
+ "<|307|>": 32310,
238
+ "<|308|>": 32311,
239
+ "<|309|>": 32312,
240
+ "<|30|>": 32033,
241
+ "<|310|>": 32313,
242
+ "<|311|>": 32314,
243
+ "<|312|>": 32315,
244
+ "<|313|>": 32316,
245
+ "<|314|>": 32317,
246
+ "<|315|>": 32318,
247
+ "<|316|>": 32319,
248
+ "<|317|>": 32320,
249
+ "<|318|>": 32321,
250
+ "<|319|>": 32322,
251
+ "<|31|>": 32034,
252
+ "<|320|>": 32323,
253
+ "<|321|>": 32324,
254
+ "<|322|>": 32325,
255
+ "<|323|>": 32326,
256
+ "<|324|>": 32327,
257
+ "<|325|>": 32328,
258
+ "<|326|>": 32329,
259
+ "<|327|>": 32330,
260
+ "<|328|>": 32331,
261
+ "<|329|>": 32332,
262
+ "<|32|>": 32035,
263
+ "<|330|>": 32333,
264
+ "<|331|>": 32334,
265
+ "<|332|>": 32335,
266
+ "<|333|>": 32336,
267
+ "<|334|>": 32337,
268
+ "<|335|>": 32338,
269
+ "<|336|>": 32339,
270
+ "<|337|>": 32340,
271
+ "<|338|>": 32341,
272
+ "<|339|>": 32342,
273
+ "<|33|>": 32036,
274
+ "<|340|>": 32343,
275
+ "<|341|>": 32344,
276
+ "<|342|>": 32345,
277
+ "<|343|>": 32346,
278
+ "<|344|>": 32347,
279
+ "<|345|>": 32348,
280
+ "<|346|>": 32349,
281
+ "<|347|>": 32350,
282
+ "<|348|>": 32351,
283
+ "<|349|>": 32352,
284
+ "<|34|>": 32037,
285
+ "<|350|>": 32353,
286
+ "<|351|>": 32354,
287
+ "<|352|>": 32355,
288
+ "<|353|>": 32356,
289
+ "<|354|>": 32357,
290
+ "<|355|>": 32358,
291
+ "<|356|>": 32359,
292
+ "<|357|>": 32360,
293
+ "<|358|>": 32361,
294
+ "<|359|>": 32362,
295
+ "<|35|>": 32038,
296
+ "<|360|>": 32363,
297
+ "<|361|>": 32364,
298
+ "<|362|>": 32365,
299
+ "<|363|>": 32366,
300
+ "<|364|>": 32367,
301
+ "<|365|>": 32368,
302
+ "<|366|>": 32369,
303
+ "<|367|>": 32370,
304
+ "<|368|>": 32371,
305
+ "<|369|>": 32372,
306
+ "<|36|>": 32039,
307
+ "<|370|>": 32373,
308
+ "<|371|>": 32374,
309
+ "<|372|>": 32375,
310
+ "<|373|>": 32376,
311
+ "<|374|>": 32377,
312
+ "<|375|>": 32378,
313
+ "<|376|>": 32379,
314
+ "<|377|>": 32380,
315
+ "<|378|>": 32381,
316
+ "<|379|>": 32382,
317
+ "<|37|>": 32040,
318
+ "<|380|>": 32383,
319
+ "<|381|>": 32384,
320
+ "<|382|>": 32385,
321
+ "<|383|>": 32386,
322
+ "<|384|>": 32387,
323
+ "<|385|>": 32388,
324
+ "<|386|>": 32389,
325
+ "<|387|>": 32390,
326
+ "<|388|>": 32391,
327
+ "<|389|>": 32392,
328
+ "<|38|>": 32041,
329
+ "<|390|>": 32393,
330
+ "<|391|>": 32394,
331
+ "<|392|>": 32395,
332
+ "<|393|>": 32396,
333
+ "<|394|>": 32397,
334
+ "<|395|>": 32398,
335
+ "<|396|>": 32399,
336
+ "<|397|>": 32400,
337
+ "<|398|>": 32401,
338
+ "<|399|>": 32402,
339
+ "<|39|>": 32042,
340
+ "<|3|>": 32006,
341
+ "<|400|>": 32403,
342
+ "<|401|>": 32404,
343
+ "<|402|>": 32405,
344
+ "<|403|>": 32406,
345
+ "<|404|>": 32407,
346
+ "<|405|>": 32408,
347
+ "<|406|>": 32409,
348
+ "<|407|>": 32410,
349
+ "<|408|>": 32411,
350
+ "<|409|>": 32412,
351
+ "<|40|>": 32043,
352
+ "<|410|>": 32413,
353
+ "<|411|>": 32414,
354
+ "<|412|>": 32415,
355
+ "<|413|>": 32416,
356
+ "<|414|>": 32417,
357
+ "<|415|>": 32418,
358
+ "<|416|>": 32419,
359
+ "<|417|>": 32420,
360
+ "<|418|>": 32421,
361
+ "<|419|>": 32422,
362
+ "<|41|>": 32044,
363
+ "<|420|>": 32423,
364
+ "<|421|>": 32424,
365
+ "<|422|>": 32425,
366
+ "<|423|>": 32426,
367
+ "<|424|>": 32427,
368
+ "<|425|>": 32428,
369
+ "<|426|>": 32429,
370
+ "<|427|>": 32430,
371
+ "<|428|>": 32431,
372
+ "<|429|>": 32432,
373
+ "<|42|>": 32045,
374
+ "<|430|>": 32433,
375
+ "<|431|>": 32434,
376
+ "<|432|>": 32435,
377
+ "<|433|>": 32436,
378
+ "<|434|>": 32437,
379
+ "<|435|>": 32438,
380
+ "<|436|>": 32439,
381
+ "<|437|>": 32440,
382
+ "<|438|>": 32441,
383
+ "<|439|>": 32442,
384
+ "<|43|>": 32046,
385
+ "<|440|>": 32443,
386
+ "<|441|>": 32444,
387
+ "<|442|>": 32445,
388
+ "<|443|>": 32446,
389
+ "<|444|>": 32447,
390
+ "<|445|>": 32448,
391
+ "<|446|>": 32449,
392
+ "<|447|>": 32450,
393
+ "<|448|>": 32451,
394
+ "<|449|>": 32452,
395
+ "<|44|>": 32047,
396
+ "<|450|>": 32453,
397
+ "<|451|>": 32454,
398
+ "<|452|>": 32455,
399
+ "<|453|>": 32456,
400
+ "<|454|>": 32457,
401
+ "<|455|>": 32458,
402
+ "<|456|>": 32459,
403
+ "<|457|>": 32460,
404
+ "<|458|>": 32461,
405
+ "<|459|>": 32462,
406
+ "<|45|>": 32048,
407
+ "<|460|>": 32463,
408
+ "<|461|>": 32464,
409
+ "<|462|>": 32465,
410
+ "<|463|>": 32466,
411
+ "<|464|>": 32467,
412
+ "<|465|>": 32468,
413
+ "<|466|>": 32469,
414
+ "<|467|>": 32470,
415
+ "<|468|>": 32471,
416
+ "<|469|>": 32472,
417
+ "<|46|>": 32049,
418
+ "<|470|>": 32473,
419
+ "<|471|>": 32474,
420
+ "<|472|>": 32475,
421
+ "<|473|>": 32476,
422
+ "<|474|>": 32477,
423
+ "<|475|>": 32478,
424
+ "<|476|>": 32479,
425
+ "<|477|>": 32480,
426
+ "<|478|>": 32481,
427
+ "<|479|>": 32482,
428
+ "<|47|>": 32050,
429
+ "<|480|>": 32483,
430
+ "<|481|>": 32484,
431
+ "<|482|>": 32485,
432
+ "<|483|>": 32486,
433
+ "<|484|>": 32487,
434
+ "<|485|>": 32488,
435
+ "<|486|>": 32489,
436
+ "<|487|>": 32490,
437
+ "<|488|>": 32491,
438
+ "<|489|>": 32492,
439
+ "<|48|>": 32051,
440
+ "<|490|>": 32493,
441
+ "<|491|>": 32494,
442
+ "<|492|>": 32495,
443
+ "<|493|>": 32496,
444
+ "<|494|>": 32497,
445
+ "<|495|>": 32498,
446
+ "<|496|>": 32499,
447
+ "<|497|>": 32500,
448
+ "<|498|>": 32501,
449
+ "<|499|>": 32502,
450
+ "<|49|>": 32052,
451
+ "<|4|>": 32007,
452
+ "<|500|>": 32503,
453
+ "<|501|>": 32504,
454
+ "<|502|>": 32505,
455
+ "<|503|>": 32506,
456
+ "<|504|>": 32507,
457
+ "<|505|>": 32508,
458
+ "<|506|>": 32509,
459
+ "<|507|>": 32510,
460
+ "<|508|>": 32511,
461
+ "<|509|>": 32512,
462
+ "<|50|>": 32053,
463
+ "<|510|>": 32513,
464
+ "<|511|>": 32514,
465
+ "<|512|>": 32515,
466
+ "<|513|>": 32516,
467
+ "<|514|>": 32517,
468
+ "<|515|>": 32518,
469
+ "<|516|>": 32519,
470
+ "<|517|>": 32520,
471
+ "<|518|>": 32521,
472
+ "<|519|>": 32522,
473
+ "<|51|>": 32054,
474
+ "<|520|>": 32523,
475
+ "<|521|>": 32524,
476
+ "<|522|>": 32525,
477
+ "<|523|>": 32526,
478
+ "<|524|>": 32527,
479
+ "<|525|>": 32528,
480
+ "<|526|>": 32529,
481
+ "<|527|>": 32530,
482
+ "<|528|>": 32531,
483
+ "<|529|>": 32532,
484
+ "<|52|>": 32055,
485
+ "<|530|>": 32533,
486
+ "<|531|>": 32534,
487
+ "<|532|>": 32535,
488
+ "<|533|>": 32536,
489
+ "<|534|>": 32537,
490
+ "<|535|>": 32538,
491
+ "<|536|>": 32539,
492
+ "<|537|>": 32540,
493
+ "<|538|>": 32541,
494
+ "<|539|>": 32542,
495
+ "<|53|>": 32056,
496
+ "<|540|>": 32543,
497
+ "<|541|>": 32544,
498
+ "<|542|>": 32545,
499
+ "<|543|>": 32546,
500
+ "<|544|>": 32547,
501
+ "<|545|>": 32548,
502
+ "<|546|>": 32549,
503
+ "<|547|>": 32550,
504
+ "<|548|>": 32551,
505
+ "<|549|>": 32552,
506
+ "<|54|>": 32057,
507
+ "<|550|>": 32553,
508
+ "<|551|>": 32554,
509
+ "<|552|>": 32555,
510
+ "<|553|>": 32556,
511
+ "<|554|>": 32557,
512
+ "<|555|>": 32558,
513
+ "<|556|>": 32559,
514
+ "<|557|>": 32560,
515
+ "<|558|>": 32561,
516
+ "<|559|>": 32562,
517
+ "<|55|>": 32058,
518
+ "<|560|>": 32563,
519
+ "<|561|>": 32564,
520
+ "<|562|>": 32565,
521
+ "<|563|>": 32566,
522
+ "<|564|>": 32567,
523
+ "<|565|>": 32568,
524
+ "<|566|>": 32569,
525
+ "<|567|>": 32570,
526
+ "<|568|>": 32571,
527
+ "<|569|>": 32572,
528
+ "<|56|>": 32059,
529
+ "<|570|>": 32573,
530
+ "<|571|>": 32574,
531
+ "<|572|>": 32575,
532
+ "<|573|>": 32576,
533
+ "<|574|>": 32577,
534
+ "<|575|>": 32578,
535
+ "<|576|>": 32579,
536
+ "<|577|>": 32580,
537
+ "<|578|>": 32581,
538
+ "<|579|>": 32582,
539
+ "<|57|>": 32060,
540
+ "<|580|>": 32583,
541
+ "<|581|>": 32584,
542
+ "<|582|>": 32585,
543
+ "<|583|>": 32586,
544
+ "<|584|>": 32587,
545
+ "<|585|>": 32588,
546
+ "<|586|>": 32589,
547
+ "<|587|>": 32590,
548
+ "<|588|>": 32591,
549
+ "<|589|>": 32592,
550
+ "<|58|>": 32061,
551
+ "<|590|>": 32593,
552
+ "<|591|>": 32594,
553
+ "<|592|>": 32595,
554
+ "<|593|>": 32596,
555
+ "<|594|>": 32597,
556
+ "<|595|>": 32598,
557
+ "<|596|>": 32599,
558
+ "<|597|>": 32600,
559
+ "<|598|>": 32601,
560
+ "<|599|>": 32602,
561
+ "<|59|>": 32062,
562
+ "<|5|>": 32008,
563
+ "<|600|>": 32603,
564
+ "<|601|>": 32604,
565
+ "<|602|>": 32605,
566
+ "<|603|>": 32606,
567
+ "<|604|>": 32607,
568
+ "<|605|>": 32608,
569
+ "<|606|>": 32609,
570
+ "<|607|>": 32610,
571
+ "<|608|>": 32611,
572
+ "<|609|>": 32612,
573
+ "<|60|>": 32063,
574
+ "<|610|>": 32613,
575
+ "<|611|>": 32614,
576
+ "<|612|>": 32615,
577
+ "<|613|>": 32616,
578
+ "<|614|>": 32617,
579
+ "<|615|>": 32618,
580
+ "<|616|>": 32619,
581
+ "<|617|>": 32620,
582
+ "<|618|>": 32621,
583
+ "<|619|>": 32622,
584
+ "<|61|>": 32064,
585
+ "<|620|>": 32623,
586
+ "<|621|>": 32624,
587
+ "<|622|>": 32625,
588
+ "<|623|>": 32626,
589
+ "<|624|>": 32627,
590
+ "<|625|>": 32628,
591
+ "<|626|>": 32629,
592
+ "<|627|>": 32630,
593
+ "<|628|>": 32631,
594
+ "<|629|>": 32632,
595
+ "<|62|>": 32065,
596
+ "<|630|>": 32633,
597
+ "<|631|>": 32634,
598
+ "<|632|>": 32635,
599
+ "<|633|>": 32636,
600
+ "<|634|>": 32637,
601
+ "<|635|>": 32638,
602
+ "<|636|>": 32639,
603
+ "<|637|>": 32640,
604
+ "<|638|>": 32641,
605
+ "<|639|>": 32642,
606
+ "<|63|>": 32066,
607
+ "<|640|>": 32643,
608
+ "<|641|>": 32644,
609
+ "<|642|>": 32645,
610
+ "<|643|>": 32646,
611
+ "<|644|>": 32647,
612
+ "<|645|>": 32648,
613
+ "<|646|>": 32649,
614
+ "<|647|>": 32650,
615
+ "<|648|>": 32651,
616
+ "<|649|>": 32652,
617
+ "<|64|>": 32067,
618
+ "<|650|>": 32653,
619
+ "<|651|>": 32654,
620
+ "<|652|>": 32655,
621
+ "<|653|>": 32656,
622
+ "<|654|>": 32657,
623
+ "<|655|>": 32658,
624
+ "<|656|>": 32659,
625
+ "<|657|>": 32660,
626
+ "<|658|>": 32661,
627
+ "<|659|>": 32662,
628
+ "<|65|>": 32068,
629
+ "<|660|>": 32663,
630
+ "<|661|>": 32664,
631
+ "<|662|>": 32665,
632
+ "<|663|>": 32666,
633
+ "<|664|>": 32667,
634
+ "<|665|>": 32668,
635
+ "<|666|>": 32669,
636
+ "<|667|>": 32670,
637
+ "<|668|>": 32671,
638
+ "<|669|>": 32672,
639
+ "<|66|>": 32069,
640
+ "<|670|>": 32673,
641
+ "<|671|>": 32674,
642
+ "<|672|>": 32675,
643
+ "<|673|>": 32676,
644
+ "<|674|>": 32677,
645
+ "<|675|>": 32678,
646
+ "<|676|>": 32679,
647
+ "<|677|>": 32680,
648
+ "<|678|>": 32681,
649
+ "<|679|>": 32682,
650
+ "<|67|>": 32070,
651
+ "<|680|>": 32683,
652
+ "<|681|>": 32684,
653
+ "<|682|>": 32685,
654
+ "<|683|>": 32686,
655
+ "<|684|>": 32687,
656
+ "<|685|>": 32688,
657
+ "<|686|>": 32689,
658
+ "<|687|>": 32690,
659
+ "<|688|>": 32691,
660
+ "<|689|>": 32692,
661
+ "<|68|>": 32071,
662
+ "<|690|>": 32693,
663
+ "<|691|>": 32694,
664
+ "<|692|>": 32695,
665
+ "<|693|>": 32696,
666
+ "<|694|>": 32697,
667
+ "<|695|>": 32698,
668
+ "<|696|>": 32699,
669
+ "<|697|>": 32700,
670
+ "<|698|>": 32701,
671
+ "<|699|>": 32702,
672
+ "<|69|>": 32072,
673
+ "<|6|>": 32009,
674
+ "<|700|>": 32703,
675
+ "<|701|>": 32704,
676
+ "<|702|>": 32705,
677
+ "<|703|>": 32706,
678
+ "<|704|>": 32707,
679
+ "<|705|>": 32708,
680
+ "<|706|>": 32709,
681
+ "<|707|>": 32710,
682
+ "<|708|>": 32711,
683
+ "<|709|>": 32712,
684
+ "<|70|>": 32073,
685
+ "<|710|>": 32713,
686
+ "<|711|>": 32714,
687
+ "<|712|>": 32715,
688
+ "<|713|>": 32716,
689
+ "<|714|>": 32717,
690
+ "<|715|>": 32718,
691
+ "<|716|>": 32719,
692
+ "<|717|>": 32720,
693
+ "<|718|>": 32721,
694
+ "<|719|>": 32722,
695
+ "<|71|>": 32074,
696
+ "<|720|>": 32723,
697
+ "<|721|>": 32724,
698
+ "<|722|>": 32725,
699
+ "<|723|>": 32726,
700
+ "<|724|>": 32727,
701
+ "<|725|>": 32728,
702
+ "<|726|>": 32729,
703
+ "<|727|>": 32730,
704
+ "<|728|>": 32731,
705
+ "<|729|>": 32732,
706
+ "<|72|>": 32075,
707
+ "<|730|>": 32733,
708
+ "<|731|>": 32734,
709
+ "<|732|>": 32735,
710
+ "<|733|>": 32736,
711
+ "<|734|>": 32737,
712
+ "<|735|>": 32738,
713
+ "<|736|>": 32739,
714
+ "<|737|>": 32740,
715
+ "<|738|>": 32741,
716
+ "<|739|>": 32742,
717
+ "<|73|>": 32076,
718
+ "<|740|>": 32743,
719
+ "<|741|>": 32744,
720
+ "<|742|>": 32745,
721
+ "<|743|>": 32746,
722
+ "<|744|>": 32747,
723
+ "<|745|>": 32748,
724
+ "<|746|>": 32749,
725
+ "<|747|>": 32750,
726
+ "<|748|>": 32751,
727
+ "<|749|>": 32752,
728
+ "<|74|>": 32077,
729
+ "<|750|>": 32753,
730
+ "<|751|>": 32754,
731
+ "<|752|>": 32755,
732
+ "<|753|>": 32756,
733
+ "<|754|>": 32757,
734
+ "<|755|>": 32758,
735
+ "<|756|>": 32759,
736
+ "<|757|>": 32760,
737
+ "<|758|>": 32761,
738
+ "<|759|>": 32762,
739
+ "<|75|>": 32078,
740
+ "<|760|>": 32763,
741
+ "<|761|>": 32764,
742
+ "<|762|>": 32765,
743
+ "<|763|>": 32766,
744
+ "<|764|>": 32767,
745
+ "<|765|>": 32768,
746
+ "<|766|>": 32769,
747
+ "<|767|>": 32770,
748
+ "<|768|>": 32771,
749
+ "<|769|>": 32772,
750
+ "<|76|>": 32079,
751
+ "<|770|>": 32773,
752
+ "<|771|>": 32774,
753
+ "<|772|>": 32775,
754
+ "<|773|>": 32776,
755
+ "<|774|>": 32777,
756
+ "<|775|>": 32778,
757
+ "<|776|>": 32779,
758
+ "<|777|>": 32780,
759
+ "<|778|>": 32781,
760
+ "<|779|>": 32782,
761
+ "<|77|>": 32080,
762
+ "<|780|>": 32783,
763
+ "<|781|>": 32784,
764
+ "<|782|>": 32785,
765
+ "<|783|>": 32786,
766
+ "<|784|>": 32787,
767
+ "<|785|>": 32788,
768
+ "<|786|>": 32789,
769
+ "<|787|>": 32790,
770
+ "<|788|>": 32791,
771
+ "<|789|>": 32792,
772
+ "<|78|>": 32081,
773
+ "<|790|>": 32793,
774
+ "<|791|>": 32794,
775
+ "<|792|>": 32795,
776
+ "<|793|>": 32796,
777
+ "<|794|>": 32797,
778
+ "<|795|>": 32798,
779
+ "<|796|>": 32799,
780
+ "<|797|>": 32800,
781
+ "<|798|>": 32801,
782
+ "<|799|>": 32802,
783
+ "<|79|>": 32082,
784
+ "<|7|>": 32010,
785
+ "<|800|>": 32803,
786
+ "<|801|>": 32804,
787
+ "<|802|>": 32805,
788
+ "<|803|>": 32806,
789
+ "<|804|>": 32807,
790
+ "<|805|>": 32808,
791
+ "<|806|>": 32809,
792
+ "<|807|>": 32810,
793
+ "<|808|>": 32811,
794
+ "<|809|>": 32812,
795
+ "<|80|>": 32083,
796
+ "<|810|>": 32813,
797
+ "<|811|>": 32814,
798
+ "<|812|>": 32815,
799
+ "<|813|>": 32816,
800
+ "<|814|>": 32817,
801
+ "<|815|>": 32818,
802
+ "<|816|>": 32819,
803
+ "<|817|>": 32820,
804
+ "<|818|>": 32821,
805
+ "<|819|>": 32822,
806
+ "<|81|>": 32084,
807
+ "<|820|>": 32823,
808
+ "<|821|>": 32824,
809
+ "<|822|>": 32825,
810
+ "<|823|>": 32826,
811
+ "<|824|>": 32827,
812
+ "<|825|>": 32828,
813
+ "<|826|>": 32829,
814
+ "<|827|>": 32830,
815
+ "<|828|>": 32831,
816
+ "<|829|>": 32832,
817
+ "<|82|>": 32085,
818
+ "<|830|>": 32833,
819
+ "<|831|>": 32834,
820
+ "<|832|>": 32835,
821
+ "<|833|>": 32836,
822
+ "<|834|>": 32837,
823
+ "<|835|>": 32838,
824
+ "<|836|>": 32839,
825
+ "<|837|>": 32840,
826
+ "<|838|>": 32841,
827
+ "<|839|>": 32842,
828
+ "<|83|>": 32086,
829
+ "<|840|>": 32843,
830
+ "<|841|>": 32844,
831
+ "<|842|>": 32845,
832
+ "<|843|>": 32846,
833
+ "<|844|>": 32847,
834
+ "<|845|>": 32848,
835
+ "<|846|>": 32849,
836
+ "<|847|>": 32850,
837
+ "<|848|>": 32851,
838
+ "<|849|>": 32852,
839
+ "<|84|>": 32087,
840
+ "<|850|>": 32853,
841
+ "<|851|>": 32854,
842
+ "<|852|>": 32855,
843
+ "<|853|>": 32856,
844
+ "<|854|>": 32857,
845
+ "<|855|>": 32858,
846
+ "<|856|>": 32859,
847
+ "<|857|>": 32860,
848
+ "<|858|>": 32861,
849
+ "<|859|>": 32862,
850
+ "<|85|>": 32088,
851
+ "<|860|>": 32863,
852
+ "<|861|>": 32864,
853
+ "<|862|>": 32865,
854
+ "<|863|>": 32866,
855
+ "<|864|>": 32867,
856
+ "<|865|>": 32868,
857
+ "<|866|>": 32869,
858
+ "<|867|>": 32870,
859
+ "<|868|>": 32871,
860
+ "<|869|>": 32872,
861
+ "<|86|>": 32089,
862
+ "<|870|>": 32873,
863
+ "<|871|>": 32874,
864
+ "<|872|>": 32875,
865
+ "<|873|>": 32876,
866
+ "<|874|>": 32877,
867
+ "<|875|>": 32878,
868
+ "<|876|>": 32879,
869
+ "<|877|>": 32880,
870
+ "<|878|>": 32881,
871
+ "<|879|>": 32882,
872
+ "<|87|>": 32090,
873
+ "<|880|>": 32883,
874
+ "<|881|>": 32884,
875
+ "<|882|>": 32885,
876
+ "<|883|>": 32886,
877
+ "<|884|>": 32887,
878
+ "<|885|>": 32888,
879
+ "<|886|>": 32889,
880
+ "<|887|>": 32890,
881
+ "<|888|>": 32891,
882
+ "<|889|>": 32892,
883
+ "<|88|>": 32091,
884
+ "<|890|>": 32893,
885
+ "<|891|>": 32894,
886
+ "<|892|>": 32895,
887
+ "<|893|>": 32896,
888
+ "<|894|>": 32897,
889
+ "<|895|>": 32898,
890
+ "<|896|>": 32899,
891
+ "<|897|>": 32900,
892
+ "<|898|>": 32901,
893
+ "<|899|>": 32902,
894
+ "<|89|>": 32092,
895
+ "<|8|>": 32011,
896
+ "<|900|>": 32903,
897
+ "<|901|>": 32904,
898
+ "<|902|>": 32905,
899
+ "<|903|>": 32906,
900
+ "<|904|>": 32907,
901
+ "<|905|>": 32908,
902
+ "<|906|>": 32909,
903
+ "<|907|>": 32910,
904
+ "<|908|>": 32911,
905
+ "<|909|>": 32912,
906
+ "<|90|>": 32093,
907
+ "<|910|>": 32913,
908
+ "<|911|>": 32914,
909
+ "<|912|>": 32915,
910
+ "<|913|>": 32916,
911
+ "<|914|>": 32917,
912
+ "<|915|>": 32918,
913
+ "<|916|>": 32919,
914
+ "<|917|>": 32920,
915
+ "<|918|>": 32921,
916
+ "<|919|>": 32922,
917
+ "<|91|>": 32094,
918
+ "<|920|>": 32923,
919
+ "<|921|>": 32924,
920
+ "<|922|>": 32925,
921
+ "<|923|>": 32926,
922
+ "<|924|>": 32927,
923
+ "<|925|>": 32928,
924
+ "<|926|>": 32929,
925
+ "<|927|>": 32930,
926
+ "<|928|>": 32931,
927
+ "<|929|>": 32932,
928
+ "<|92|>": 32095,
929
+ "<|930|>": 32933,
930
+ "<|931|>": 32934,
931
+ "<|932|>": 32935,
932
+ "<|933|>": 32936,
933
+ "<|934|>": 32937,
934
+ "<|935|>": 32938,
935
+ "<|936|>": 32939,
936
+ "<|937|>": 32940,
937
+ "<|938|>": 32941,
938
+ "<|939|>": 32942,
939
+ "<|93|>": 32096,
940
+ "<|940|>": 32943,
941
+ "<|941|>": 32944,
942
+ "<|942|>": 32945,
943
+ "<|943|>": 32946,
944
+ "<|944|>": 32947,
945
+ "<|945|>": 32948,
946
+ "<|946|>": 32949,
947
+ "<|947|>": 32950,
948
+ "<|948|>": 32951,
949
+ "<|949|>": 32952,
950
+ "<|94|>": 32097,
951
+ "<|950|>": 32953,
952
+ "<|951|>": 32954,
953
+ "<|952|>": 32955,
954
+ "<|953|>": 32956,
955
+ "<|954|>": 32957,
956
+ "<|955|>": 32958,
957
+ "<|956|>": 32959,
958
+ "<|957|>": 32960,
959
+ "<|958|>": 32961,
960
+ "<|959|>": 32962,
961
+ "<|95|>": 32098,
962
+ "<|960|>": 32963,
963
+ "<|961|>": 32964,
964
+ "<|962|>": 32965,
965
+ "<|963|>": 32966,
966
+ "<|964|>": 32967,
967
+ "<|965|>": 32968,
968
+ "<|966|>": 32969,
969
+ "<|967|>": 32970,
970
+ "<|968|>": 32971,
971
+ "<|969|>": 32972,
972
+ "<|96|>": 32099,
973
+ "<|970|>": 32973,
974
+ "<|971|>": 32974,
975
+ "<|972|>": 32975,
976
+ "<|973|>": 32976,
977
+ "<|974|>": 32977,
978
+ "<|975|>": 32978,
979
+ "<|976|>": 32979,
980
+ "<|977|>": 32980,
981
+ "<|978|>": 32981,
982
+ "<|979|>": 32982,
983
+ "<|97|>": 32100,
984
+ "<|980|>": 32983,
985
+ "<|981|>": 32984,
986
+ "<|982|>": 32985,
987
+ "<|983|>": 32986,
988
+ "<|984|>": 32987,
989
+ "<|985|>": 32988,
990
+ "<|986|>": 32989,
991
+ "<|987|>": 32990,
992
+ "<|988|>": 32991,
993
+ "<|989|>": 32992,
994
+ "<|98|>": 32101,
995
+ "<|990|>": 32993,
996
+ "<|991|>": 32994,
997
+ "<|992|>": 32995,
998
+ "<|993|>": 32996,
999
+ "<|994|>": 32997,
1000
+ "<|995|>": 32998,
1001
+ "<|996|>": 32999,
1002
+ "<|997|>": 33000,
1003
+ "<|998|>": 33001,
1004
+ "<|999|>": 33002,
1005
+ "<|99|>": 32102,
1006
+ "<|9|>": 32012
1007
+ }
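
For orientation, a minimal sketch (not part of the uploaded files) of how the mappings above can be sanity-checked after loading the tokenizer with transformers; the repository id is a placeholder.

```python
# Hypothetical sanity check, assuming the repo loads with AutoTokenizer;
# the repo id below is a placeholder, not the actual repository name.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-idefics2-checkpoint")

# 1000 coordinate bins <|0|> .. <|999|> plus <coord>/</coord> wrappers sit on top of
# the base vocabulary; ids run up to 33004, and text_config.vocab_size in config.json
# is 33024, leaving a little headroom beyond them.
assert tokenizer.convert_tokens_to_ids("<coord>") == 33003
assert tokenizer.convert_tokens_to_ids("</coord>") == 33004
assert tokenizer.convert_tokens_to_ids("<|0|>") == 32003
assert tokenizer.convert_tokens_to_ids("<|999|>") == 33002
assert tokenizer.convert_tokens_to_ids("<image>") == 32001
```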
config.json ADDED
@@ -0,0 +1,233 @@
1
+ {
2
+ "_name_or_path": "screenmate/idefics_25_50_25_merged",
3
+ "architectures": [
4
+ "Idefics2ForConditionalGeneration"
5
+ ],
6
+ "image_token_id": 32001,
7
+ "model_type": "idefics2",
8
+ "perceiver_config": {
9
+ "_name_or_path": "",
10
+ "add_cross_attention": false,
11
+ "architectures": null,
12
+ "attention_dropout": 0.0,
13
+ "bad_words_ids": null,
14
+ "begin_suppress_tokens": null,
15
+ "bos_token_id": null,
16
+ "chunk_size_feed_forward": 0,
17
+ "cross_attention_hidden_size": null,
18
+ "decoder_start_token_id": null,
19
+ "diversity_penalty": 0.0,
20
+ "do_sample": false,
21
+ "early_stopping": false,
22
+ "encoder_no_repeat_ngram_size": 0,
23
+ "eos_token_id": null,
24
+ "exponential_decay_length_penalty": null,
25
+ "finetuning_task": null,
26
+ "forced_bos_token_id": null,
27
+ "forced_eos_token_id": null,
28
+ "hidden_act": "silu",
29
+ "id2label": {
30
+ "0": "LABEL_0",
31
+ "1": "LABEL_1"
32
+ },
33
+ "is_decoder": false,
34
+ "is_encoder_decoder": false,
35
+ "label2id": {
36
+ "LABEL_0": 0,
37
+ "LABEL_1": 1
38
+ },
39
+ "length_penalty": 1.0,
40
+ "max_length": 20,
41
+ "min_length": 0,
42
+ "model_type": "idefics2",
43
+ "no_repeat_ngram_size": 0,
44
+ "num_beam_groups": 1,
45
+ "num_beams": 1,
46
+ "num_key_value_heads": 4,
47
+ "num_return_sequences": 1,
48
+ "output_attentions": false,
49
+ "output_hidden_states": false,
50
+ "output_scores": false,
51
+ "pad_token_id": null,
52
+ "prefix": null,
53
+ "problem_type": null,
54
+ "pruned_heads": {},
55
+ "remove_invalid_values": false,
56
+ "repetition_penalty": 1.0,
57
+ "resampler_depth": 3,
58
+ "resampler_head_dim": 96,
59
+ "resampler_n_heads": 16,
60
+ "resampler_n_latents": 64,
61
+ "return_dict": true,
62
+ "return_dict_in_generate": false,
63
+ "sep_token_id": null,
64
+ "suppress_tokens": null,
65
+ "task_specific_params": null,
66
+ "temperature": 1.0,
67
+ "tf_legacy_loss": false,
68
+ "tie_encoder_decoder": false,
69
+ "tie_word_embeddings": true,
70
+ "tokenizer_class": null,
71
+ "top_k": 50,
72
+ "top_p": 1.0,
73
+ "torch_dtype": null,
74
+ "torchscript": false,
75
+ "typical_p": 1.0,
76
+ "use_bfloat16": false
77
+ },
78
+ "text_config": {
79
+ "_name_or_path": "",
80
+ "add_cross_attention": false,
81
+ "architectures": null,
82
+ "attention_dropout": 0.0,
83
+ "bad_words_ids": null,
84
+ "begin_suppress_tokens": null,
85
+ "bos_token_id": 1,
86
+ "chunk_size_feed_forward": 0,
87
+ "cross_attention_hidden_size": null,
88
+ "decoder_start_token_id": null,
89
+ "diversity_penalty": 0.0,
90
+ "do_sample": false,
91
+ "early_stopping": false,
92
+ "encoder_no_repeat_ngram_size": 0,
93
+ "eos_token_id": 2,
94
+ "exponential_decay_length_penalty": null,
95
+ "finetuning_task": null,
96
+ "forced_bos_token_id": null,
97
+ "forced_eos_token_id": null,
98
+ "hidden_act": "silu",
99
+ "hidden_size": 4096,
100
+ "id2label": {
101
+ "0": "LABEL_0",
102
+ "1": "LABEL_1"
103
+ },
104
+ "initializer_range": 0.02,
105
+ "intermediate_size": 14336,
106
+ "is_decoder": false,
107
+ "is_encoder_decoder": false,
108
+ "label2id": {
109
+ "LABEL_0": 0,
110
+ "LABEL_1": 1
111
+ },
112
+ "length_penalty": 1.0,
113
+ "max_length": 20,
114
+ "max_position_embeddings": 32768,
115
+ "min_length": 0,
116
+ "model_type": "mistral",
117
+ "no_repeat_ngram_size": 0,
118
+ "num_attention_heads": 32,
119
+ "num_beam_groups": 1,
120
+ "num_beams": 1,
121
+ "num_hidden_layers": 32,
122
+ "num_key_value_heads": 8,
123
+ "num_return_sequences": 1,
124
+ "output_attentions": false,
125
+ "output_hidden_states": false,
126
+ "output_scores": false,
127
+ "pad_token_id": 0,
128
+ "prefix": null,
129
+ "problem_type": null,
130
+ "pruned_heads": {},
131
+ "remove_invalid_values": false,
132
+ "repetition_penalty": 1.0,
133
+ "return_dict": true,
134
+ "return_dict_in_generate": false,
135
+ "rms_norm_eps": 1e-05,
136
+ "rope_theta": 10000.0,
137
+ "sep_token_id": null,
138
+ "sliding_window": 4096,
139
+ "suppress_tokens": null,
140
+ "task_specific_params": null,
141
+ "temperature": 1.0,
142
+ "tf_legacy_loss": false,
143
+ "tie_encoder_decoder": false,
144
+ "tie_word_embeddings": false,
145
+ "tokenizer_class": null,
146
+ "top_k": 50,
147
+ "top_p": 1.0,
148
+ "torch_dtype": null,
149
+ "torchscript": false,
150
+ "typical_p": 1.0,
151
+ "use_bfloat16": false,
152
+ "use_cache": true,
153
+ "vocab_size": 33024
154
+ },
155
+ "tie_word_embeddings": false,
156
+ "torch_dtype": "bfloat16",
157
+ "transformers_version": "4.41.0.dev0",
158
+ "use_cache": true,
159
+ "vision_config": {
160
+ "_name_or_path": "",
161
+ "add_cross_attention": false,
162
+ "architectures": null,
163
+ "attention_dropout": 0.0,
164
+ "bad_words_ids": null,
165
+ "begin_suppress_tokens": null,
166
+ "bos_token_id": null,
167
+ "chunk_size_feed_forward": 0,
168
+ "cross_attention_hidden_size": null,
169
+ "decoder_start_token_id": null,
170
+ "diversity_penalty": 0.0,
171
+ "do_sample": false,
172
+ "early_stopping": false,
173
+ "encoder_no_repeat_ngram_size": 0,
174
+ "eos_token_id": null,
175
+ "exponential_decay_length_penalty": null,
176
+ "finetuning_task": null,
177
+ "forced_bos_token_id": null,
178
+ "forced_eos_token_id": null,
179
+ "hidden_act": "gelu_pytorch_tanh",
180
+ "hidden_size": 1152,
181
+ "id2label": {
182
+ "0": "LABEL_0",
183
+ "1": "LABEL_1"
184
+ },
185
+ "image_size": 980,
186
+ "initializer_range": 0.02,
187
+ "intermediate_size": 4304,
188
+ "is_decoder": false,
189
+ "is_encoder_decoder": false,
190
+ "label2id": {
191
+ "LABEL_0": 0,
192
+ "LABEL_1": 1
193
+ },
194
+ "layer_norm_eps": 1e-06,
195
+ "length_penalty": 1.0,
196
+ "max_length": 20,
197
+ "min_length": 0,
198
+ "model_type": "idefics2",
199
+ "no_repeat_ngram_size": 0,
200
+ "num_attention_heads": 16,
201
+ "num_beam_groups": 1,
202
+ "num_beams": 1,
203
+ "num_channels": 3,
204
+ "num_hidden_layers": 27,
205
+ "num_return_sequences": 1,
206
+ "output_attentions": false,
207
+ "output_hidden_states": false,
208
+ "output_scores": false,
209
+ "pad_token_id": null,
210
+ "patch_size": 14,
211
+ "prefix": null,
212
+ "problem_type": null,
213
+ "pruned_heads": {},
214
+ "remove_invalid_values": false,
215
+ "repetition_penalty": 1.0,
216
+ "return_dict": true,
217
+ "return_dict_in_generate": false,
218
+ "sep_token_id": null,
219
+ "suppress_tokens": null,
220
+ "task_specific_params": null,
221
+ "temperature": 1.0,
222
+ "tf_legacy_loss": false,
223
+ "tie_encoder_decoder": false,
224
+ "tie_word_embeddings": true,
225
+ "tokenizer_class": null,
226
+ "top_k": 50,
227
+ "top_p": 1.0,
228
+ "torch_dtype": null,
229
+ "torchscript": false,
230
+ "typical_p": 1.0,
231
+ "use_bfloat16": false
232
+ }
233
+ }
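
A minimal sketch (again, not part of the upload) of reading this config through transformers and inspecting the nested text, vision, and perceiver sub-configs; the repository id is a placeholder.

```python
# Hypothetical inspection of config.json via transformers; repo id is a placeholder.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-org/your-idefics2-checkpoint")

print(config.model_type)                            # idefics2
print(config.text_config.model_type)                # mistral (32 layers, hidden_size 4096)
print(config.vision_config.hidden_size)             # 1152 (image_size 980, patch_size 14)
print(config.perceiver_config.resampler_n_latents)  # 64 latent queries in the connector
print(config.text_config.vocab_size)                # 33024, covering the added tokens
print(config.torch_dtype)                           # torch.bfloat16
```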
generation_config.json ADDED
@@ -0,0 +1,18 @@
+ {
+ "_from_model_config": true,
+ "bad_words_ids": [
+ [
+ 32000
+ ],
+ [
+ 32001
+ ]
+ ],
+ "bos_token_id": 1,
+ "eos_token_id": [
+ 2,
+ 32002
+ ],
+ "pad_token_id": 0,
+ "transformers_version": "4.41.0.dev0"
+ }
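
A minimal sketch of how these generation settings are picked up by generate(): ids 32000 and 32001 (the image placeholder tokens) are never emitted, and decoding stops on </s> (2) or <end_of_utterance> (32002). The repository id and the `model`/`inputs` objects are placeholders.

```python
# Hypothetical look at generation_config.json; repo id is a placeholder.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("your-org/your-idefics2-checkpoint")

print(gen_config.bad_words_ids)  # [[32000], [32001]] -> image placeholder tokens are banned
print(gen_config.eos_token_id)   # [2, 32002] -> stop on </s> or <end_of_utterance>
print(gen_config.pad_token_id)   # 0

# outputs = model.generate(**inputs, generation_config=gen_config)
```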
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc3128dd765558908b7479b29ad3efd360076a2da2a804105d96b81905a4112a
+ size 4975070864
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4f8f5b3d16d945032d13f12797ac43ed3d3a7d3b9e950ca7d2a5dfa8232d354
+ size 4915917232
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:414d628ab0d1178eecf55a62e56c20af5f2ea7d07585312383ebd31d780337be
+ size 4999820504
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:034ed51f5559fc5febdda3efd2361d42bd733c077a118d0188bf0bc7533dec4b
+ size 1931555008
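
The four *.safetensors entries above are Git LFS pointer files (version, oid, size); the shard contents themselves are fetched by LFS. A minimal local check, assuming the shards have been downloaded into the working directory:

```python
# Hypothetical check that downloaded shards match the sizes advertised in the
# LFS pointer files above; file names come from this commit, local paths assumed.
import os

expected_sizes = {
    "model-00001-of-00004.safetensors": 4975070864,
    "model-00002-of-00004.safetensors": 4915917232,
    "model-00003-of-00004.safetensors": 4999820504,
    "model-00004-of-00004.safetensors": 1931555008,
}

for name, size in expected_sizes.items():
    actual = os.path.getsize(name)
    assert actual == size, f"{name}: expected {size} bytes, found {actual}"
```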
model.safetensors.index.json ADDED
@@ -0,0 +1,770 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 16822264288
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00004-of-00004.safetensors",
7
+ "model.connector.modality_projection.down_proj.weight": "model-00001-of-00004.safetensors",
8
+ "model.connector.modality_projection.gate_proj.weight": "model-00001-of-00004.safetensors",
9
+ "model.connector.modality_projection.up_proj.weight": "model-00001-of-00004.safetensors",
10
+ "model.connector.perceiver_resampler.latents": "model-00001-of-00004.safetensors",
11
+ "model.connector.perceiver_resampler.layers.0.input_context_norm.weight": "model-00001-of-00004.safetensors",
12
+ "model.connector.perceiver_resampler.layers.0.input_latents_norm.weight": "model-00001-of-00004.safetensors",
13
+ "model.connector.perceiver_resampler.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
14
+ "model.connector.perceiver_resampler.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
15
+ "model.connector.perceiver_resampler.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.connector.perceiver_resampler.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
17
+ "model.connector.perceiver_resampler.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.connector.perceiver_resampler.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
19
+ "model.connector.perceiver_resampler.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
20
+ "model.connector.perceiver_resampler.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
21
+ "model.connector.perceiver_resampler.layers.1.input_context_norm.weight": "model-00001-of-00004.safetensors",
22
+ "model.connector.perceiver_resampler.layers.1.input_latents_norm.weight": "model-00001-of-00004.safetensors",
23
+ "model.connector.perceiver_resampler.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
24
+ "model.connector.perceiver_resampler.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
25
+ "model.connector.perceiver_resampler.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
26
+ "model.connector.perceiver_resampler.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
27
+ "model.connector.perceiver_resampler.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.connector.perceiver_resampler.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
29
+ "model.connector.perceiver_resampler.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
30
+ "model.connector.perceiver_resampler.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
31
+ "model.connector.perceiver_resampler.layers.2.input_context_norm.weight": "model-00001-of-00004.safetensors",
32
+ "model.connector.perceiver_resampler.layers.2.input_latents_norm.weight": "model-00001-of-00004.safetensors",
33
+ "model.connector.perceiver_resampler.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
34
+ "model.connector.perceiver_resampler.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
35
+ "model.connector.perceiver_resampler.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
36
+ "model.connector.perceiver_resampler.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
37
+ "model.connector.perceiver_resampler.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
38
+ "model.connector.perceiver_resampler.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
39
+ "model.connector.perceiver_resampler.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
40
+ "model.connector.perceiver_resampler.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
41
+ "model.connector.perceiver_resampler.norm.weight": "model-00001-of-00004.safetensors",
42
+ "model.text_model.embed_tokens.weight": "model-00001-of-00004.safetensors",
43
+ "model.text_model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
44
+ "model.text_model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
45
+ "model.text_model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
46
+ "model.text_model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
47
+ "model.text_model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
48
+ "model.text_model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
49
+ "model.text_model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
50
+ "model.text_model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
51
+ "model.text_model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
52
+ "model.text_model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
53
+ "model.text_model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
54
+ "model.text_model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
55
+ "model.text_model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
56
+ "model.text_model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
57
+ "model.text_model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
58
+ "model.text_model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
59
+ "model.text_model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
60
+ "model.text_model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
61
+ "model.text_model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
62
+ "model.text_model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.text_model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.text_model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
65
+ "model.text_model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
66
+ "model.text_model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
67
+ "model.text_model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
68
+ "model.text_model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
69
+ "model.text_model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.text_model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
71
+ "model.text_model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.text_model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
73
+ "model.text_model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
74
+ "model.text_model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
75
+ "model.text_model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.text_model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
77
+ "model.text_model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
78
+ "model.text_model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
79
+ "model.text_model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
80
+ "model.text_model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
81
+ "model.text_model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.text_model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.text_model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
84
+ "model.text_model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
85
+ "model.text_model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
86
+ "model.text_model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
87
+ "model.text_model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.text_model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
89
+ "model.text_model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
90
+ "model.text_model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
91
+ "model.text_model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.text_model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
93
+ "model.text_model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
94
+ "model.text_model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.text_model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.text_model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
97
+ "model.text_model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
98
+ "model.text_model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.text_model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.text_model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
101
+ "model.text_model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
102
+ "model.text_model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
103
+ "model.text_model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
104
+ "model.text_model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
105
+ "model.text_model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.text_model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
107
+ "model.text_model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
108
+ "model.text_model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
109
+ "model.text_model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
110
+ "model.text_model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
111
+ "model.text_model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
112
+ "model.text_model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
113
+ "model.text_model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
114
+ "model.text_model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
115
+ "model.text_model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
116
+ "model.text_model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
117
+ "model.text_model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.text_model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
119
+ "model.text_model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
120
+ "model.text_model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
121
+ "model.text_model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
122
+ "model.text_model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
123
+ "model.text_model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
124
+ "model.text_model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
125
+ "model.text_model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
126
+ "model.text_model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
127
+ "model.text_model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
128
+ "model.text_model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
129
+ "model.text_model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
130
+ "model.text_model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
131
+ "model.text_model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
132
+ "model.text_model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
133
+ "model.text_model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
134
+ "model.text_model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
135
+ "model.text_model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
136
+ "model.text_model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
137
+ "model.text_model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
138
+ "model.text_model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
139
+ "model.text_model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
140
+ "model.text_model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
141
+ "model.text_model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
142
+ "model.text_model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
143
+ "model.text_model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.text_model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
145
+ "model.text_model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
146
+ "model.text_model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
147
+ "model.text_model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
148
+ "model.text_model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
149
+ "model.text_model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
150
+ "model.text_model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
151
+ "model.text_model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
152
+ "model.text_model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
153
+ "model.text_model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
154
+ "model.text_model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
155
+ "model.text_model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
156
+ "model.text_model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
157
+ "model.text_model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
158
+ "model.text_model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
159
+ "model.text_model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
160
+ "model.text_model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
161
+ "model.text_model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
162
+ "model.text_model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
163
+ "model.text_model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
164
+ "model.text_model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
165
+ "model.text_model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
166
+ "model.text_model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
167
+ "model.text_model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
168
+ "model.text_model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
169
+ "model.text_model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
170
+ "model.text_model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
171
+ "model.text_model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.text_model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
173
+ "model.text_model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
174
+ "model.text_model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
175
+ "model.text_model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
176
+ "model.text_model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
177
+ "model.text_model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
178
+ "model.text_model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
179
+ "model.text_model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.text_model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
181
+ "model.text_model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
182
+ "model.text_model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
183
+ "model.text_model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
184
+ "model.text_model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
185
+ "model.text_model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
186
+ "model.text_model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
187
+ "model.text_model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
188
+ "model.text_model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
189
+ "model.text_model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
190
+ "model.text_model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.text_model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
192
+ "model.text_model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
193
+ "model.text_model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
194
+ "model.text_model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
195
+ "model.text_model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.text_model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
197
+ "model.text_model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.text_model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
199
+ "model.text_model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
200
+ "model.text_model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
201
+ "model.text_model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
202
+ "model.text_model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.text_model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
204
+ "model.text_model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
205
+ "model.text_model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
206
+ "model.text_model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
207
+ "model.text_model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.text_model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
209
+ "model.text_model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
210
+ "model.text_model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
211
+ "model.text_model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
212
+ "model.text_model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
213
+ "model.text_model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.text_model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
215
+ "model.text_model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.text_model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
217
+ "model.text_model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
218
+ "model.text_model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
219
+ "model.text_model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.text_model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
221
+ "model.text_model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
222
+ "model.text_model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
223
+ "model.text_model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
224
+ "model.text_model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
225
+ "model.text_model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
226
+ "model.text_model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.text_model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
228
+ "model.text_model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
229
+ "model.text_model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
230
+ "model.text_model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
231
+ "model.text_model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.text_model.layers.28.input_layernorm.weight": "model-00004-of-00004.safetensors",
233
+ "model.text_model.layers.28.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
234
+ "model.text_model.layers.28.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
235
+ "model.text_model.layers.28.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
236
+ "model.text_model.layers.28.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
237
+ "model.text_model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
238
+ "model.text_model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
239
+ "model.text_model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
240
+ "model.text_model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
241
+ "model.text_model.layers.29.input_layernorm.weight": "model-00004-of-00004.safetensors",
242
+ "model.text_model.layers.29.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
243
+ "model.text_model.layers.29.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
244
+ "model.text_model.layers.29.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
245
+ "model.text_model.layers.29.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
246
+ "model.text_model.layers.29.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
247
+ "model.text_model.layers.29.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
248
+ "model.text_model.layers.29.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
249
+ "model.text_model.layers.29.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
250
+ "model.text_model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
251
+ "model.text_model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
252
+ "model.text_model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
253
+ "model.text_model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
254
+ "model.text_model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
255
+ "model.text_model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
256
+ "model.text_model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
257
+ "model.text_model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
258
+ "model.text_model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
259
+ "model.text_model.layers.30.input_layernorm.weight": "model-00004-of-00004.safetensors",
260
+ "model.text_model.layers.30.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
261
+ "model.text_model.layers.30.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
262
+ "model.text_model.layers.30.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
263
+ "model.text_model.layers.30.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
264
+ "model.text_model.layers.30.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
265
+ "model.text_model.layers.30.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
266
+ "model.text_model.layers.30.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
267
+ "model.text_model.layers.30.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
268
+ "model.text_model.layers.31.input_layernorm.weight": "model-00004-of-00004.safetensors",
269
+ "model.text_model.layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
270
+ "model.text_model.layers.31.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
271
+ "model.text_model.layers.31.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
272
+ "model.text_model.layers.31.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
273
+ "model.text_model.layers.31.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
274
+ "model.text_model.layers.31.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
275
+ "model.text_model.layers.31.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
276
+ "model.text_model.layers.31.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
277
+ "model.text_model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
278
+ "model.text_model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
279
+ "model.text_model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.text_model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
281
+ "model.text_model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
282
+ "model.text_model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
283
+ "model.text_model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
284
+ "model.text_model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
285
+ "model.text_model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
286
+ "model.text_model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
287
+ "model.text_model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
288
+ "model.text_model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
289
+ "model.text_model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
290
+ "model.text_model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
291
+ "model.text_model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
292
+ "model.text_model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
293
+ "model.text_model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
294
+ "model.text_model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
295
+ "model.text_model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
296
+ "model.text_model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
297
+ "model.text_model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
298
+ "model.text_model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
299
+ "model.text_model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
300
+ "model.text_model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
301
+ "model.text_model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
302
+ "model.text_model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
303
+ "model.text_model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
304
+ "model.text_model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
305
+ "model.text_model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
306
+ "model.text_model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
307
+ "model.text_model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
308
+ "model.text_model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
309
+ "model.text_model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
310
+ "model.text_model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
311
+ "model.text_model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
312
+ "model.text_model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
313
+ "model.text_model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
314
+ "model.text_model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
315
+ "model.text_model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
316
+ "model.text_model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
317
+ "model.text_model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
318
+ "model.text_model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
319
+ "model.text_model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
320
+ "model.text_model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
321
+ "model.text_model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
322
+ "model.text_model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
323
+ "model.text_model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.text_model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
325
+ "model.text_model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
326
+ "model.text_model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
327
+ "model.text_model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
328
+ "model.text_model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
329
+ "model.text_model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
330
+ "model.text_model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
331
+ "model.text_model.norm.weight": "model-00004-of-00004.safetensors",
332
+ "model.vision_model.embeddings.patch_embedding.bias": "model-00001-of-00004.safetensors",
333
+ "model.vision_model.embeddings.patch_embedding.weight": "model-00001-of-00004.safetensors",
334
+ "model.vision_model.embeddings.position_embedding.weight": "model-00001-of-00004.safetensors",
335
+ "model.vision_model.encoder.layers.0.layer_norm1.bias": "model-00001-of-00004.safetensors",
336
+ "model.vision_model.encoder.layers.0.layer_norm1.weight": "model-00001-of-00004.safetensors",
337
+ "model.vision_model.encoder.layers.0.layer_norm2.bias": "model-00001-of-00004.safetensors",
338
+ "model.vision_model.encoder.layers.0.layer_norm2.weight": "model-00001-of-00004.safetensors",
339
+ "model.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00001-of-00004.safetensors",
340
+ "model.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00001-of-00004.safetensors",
341
+ "model.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00001-of-00004.safetensors",
342
+ "model.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00001-of-00004.safetensors",
343
+ "model.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
344
+ "model.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
345
+ "model.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
346
+ "model.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
347
+ "model.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
348
+ "model.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
349
+ "model.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
350
+ "model.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
351
+ "model.vision_model.encoder.layers.1.layer_norm1.bias": "model-00001-of-00004.safetensors",
352
+ "model.vision_model.encoder.layers.1.layer_norm1.weight": "model-00001-of-00004.safetensors",
353
+ "model.vision_model.encoder.layers.1.layer_norm2.bias": "model-00001-of-00004.safetensors",
354
+ "model.vision_model.encoder.layers.1.layer_norm2.weight": "model-00001-of-00004.safetensors",
355
+ "model.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00001-of-00004.safetensors",
356
+ "model.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00001-of-00004.safetensors",
357
+ "model.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00001-of-00004.safetensors",
358
+ "model.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00001-of-00004.safetensors",
359
+ "model.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
360
+ "model.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
361
+ "model.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
362
+ "model.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
363
+ "model.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
364
+ "model.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
365
+ "model.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
366
+ "model.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
367
+ "model.vision_model.encoder.layers.10.layer_norm1.bias": "model-00001-of-00004.safetensors",
368
+ "model.vision_model.encoder.layers.10.layer_norm1.weight": "model-00001-of-00004.safetensors",
369
+ "model.vision_model.encoder.layers.10.layer_norm2.bias": "model-00001-of-00004.safetensors",
370
+ "model.vision_model.encoder.layers.10.layer_norm2.weight": "model-00001-of-00004.safetensors",
371
+ "model.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00001-of-00004.safetensors",
372
+ "model.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00001-of-00004.safetensors",
373
+ "model.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00001-of-00004.safetensors",
374
+ "model.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00001-of-00004.safetensors",
375
+ "model.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
376
+ "model.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
377
+ "model.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
378
+ "model.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
379
+ "model.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
380
+ "model.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
381
+ "model.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
382
+ "model.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
383
+ "model.vision_model.encoder.layers.11.layer_norm1.bias": "model-00001-of-00004.safetensors",
384
+ "model.vision_model.encoder.layers.11.layer_norm1.weight": "model-00001-of-00004.safetensors",
385
+ "model.vision_model.encoder.layers.11.layer_norm2.bias": "model-00001-of-00004.safetensors",
386
+ "model.vision_model.encoder.layers.11.layer_norm2.weight": "model-00001-of-00004.safetensors",
387
+ "model.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00001-of-00004.safetensors",
388
+ "model.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00001-of-00004.safetensors",
389
+ "model.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00001-of-00004.safetensors",
390
+ "model.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00001-of-00004.safetensors",
391
+ "model.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
392
+ "model.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
393
+ "model.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
394
+ "model.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
395
+ "model.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
396
+ "model.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
397
+ "model.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
398
+ "model.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
399
+ "model.vision_model.encoder.layers.12.layer_norm1.bias": "model-00001-of-00004.safetensors",
400
+ "model.vision_model.encoder.layers.12.layer_norm1.weight": "model-00001-of-00004.safetensors",
401
+ "model.vision_model.encoder.layers.12.layer_norm2.bias": "model-00001-of-00004.safetensors",
402
+ "model.vision_model.encoder.layers.12.layer_norm2.weight": "model-00001-of-00004.safetensors",
403
+ "model.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00001-of-00004.safetensors",
404
+ "model.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00001-of-00004.safetensors",
405
+ "model.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00001-of-00004.safetensors",
406
+ "model.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00001-of-00004.safetensors",
407
+ "model.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
408
+ "model.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
409
+ "model.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
410
+ "model.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
411
+ "model.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
412
+ "model.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
413
+ "model.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
414
+ "model.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
415
+ "model.vision_model.encoder.layers.13.layer_norm1.bias": "model-00001-of-00004.safetensors",
416
+ "model.vision_model.encoder.layers.13.layer_norm1.weight": "model-00001-of-00004.safetensors",
417
+ "model.vision_model.encoder.layers.13.layer_norm2.bias": "model-00001-of-00004.safetensors",
418
+ "model.vision_model.encoder.layers.13.layer_norm2.weight": "model-00001-of-00004.safetensors",
419
+ "model.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00001-of-00004.safetensors",
420
+ "model.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00001-of-00004.safetensors",
421
+ "model.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00001-of-00004.safetensors",
422
+ "model.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00001-of-00004.safetensors",
423
+ "model.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
424
+ "model.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
425
+ "model.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
426
+ "model.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
427
+ "model.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
428
+ "model.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
429
+ "model.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
430
+ "model.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
431
+ "model.vision_model.encoder.layers.14.layer_norm1.bias": "model-00001-of-00004.safetensors",
432
+ "model.vision_model.encoder.layers.14.layer_norm1.weight": "model-00001-of-00004.safetensors",
433
+ "model.vision_model.encoder.layers.14.layer_norm2.bias": "model-00001-of-00004.safetensors",
434
+ "model.vision_model.encoder.layers.14.layer_norm2.weight": "model-00001-of-00004.safetensors",
435
+ "model.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00001-of-00004.safetensors",
436
+ "model.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00001-of-00004.safetensors",
437
+ "model.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00001-of-00004.safetensors",
438
+ "model.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00001-of-00004.safetensors",
439
+ "model.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
440
+ "model.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
441
+ "model.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
442
+ "model.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
443
+ "model.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
444
+ "model.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
445
+ "model.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
446
+ "model.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
447
+ "model.vision_model.encoder.layers.15.layer_norm1.bias": "model-00001-of-00004.safetensors",
448
+ "model.vision_model.encoder.layers.15.layer_norm1.weight": "model-00001-of-00004.safetensors",
449
+ "model.vision_model.encoder.layers.15.layer_norm2.bias": "model-00001-of-00004.safetensors",
450
+ "model.vision_model.encoder.layers.15.layer_norm2.weight": "model-00001-of-00004.safetensors",
451
+ "model.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00001-of-00004.safetensors",
452
+ "model.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00001-of-00004.safetensors",
453
+ "model.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00001-of-00004.safetensors",
454
+ "model.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00001-of-00004.safetensors",
455
+ "model.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
456
+ "model.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
457
+ "model.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
458
+ "model.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
459
+ "model.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
460
+ "model.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
461
+ "model.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
462
+ "model.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
463
+ "model.vision_model.encoder.layers.16.layer_norm1.bias": "model-00001-of-00004.safetensors",
464
+ "model.vision_model.encoder.layers.16.layer_norm1.weight": "model-00001-of-00004.safetensors",
465
+ "model.vision_model.encoder.layers.16.layer_norm2.bias": "model-00001-of-00004.safetensors",
466
+ "model.vision_model.encoder.layers.16.layer_norm2.weight": "model-00001-of-00004.safetensors",
467
+ "model.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00001-of-00004.safetensors",
468
+ "model.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00001-of-00004.safetensors",
469
+ "model.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00001-of-00004.safetensors",
470
+ "model.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00001-of-00004.safetensors",
471
+ "model.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
472
+ "model.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
473
+ "model.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
474
+ "model.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
475
+ "model.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
476
+ "model.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
477
+ "model.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
478
+ "model.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
479
+ "model.vision_model.encoder.layers.17.layer_norm1.bias": "model-00001-of-00004.safetensors",
480
+ "model.vision_model.encoder.layers.17.layer_norm1.weight": "model-00001-of-00004.safetensors",
481
+ "model.vision_model.encoder.layers.17.layer_norm2.bias": "model-00001-of-00004.safetensors",
482
+ "model.vision_model.encoder.layers.17.layer_norm2.weight": "model-00001-of-00004.safetensors",
483
+ "model.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00001-of-00004.safetensors",
484
+ "model.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00001-of-00004.safetensors",
485
+ "model.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00001-of-00004.safetensors",
486
+ "model.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00001-of-00004.safetensors",
487
+ "model.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
488
+ "model.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
489
+ "model.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
490
+ "model.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
491
+ "model.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
492
+ "model.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
493
+ "model.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
494
+ "model.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
495
+ "model.vision_model.encoder.layers.18.layer_norm1.bias": "model-00001-of-00004.safetensors",
496
+ "model.vision_model.encoder.layers.18.layer_norm1.weight": "model-00001-of-00004.safetensors",
497
+ "model.vision_model.encoder.layers.18.layer_norm2.bias": "model-00001-of-00004.safetensors",
498
+ "model.vision_model.encoder.layers.18.layer_norm2.weight": "model-00001-of-00004.safetensors",
499
+ "model.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00001-of-00004.safetensors",
500
+ "model.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00001-of-00004.safetensors",
501
+ "model.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00001-of-00004.safetensors",
502
+ "model.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00001-of-00004.safetensors",
503
+ "model.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
504
+ "model.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
505
+ "model.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
506
+ "model.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
507
+ "model.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
508
+ "model.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
509
+ "model.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
510
+ "model.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
511
+ "model.vision_model.encoder.layers.19.layer_norm1.bias": "model-00001-of-00004.safetensors",
512
+ "model.vision_model.encoder.layers.19.layer_norm1.weight": "model-00001-of-00004.safetensors",
513
+ "model.vision_model.encoder.layers.19.layer_norm2.bias": "model-00001-of-00004.safetensors",
514
+ "model.vision_model.encoder.layers.19.layer_norm2.weight": "model-00001-of-00004.safetensors",
515
+ "model.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00001-of-00004.safetensors",
516
+ "model.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00001-of-00004.safetensors",
517
+ "model.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00001-of-00004.safetensors",
518
+ "model.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00001-of-00004.safetensors",
519
+ "model.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
520
+ "model.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
521
+ "model.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
522
+ "model.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
523
+ "model.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
524
+ "model.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
525
+ "model.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
526
+ "model.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
527
+ "model.vision_model.encoder.layers.2.layer_norm1.bias": "model-00001-of-00004.safetensors",
528
+ "model.vision_model.encoder.layers.2.layer_norm1.weight": "model-00001-of-00004.safetensors",
529
+ "model.vision_model.encoder.layers.2.layer_norm2.bias": "model-00001-of-00004.safetensors",
530
+ "model.vision_model.encoder.layers.2.layer_norm2.weight": "model-00001-of-00004.safetensors",
531
+ "model.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00001-of-00004.safetensors",
532
+ "model.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00001-of-00004.safetensors",
533
+ "model.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00001-of-00004.safetensors",
534
+ "model.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00001-of-00004.safetensors",
535
+ "model.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
536
+ "model.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
537
+ "model.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
538
+ "model.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
539
+ "model.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
540
+ "model.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
541
+ "model.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
542
+ "model.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
543
+ "model.vision_model.encoder.layers.20.layer_norm1.bias": "model-00001-of-00004.safetensors",
544
+ "model.vision_model.encoder.layers.20.layer_norm1.weight": "model-00001-of-00004.safetensors",
545
+ "model.vision_model.encoder.layers.20.layer_norm2.bias": "model-00001-of-00004.safetensors",
546
+ "model.vision_model.encoder.layers.20.layer_norm2.weight": "model-00001-of-00004.safetensors",
547
+ "model.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00001-of-00004.safetensors",
548
+ "model.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00001-of-00004.safetensors",
549
+ "model.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00001-of-00004.safetensors",
550
+ "model.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00001-of-00004.safetensors",
551
+ "model.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
552
+ "model.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
553
+ "model.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
554
+ "model.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
555
+ "model.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
556
+ "model.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
557
+ "model.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
558
+ "model.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
559
+ "model.vision_model.encoder.layers.21.layer_norm1.bias": "model-00001-of-00004.safetensors",
560
+ "model.vision_model.encoder.layers.21.layer_norm1.weight": "model-00001-of-00004.safetensors",
561
+ "model.vision_model.encoder.layers.21.layer_norm2.bias": "model-00001-of-00004.safetensors",
562
+ "model.vision_model.encoder.layers.21.layer_norm2.weight": "model-00001-of-00004.safetensors",
563
+ "model.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00001-of-00004.safetensors",
564
+ "model.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00001-of-00004.safetensors",
565
+ "model.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00001-of-00004.safetensors",
566
+ "model.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00001-of-00004.safetensors",
567
+ "model.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
568
+ "model.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
569
+ "model.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
570
+ "model.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
571
+ "model.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
572
+ "model.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
573
+ "model.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
574
+ "model.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
575
+ "model.vision_model.encoder.layers.22.layer_norm1.bias": "model-00001-of-00004.safetensors",
576
+ "model.vision_model.encoder.layers.22.layer_norm1.weight": "model-00001-of-00004.safetensors",
577
+ "model.vision_model.encoder.layers.22.layer_norm2.bias": "model-00001-of-00004.safetensors",
578
+ "model.vision_model.encoder.layers.22.layer_norm2.weight": "model-00001-of-00004.safetensors",
579
+ "model.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00001-of-00004.safetensors",
580
+ "model.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00001-of-00004.safetensors",
581
+ "model.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00001-of-00004.safetensors",
582
+ "model.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00001-of-00004.safetensors",
583
+ "model.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
584
+ "model.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
585
+ "model.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
586
+ "model.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
587
+ "model.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
588
+ "model.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
589
+ "model.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
590
+ "model.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
591
+ "model.vision_model.encoder.layers.23.layer_norm1.bias": "model-00001-of-00004.safetensors",
592
+ "model.vision_model.encoder.layers.23.layer_norm1.weight": "model-00001-of-00004.safetensors",
593
+ "model.vision_model.encoder.layers.23.layer_norm2.bias": "model-00001-of-00004.safetensors",
594
+ "model.vision_model.encoder.layers.23.layer_norm2.weight": "model-00001-of-00004.safetensors",
595
+ "model.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00001-of-00004.safetensors",
596
+ "model.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00001-of-00004.safetensors",
597
+ "model.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00001-of-00004.safetensors",
598
+ "model.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00001-of-00004.safetensors",
599
+ "model.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
600
+ "model.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
601
+ "model.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
602
+ "model.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
603
+ "model.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
604
+ "model.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
605
+ "model.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
606
+ "model.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
607
+ "model.vision_model.encoder.layers.24.layer_norm1.bias": "model-00001-of-00004.safetensors",
608
+ "model.vision_model.encoder.layers.24.layer_norm1.weight": "model-00001-of-00004.safetensors",
609
+ "model.vision_model.encoder.layers.24.layer_norm2.bias": "model-00001-of-00004.safetensors",
610
+ "model.vision_model.encoder.layers.24.layer_norm2.weight": "model-00001-of-00004.safetensors",
611
+ "model.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00001-of-00004.safetensors",
612
+ "model.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00001-of-00004.safetensors",
613
+ "model.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00001-of-00004.safetensors",
614
+ "model.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00001-of-00004.safetensors",
615
+ "model.vision_model.encoder.layers.24.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
616
+ "model.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
617
+ "model.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
618
+ "model.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
619
+ "model.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
620
+ "model.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
621
+ "model.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
622
+ "model.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
623
+ "model.vision_model.encoder.layers.25.layer_norm1.bias": "model-00001-of-00004.safetensors",
624
+ "model.vision_model.encoder.layers.25.layer_norm1.weight": "model-00001-of-00004.safetensors",
625
+ "model.vision_model.encoder.layers.25.layer_norm2.bias": "model-00001-of-00004.safetensors",
626
+ "model.vision_model.encoder.layers.25.layer_norm2.weight": "model-00001-of-00004.safetensors",
627
+ "model.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00001-of-00004.safetensors",
628
+ "model.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00001-of-00004.safetensors",
629
+ "model.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00001-of-00004.safetensors",
630
+ "model.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00001-of-00004.safetensors",
631
+ "model.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
632
+ "model.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
633
+ "model.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
634
+ "model.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
635
+ "model.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
636
+ "model.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
637
+ "model.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
638
+ "model.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
639
+ "model.vision_model.encoder.layers.26.layer_norm1.bias": "model-00001-of-00004.safetensors",
640
+ "model.vision_model.encoder.layers.26.layer_norm1.weight": "model-00001-of-00004.safetensors",
641
+ "model.vision_model.encoder.layers.26.layer_norm2.bias": "model-00001-of-00004.safetensors",
642
+ "model.vision_model.encoder.layers.26.layer_norm2.weight": "model-00001-of-00004.safetensors",
643
+ "model.vision_model.encoder.layers.26.mlp.fc1.bias": "model-00001-of-00004.safetensors",
644
+ "model.vision_model.encoder.layers.26.mlp.fc1.weight": "model-00001-of-00004.safetensors",
645
+ "model.vision_model.encoder.layers.26.mlp.fc2.bias": "model-00001-of-00004.safetensors",
646
+ "model.vision_model.encoder.layers.26.mlp.fc2.weight": "model-00001-of-00004.safetensors",
647
+ "model.vision_model.encoder.layers.26.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
648
+ "model.vision_model.encoder.layers.26.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
649
+ "model.vision_model.encoder.layers.26.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
650
+ "model.vision_model.encoder.layers.26.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
651
+ "model.vision_model.encoder.layers.26.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
652
+ "model.vision_model.encoder.layers.26.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
653
+ "model.vision_model.encoder.layers.26.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
654
+ "model.vision_model.encoder.layers.26.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
655
+ "model.vision_model.encoder.layers.3.layer_norm1.bias": "model-00001-of-00004.safetensors",
656
+ "model.vision_model.encoder.layers.3.layer_norm1.weight": "model-00001-of-00004.safetensors",
657
+ "model.vision_model.encoder.layers.3.layer_norm2.bias": "model-00001-of-00004.safetensors",
658
+ "model.vision_model.encoder.layers.3.layer_norm2.weight": "model-00001-of-00004.safetensors",
659
+ "model.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00001-of-00004.safetensors",
660
+ "model.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00001-of-00004.safetensors",
661
+ "model.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00001-of-00004.safetensors",
662
+ "model.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00001-of-00004.safetensors",
663
+ "model.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
664
+ "model.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
665
+ "model.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
666
+ "model.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
667
+ "model.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
668
+ "model.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
669
+ "model.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
670
+ "model.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
671
+ "model.vision_model.encoder.layers.4.layer_norm1.bias": "model-00001-of-00004.safetensors",
672
+ "model.vision_model.encoder.layers.4.layer_norm1.weight": "model-00001-of-00004.safetensors",
673
+ "model.vision_model.encoder.layers.4.layer_norm2.bias": "model-00001-of-00004.safetensors",
674
+ "model.vision_model.encoder.layers.4.layer_norm2.weight": "model-00001-of-00004.safetensors",
675
+ "model.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00001-of-00004.safetensors",
676
+ "model.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00001-of-00004.safetensors",
677
+ "model.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00001-of-00004.safetensors",
678
+ "model.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00001-of-00004.safetensors",
679
+ "model.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
680
+ "model.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
681
+ "model.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
682
+ "model.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
683
+ "model.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
684
+ "model.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
685
+ "model.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
686
+ "model.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
687
+ "model.vision_model.encoder.layers.5.layer_norm1.bias": "model-00001-of-00004.safetensors",
688
+ "model.vision_model.encoder.layers.5.layer_norm1.weight": "model-00001-of-00004.safetensors",
689
+ "model.vision_model.encoder.layers.5.layer_norm2.bias": "model-00001-of-00004.safetensors",
690
+ "model.vision_model.encoder.layers.5.layer_norm2.weight": "model-00001-of-00004.safetensors",
691
+ "model.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00001-of-00004.safetensors",
692
+ "model.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00001-of-00004.safetensors",
693
+ "model.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00001-of-00004.safetensors",
694
+ "model.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00001-of-00004.safetensors",
695
+ "model.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
696
+ "model.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
697
+ "model.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
698
+ "model.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
699
+ "model.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
700
+ "model.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
701
+ "model.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
702
+ "model.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
703
+ "model.vision_model.encoder.layers.6.layer_norm1.bias": "model-00001-of-00004.safetensors",
704
+ "model.vision_model.encoder.layers.6.layer_norm1.weight": "model-00001-of-00004.safetensors",
705
+ "model.vision_model.encoder.layers.6.layer_norm2.bias": "model-00001-of-00004.safetensors",
706
+ "model.vision_model.encoder.layers.6.layer_norm2.weight": "model-00001-of-00004.safetensors",
707
+ "model.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00001-of-00004.safetensors",
708
+ "model.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00001-of-00004.safetensors",
709
+ "model.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00001-of-00004.safetensors",
710
+ "model.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00001-of-00004.safetensors",
711
+ "model.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
712
+ "model.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
713
+ "model.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
714
+ "model.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
715
+ "model.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
716
+ "model.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
717
+ "model.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
718
+ "model.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
719
+ "model.vision_model.encoder.layers.7.layer_norm1.bias": "model-00001-of-00004.safetensors",
720
+ "model.vision_model.encoder.layers.7.layer_norm1.weight": "model-00001-of-00004.safetensors",
721
+ "model.vision_model.encoder.layers.7.layer_norm2.bias": "model-00001-of-00004.safetensors",
722
+ "model.vision_model.encoder.layers.7.layer_norm2.weight": "model-00001-of-00004.safetensors",
723
+ "model.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00001-of-00004.safetensors",
724
+ "model.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00001-of-00004.safetensors",
725
+ "model.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00001-of-00004.safetensors",
726
+ "model.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00001-of-00004.safetensors",
727
+ "model.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
728
+ "model.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
729
+ "model.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
730
+ "model.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
731
+ "model.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
732
+ "model.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
733
+ "model.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
734
+ "model.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
735
+ "model.vision_model.encoder.layers.8.layer_norm1.bias": "model-00001-of-00004.safetensors",
736
+ "model.vision_model.encoder.layers.8.layer_norm1.weight": "model-00001-of-00004.safetensors",
737
+ "model.vision_model.encoder.layers.8.layer_norm2.bias": "model-00001-of-00004.safetensors",
738
+ "model.vision_model.encoder.layers.8.layer_norm2.weight": "model-00001-of-00004.safetensors",
739
+ "model.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00001-of-00004.safetensors",
740
+ "model.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00001-of-00004.safetensors",
741
+ "model.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00001-of-00004.safetensors",
742
+ "model.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00001-of-00004.safetensors",
743
+ "model.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
744
+ "model.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
745
+ "model.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
746
+ "model.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
747
+ "model.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
748
+ "model.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
749
+ "model.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
750
+ "model.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
751
+ "model.vision_model.encoder.layers.9.layer_norm1.bias": "model-00001-of-00004.safetensors",
752
+ "model.vision_model.encoder.layers.9.layer_norm1.weight": "model-00001-of-00004.safetensors",
753
+ "model.vision_model.encoder.layers.9.layer_norm2.bias": "model-00001-of-00004.safetensors",
754
+ "model.vision_model.encoder.layers.9.layer_norm2.weight": "model-00001-of-00004.safetensors",
755
+ "model.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00001-of-00004.safetensors",
756
+ "model.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00001-of-00004.safetensors",
757
+ "model.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00001-of-00004.safetensors",
758
+ "model.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00001-of-00004.safetensors",
759
+ "model.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
760
+ "model.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
761
+ "model.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00001-of-00004.safetensors",
762
+ "model.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00001-of-00004.safetensors",
763
+ "model.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
764
+ "model.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
765
+ "model.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
766
+ "model.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
767
+ "model.vision_model.post_layernorm.bias": "model-00001-of-00004.safetensors",
768
+ "model.vision_model.post_layernorm.weight": "model-00001-of-00004.safetensors"
769
+ }
770
+ }
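
The entries above close out the weight map of model.safetensors.index.json: every parameter name is keyed to one of the four shard files, and a layer's tensors can legitimately straddle a shard boundary (layer 28's attention projections sit in shard 3 while its MLP and norm weights sit in shard 4). Below is a minimal sketch of how such an index can be used directly, assuming a local checkout of this repository and the `safetensors` package; the tensor name is taken from the map above.

```python
import json
from safetensors import safe_open

checkpoint_dir = "."  # assumed: a local checkout of this repository

# The index maps each parameter name to the shard file that stores it.
with open(f"{checkpoint_dir}/model.safetensors.index.json") as f:
    weight_map = json.load(f)["weight_map"]

# Find the shard that holds one of the tensors listed above ...
name = "model.text_model.layers.28.self_attn.q_proj.weight"
shard = weight_map[name]
print(name, "->", shard)

# ... and read just that tensor, without loading the whole shard.
with safe_open(f"{checkpoint_dir}/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)
print(tuple(tensor.shape), tensor.dtype)
```

Loading the model with `from_pretrained` performs the same shard lookup automatically when it finds the index file; the manual path is only useful for inspection or partial loading.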
special_tokens_map.json ADDED
@@ -0,0 +1,35 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<fake_token_around_image>",
4
+ "<image>",
5
+ "<end_of_utterance>"
6
+ ],
7
+ "bos_token": {
8
+ "content": "<s>",
9
+ "lstrip": false,
10
+ "normalized": false,
11
+ "rstrip": false,
12
+ "single_word": false
13
+ },
14
+ "eos_token": {
15
+ "content": "</s>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false
20
+ },
21
+ "pad_token": {
22
+ "content": "<unk>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false
27
+ },
28
+ "unk_token": {
29
+ "content": "<unk>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false
34
+ }
35
+ }
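
special_tokens_map.json above registers the three multimodal markers as additional special tokens and points bos/eos/unk at the SentencePiece defaults, with `<unk>` reused as the padding token. A quick check, assuming the repository is checked out locally and loaded with `transformers` (the path below is a placeholder):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # placeholder path to this repo

# Each additional special token should map to a single, dedicated id
# rather than being split by the underlying SentencePiece model.
for t in ["<fake_token_around_image>", "<image>", "<end_of_utterance>"]:
    print(t, "->", tok.convert_tokens_to_ids(t))

print("bos:", tok.bos_token, "eos:", tok.eos_token,
      "pad:", tok.pad_token, "unk:", tok.unk_token)
```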
tokenizer.model ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
3
+ size 493443
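
tokenizer.model is committed as a Git LFS pointer rather than the binary itself: the three lines record the LFS spec version, the SHA-256 of the real SentencePiece model, and its size in bytes. After fetching the actual file (for example via `git lfs pull` or the Hub client), it can be checked against the pointer; a small sketch using only the standard library, with the hash and size copied from the pointer above:

```python
import hashlib
import os

# Values copied from the LFS pointer above.
EXPECTED_SHA256 = "dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055"
EXPECTED_SIZE = 493443  # bytes

path = "tokenizer.model"  # the resolved file, not the pointer

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_SHA256, "hash mismatch"

print("tokenizer.model matches its LFS pointer")
```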
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff