dima806 committed on
Commit
f0cbdaf
1 Parent(s): 449646b

Upload folder using huggingface_hub

checkpoint-2610/config.json ADDED
@@ -0,0 +1,100 @@
+ {
+ "_name_or_path": "google/vit-base-patch16-224-in21k",
+ "architectures": [
+ "ViTForImageClassification"
+ ],
+ "attention_probs_dropout_prob": 0.0,
+ "encoder_stride": 16,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "lemon",
+ "1": "orange",
+ "2": "beetroot",
+ "3": "mango",
+ "4": "chilli pepper",
+ "5": "banana",
+ "6": "cauliflower",
+ "7": "cucumber",
+ "8": "raddish",
+ "9": "grapes",
+ "10": "corn",
+ "11": "pomegranate",
+ "12": "bell pepper",
+ "13": "peas",
+ "14": "pear",
+ "15": "sweetpotato",
+ "16": "carrot",
+ "17": "capsicum",
+ "18": "spinach",
+ "19": "apple",
+ "20": "eggplant",
+ "21": "tomato",
+ "22": "paprika",
+ "23": "ginger",
+ "24": "pineapple",
+ "25": "garlic",
+ "26": "soy beans",
+ "27": "watermelon",
+ "28": "cabbage",
+ "29": "potato",
+ "30": "lettuce",
+ "31": "sweetcorn",
+ "32": "onion",
+ "33": "turnip",
+ "34": "jalepeno",
+ "35": "kiwi"
+ },
+ "image_size": 224,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "apple": 19,
+ "banana": 5,
+ "beetroot": 2,
+ "bell pepper": 12,
+ "cabbage": 28,
+ "capsicum": 17,
+ "carrot": 16,
+ "cauliflower": 6,
+ "chilli pepper": 4,
+ "corn": 10,
+ "cucumber": 7,
+ "eggplant": 20,
+ "garlic": 25,
+ "ginger": 23,
+ "grapes": 9,
+ "jalepeno": 34,
+ "kiwi": 35,
+ "lemon": 0,
+ "lettuce": 30,
+ "mango": 3,
+ "onion": 32,
+ "orange": 1,
+ "paprika": 22,
+ "pear": 14,
+ "peas": 13,
+ "pineapple": 24,
+ "pomegranate": 11,
+ "potato": 29,
+ "raddish": 8,
+ "soy beans": 26,
+ "spinach": 18,
+ "sweetcorn": 31,
+ "sweetpotato": 15,
+ "tomato": 21,
+ "turnip": 33,
+ "watermelon": 27
+ },
+ "layer_norm_eps": 1e-12,
+ "model_type": "vit",
+ "num_attention_heads": 12,
+ "num_channels": 3,
+ "num_hidden_layers": 12,
+ "patch_size": 16,
+ "problem_type": "single_label_classification",
+ "qkv_bias": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.32.1"
+ }
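
The checkpoint config above fine-tunes google/vit-base-patch16-224-in21k into a 36-class fruit-and-vegetable classifier, with id2label and label2id giving the two directions of the class mapping. A minimal sketch of loading the checkpoint and inspecting that mapping, assuming the transformers library is installed and the checkpoint directory is available locally (the path is illustrative):

from transformers import ViTForImageClassification

# Load the fine-tuned classifier; config.json and pytorch_model.bin
# from this checkpoint directory are enough for inference.
model = ViTForImageClassification.from_pretrained("checkpoint-2610")

# id2label/label2id come straight from the config shown above.
print(model.config.num_labels)          # 36
print(model.config.id2label[19])        # apple
print(model.config.label2id["banana"])  # 5
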
checkpoint-2610/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4fd05ba76b86ed48f3319c6879561ce216b92221b746979f669375b01568402
+ size 686777605
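
The three lines above are not the optimizer state itself but a Git LFS pointer: the binary is stored separately, identified by its SHA-256 object id (oid) and byte size, and is materialized when the repository is fetched with Git LFS or the huggingface_hub client. A small sketch of checking a downloaded blob against such a pointer; the file paths are illustrative, not part of this commit:

import hashlib
import os

def parse_pointer(pointer_path):
    # The pointer is three "key value" lines: version, oid, size.
    fields = dict(line.split(" ", 1) for line in open(pointer_path).read().splitlines() if line)
    return fields["oid"].split(":", 1)[1], int(fields["size"])

def matches_pointer(blob_path, pointer_path):
    oid, size = parse_pointer(pointer_path)
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == oid and os.path.getsize(blob_path) == size
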
checkpoint-2610/preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "image_processor_type": "ViTImageProcessor",
+ "image_std": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "resample": 2,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "height": 224,
+ "width": 224
+ }
+ }
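
The preprocessor config above describes the ViT input pipeline: resize to 224x224 (resample 2 is bilinear), rescale pixel values by 1/255 (0.00392156862745098), then normalize each channel with mean 0.5 and std 0.5, which maps inputs into [-1, 1]. A minimal sketch of applying it, assuming transformers and Pillow are installed (the image path is illustrative):

from PIL import Image
from transformers import ViTImageProcessor

# Reads preprocessor_config.json from the checkpoint directory.
processor = ViTImageProcessor.from_pretrained("checkpoint-2610")

image = Image.open("some_fruit.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # torch.Size([1, 3, 224, 224])
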
checkpoint-2610/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c03c6108c541597eb29ce2595c0860d4c96ad57cdae8d295ff5c99053174269a
+ size 343373293
checkpoint-2610/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec1fac373a7eb454a0522046dae2dcb9b3e396e4c7b06b3df7aaea2929f7ef86
+ size 14575
checkpoint-2610/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53bd11292f7d02123a8647c42c345eabb4bf050dd7915ae9f7f97a9300a34e87
+ size 627
checkpoint-2610/trainer_state.json ADDED
@@ -0,0 +1,319 @@
+ {
+ "best_metric": 0.9747084379196167,
+ "best_model_checkpoint": "fruit_vegetable_image_detection/checkpoint-2610",
+ "epoch": 30.0,
+ "eval_steps": 500,
+ "global_step": 2610,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 1.0,
+ "eval_accuracy": 0.45217391304347826,
+ "eval_loss": 3.413668394088745,
+ "eval_runtime": 31.8642,
+ "eval_samples_per_second": 21.654,
+ "eval_steps_per_second": 2.73,
+ "step": 87
+ },
+ {
+ "epoch": 2.0,
+ "eval_accuracy": 0.7623188405797101,
+ "eval_loss": 3.100569725036621,
+ "eval_runtime": 31.6151,
+ "eval_samples_per_second": 21.825,
+ "eval_steps_per_second": 2.752,
+ "step": 174
+ },
+ {
+ "epoch": 3.0,
+ "eval_accuracy": 0.8420289855072464,
+ "eval_loss": 2.817446231842041,
+ "eval_runtime": 31.4959,
+ "eval_samples_per_second": 21.908,
+ "eval_steps_per_second": 2.762,
+ "step": 261
+ },
+ {
+ "epoch": 4.0,
+ "eval_accuracy": 0.8579710144927536,
+ "eval_loss": 2.5662131309509277,
+ "eval_runtime": 31.3214,
+ "eval_samples_per_second": 22.03,
+ "eval_steps_per_second": 2.778,
+ "step": 348
+ },
+ {
+ "epoch": 5.0,
+ "eval_accuracy": 0.8753623188405797,
+ "eval_loss": 2.3473355770111084,
+ "eval_runtime": 31.4215,
+ "eval_samples_per_second": 21.96,
+ "eval_steps_per_second": 2.769,
+ "step": 435
+ },
+ {
+ "epoch": 5.75,
+ "learning_rate": 8.242187500000001e-06,
+ "loss": 2.8278,
+ "step": 500
+ },
+ {
+ "epoch": 6.0,
+ "eval_accuracy": 0.8739130434782608,
+ "eval_loss": 2.1650309562683105,
+ "eval_runtime": 31.0716,
+ "eval_samples_per_second": 22.207,
+ "eval_steps_per_second": 2.8,
+ "step": 522
+ },
+ {
+ "epoch": 7.0,
+ "eval_accuracy": 0.8826086956521739,
+ "eval_loss": 2.007321834564209,
+ "eval_runtime": 31.1654,
+ "eval_samples_per_second": 22.14,
+ "eval_steps_per_second": 2.792,
+ "step": 609
+ },
+ {
+ "epoch": 8.0,
+ "eval_accuracy": 0.8884057971014493,
+ "eval_loss": 1.872201919555664,
+ "eval_runtime": 32.285,
+ "eval_samples_per_second": 21.372,
+ "eval_steps_per_second": 2.695,
+ "step": 696
+ },
+ {
+ "epoch": 9.0,
+ "eval_accuracy": 0.8913043478260869,
+ "eval_loss": 1.7544584274291992,
+ "eval_runtime": 31.8948,
+ "eval_samples_per_second": 21.634,
+ "eval_steps_per_second": 2.728,
+ "step": 783
+ },
+ {
+ "epoch": 10.0,
+ "eval_accuracy": 0.8855072463768116,
+ "eval_loss": 1.647611141204834,
+ "eval_runtime": 35.3695,
+ "eval_samples_per_second": 19.508,
+ "eval_steps_per_second": 2.46,
+ "step": 870
+ },
+ {
+ "epoch": 11.0,
+ "eval_accuracy": 0.8927536231884058,
+ "eval_loss": 1.5588265657424927,
+ "eval_runtime": 31.805,
+ "eval_samples_per_second": 21.695,
+ "eval_steps_per_second": 2.735,
+ "step": 957
+ },
+ {
+ "epoch": 11.49,
+ "learning_rate": 6.2890625e-06,
+ "loss": 1.6708,
+ "step": 1000
+ },
+ {
+ "epoch": 12.0,
+ "eval_accuracy": 0.8942028985507247,
+ "eval_loss": 1.4784280061721802,
+ "eval_runtime": 31.5625,
+ "eval_samples_per_second": 21.861,
+ "eval_steps_per_second": 2.756,
+ "step": 1044
+ },
+ {
+ "epoch": 13.0,
+ "eval_accuracy": 0.8971014492753623,
+ "eval_loss": 1.4085804224014282,
+ "eval_runtime": 31.4333,
+ "eval_samples_per_second": 21.951,
+ "eval_steps_per_second": 2.768,
+ "step": 1131
+ },
+ {
+ "epoch": 14.0,
+ "eval_accuracy": 0.8985507246376812,
+ "eval_loss": 1.3446983098983765,
+ "eval_runtime": 31.561,
+ "eval_samples_per_second": 21.862,
+ "eval_steps_per_second": 2.757,
+ "step": 1218
+ },
+ {
+ "epoch": 15.0,
+ "eval_accuracy": 0.8985507246376812,
+ "eval_loss": 1.2906484603881836,
+ "eval_runtime": 31.4843,
+ "eval_samples_per_second": 21.916,
+ "eval_steps_per_second": 2.763,
+ "step": 1305
+ },
+ {
+ "epoch": 16.0,
+ "eval_accuracy": 0.9,
+ "eval_loss": 1.2425957918167114,
+ "eval_runtime": 31.4644,
+ "eval_samples_per_second": 21.93,
+ "eval_steps_per_second": 2.765,
+ "step": 1392
+ },
+ {
+ "epoch": 17.0,
+ "eval_accuracy": 0.9057971014492754,
+ "eval_loss": 1.1979601383209229,
+ "eval_runtime": 31.4077,
+ "eval_samples_per_second": 21.969,
+ "eval_steps_per_second": 2.77,
+ "step": 1479
+ },
+ {
+ "epoch": 17.24,
+ "learning_rate": 4.3359375e-06,
+ "loss": 1.1506,
+ "step": 1500
+ },
+ {
+ "epoch": 18.0,
+ "eval_accuracy": 0.9101449275362319,
+ "eval_loss": 1.1607540845870972,
+ "eval_runtime": 31.118,
+ "eval_samples_per_second": 22.174,
+ "eval_steps_per_second": 2.796,
+ "step": 1566
+ },
+ {
+ "epoch": 19.0,
+ "eval_accuracy": 0.9130434782608695,
+ "eval_loss": 1.1271917819976807,
+ "eval_runtime": 31.8607,
+ "eval_samples_per_second": 21.657,
+ "eval_steps_per_second": 2.731,
+ "step": 1653
+ },
+ {
+ "epoch": 20.0,
+ "eval_accuracy": 0.908695652173913,
+ "eval_loss": 1.0992004871368408,
+ "eval_runtime": 31.7729,
+ "eval_samples_per_second": 21.717,
+ "eval_steps_per_second": 2.738,
+ "step": 1740
+ },
+ {
+ "epoch": 21.0,
+ "eval_accuracy": 0.9144927536231884,
+ "eval_loss": 1.0763933658599854,
+ "eval_runtime": 31.7621,
+ "eval_samples_per_second": 21.724,
+ "eval_steps_per_second": 2.739,
+ "step": 1827
+ },
+ {
+ "epoch": 22.0,
+ "eval_accuracy": 0.9115942028985508,
+ "eval_loss": 1.0521405935287476,
+ "eval_runtime": 31.7427,
+ "eval_samples_per_second": 21.737,
+ "eval_steps_per_second": 2.741,
+ "step": 1914
+ },
+ {
+ "epoch": 22.99,
+ "learning_rate": 2.3828125000000003e-06,
+ "loss": 0.8876,
+ "step": 2000
+ },
+ {
+ "epoch": 23.0,
+ "eval_accuracy": 0.9130434782608695,
+ "eval_loss": 1.0325771570205688,
+ "eval_runtime": 32.3662,
+ "eval_samples_per_second": 21.319,
+ "eval_steps_per_second": 2.688,
+ "step": 2001
+ },
+ {
+ "epoch": 24.0,
+ "eval_accuracy": 0.9144927536231884,
+ "eval_loss": 1.0169775485992432,
+ "eval_runtime": 32.1403,
+ "eval_samples_per_second": 21.468,
+ "eval_steps_per_second": 2.707,
+ "step": 2088
+ },
+ {
+ "epoch": 25.0,
+ "eval_accuracy": 0.9115942028985508,
+ "eval_loss": 1.0033783912658691,
+ "eval_runtime": 31.6883,
+ "eval_samples_per_second": 21.775,
+ "eval_steps_per_second": 2.745,
+ "step": 2175
+ },
+ {
+ "epoch": 26.0,
+ "eval_accuracy": 0.9130434782608695,
+ "eval_loss": 0.99229896068573,
+ "eval_runtime": 31.8,
+ "eval_samples_per_second": 21.698,
+ "eval_steps_per_second": 2.736,
+ "step": 2262
+ },
+ {
+ "epoch": 27.0,
+ "eval_accuracy": 0.9115942028985508,
+ "eval_loss": 0.9860368371009827,
+ "eval_runtime": 31.6691,
+ "eval_samples_per_second": 21.788,
+ "eval_steps_per_second": 2.747,
+ "step": 2349
+ },
+ {
+ "epoch": 28.0,
+ "eval_accuracy": 0.9130434782608695,
+ "eval_loss": 0.9801862239837646,
+ "eval_runtime": 31.6203,
+ "eval_samples_per_second": 21.821,
+ "eval_steps_per_second": 2.751,
+ "step": 2436
+ },
+ {
+ "epoch": 28.74,
+ "learning_rate": 4.296875e-07,
+ "loss": 0.7689,
+ "step": 2500
+ },
+ {
+ "epoch": 29.0,
+ "eval_accuracy": 0.9130434782608695,
+ "eval_loss": 0.9760327935218811,
+ "eval_runtime": 31.5973,
+ "eval_samples_per_second": 21.837,
+ "eval_steps_per_second": 2.753,
+ "step": 2523
+ },
+ {
+ "epoch": 30.0,
+ "eval_accuracy": 0.9130434782608695,
+ "eval_loss": 0.9747084379196167,
+ "eval_runtime": 31.6656,
+ "eval_samples_per_second": 21.79,
+ "eval_steps_per_second": 2.747,
+ "step": 2610
+ }
+ ],
+ "logging_steps": 500,
+ "max_steps": 2610,
+ "num_train_epochs": 30,
+ "save_steps": 500,
+ "total_flos": 6.413641062145229e+18,
+ "trial_name": null,
+ "trial_params": null
+ }
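
trainer_state.json records one evaluation entry per epoch (plus a training-loss entry every 500 steps) in log_history; eval accuracy climbs from about 0.45 after epoch 1 to about 0.91 at epoch 30, and the best checkpoint is the final step 2610. A small sketch of reading that history back out (the path is illustrative):

import json

with open("checkpoint-2610/trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-epoch evaluation entries; skip the training-loss logs.
evals = [entry for entry in state["log_history"] if "eval_accuracy" in entry]
for entry in evals:
    print(f"epoch {entry['epoch']:>4}: accuracy={entry['eval_accuracy']:.4f} loss={entry['eval_loss']:.4f}")

print("best checkpoint:", state["best_model_checkpoint"], "best metric:", state["best_metric"])
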
checkpoint-2610/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1c3fe248ae319cf61cde26b541f95ca083199992b947e7f65644515c053f5b2
+ size 4027
config.json ADDED
@@ -0,0 +1,100 @@
+ {
+ "_name_or_path": "google/vit-base-patch16-224-in21k",
+ "architectures": [
+ "ViTForImageClassification"
+ ],
+ "attention_probs_dropout_prob": 0.0,
+ "encoder_stride": 16,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "lemon",
+ "1": "orange",
+ "2": "beetroot",
+ "3": "mango",
+ "4": "chilli pepper",
+ "5": "banana",
+ "6": "cauliflower",
+ "7": "cucumber",
+ "8": "raddish",
+ "9": "grapes",
+ "10": "corn",
+ "11": "pomegranate",
+ "12": "bell pepper",
+ "13": "peas",
+ "14": "pear",
+ "15": "sweetpotato",
+ "16": "carrot",
+ "17": "capsicum",
+ "18": "spinach",
+ "19": "apple",
+ "20": "eggplant",
+ "21": "tomato",
+ "22": "paprika",
+ "23": "ginger",
+ "24": "pineapple",
+ "25": "garlic",
+ "26": "soy beans",
+ "27": "watermelon",
+ "28": "cabbage",
+ "29": "potato",
+ "30": "lettuce",
+ "31": "sweetcorn",
+ "32": "onion",
+ "33": "turnip",
+ "34": "jalepeno",
+ "35": "kiwi"
+ },
+ "image_size": 224,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "apple": 19,
+ "banana": 5,
+ "beetroot": 2,
+ "bell pepper": 12,
+ "cabbage": 28,
+ "capsicum": 17,
+ "carrot": 16,
+ "cauliflower": 6,
+ "chilli pepper": 4,
+ "corn": 10,
+ "cucumber": 7,
+ "eggplant": 20,
+ "garlic": 25,
+ "ginger": 23,
+ "grapes": 9,
+ "jalepeno": 34,
+ "kiwi": 35,
+ "lemon": 0,
+ "lettuce": 30,
+ "mango": 3,
+ "onion": 32,
+ "orange": 1,
+ "paprika": 22,
+ "pear": 14,
+ "peas": 13,
+ "pineapple": 24,
+ "pomegranate": 11,
+ "potato": 29,
+ "raddish": 8,
+ "soy beans": 26,
+ "spinach": 18,
+ "sweetcorn": 31,
+ "sweetpotato": 15,
+ "tomato": 21,
+ "turnip": 33,
+ "watermelon": 27
+ },
+ "layer_norm_eps": 1e-12,
+ "model_type": "vit",
+ "num_attention_heads": 12,
+ "num_channels": 3,
+ "num_hidden_layers": 12,
+ "patch_size": 16,
+ "problem_type": "single_label_classification",
+ "qkv_bias": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.32.1"
+ }
preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "image_processor_type": "ViTImageProcessor",
+ "image_std": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "resample": 2,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "height": 224,
+ "width": 224
+ }
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c03c6108c541597eb29ce2595c0860d4c96ad57cdae8d295ff5c99053174269a
+ size 343373293
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1c3fe248ae319cf61cde26b541f95ca083199992b947e7f65644515c053f5b2
+ size 4027
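
With config.json, preprocessor_config.json, and pytorch_model.bin also added at the repository root, the repository can be used directly for inference. A minimal sketch with the transformers image-classification pipeline, assuming transformers is installed; "." stands for a local clone of this repository, and the image path is illustrative:

from transformers import pipeline

# "." assumes a local clone of this repository; a Hub repo id works the same way.
classifier = pipeline("image-classification", model=".")
print(classifier("some_fruit.jpg", top_k=3))
# e.g. [{"label": "banana", "score": ...}, ...]
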