lamrin8224 committed
Commit fe41745
1 parent: 0873176

Upload folder using huggingface_hub

checkpoint-11940/config.json ADDED
@@ -0,0 +1,232 @@
+ {
+ "_name_or_path": "google/vit-base-patch16-224-in21k",
+ "architectures": [
+ "ViTForImageClassification"
+ ],
+ "attention_probs_dropout_prob": 0.0,
+ "encoder_stride": 16,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "bougainvillea",
+ "1": "gazania",
+ "2": "azalea",
+ "3": "magnolia",
+ "4": "love in the mist",
+ "5": "petunia",
+ "6": "black-eyed susan",
+ "7": "canna lily",
+ "8": "monkshood",
+ "9": "mallow",
+ "10": "snapdragon",
+ "11": "red ginger",
+ "12": "silverbush",
+ "13": "ball moss",
+ "14": "lotus lotus",
+ "15": "geranium",
+ "16": "orange dahlia",
+ "17": "desert-rose",
+ "18": "watercress",
+ "19": "corn poppy",
+ "20": "yellow iris",
+ "21": "common dandelion",
+ "22": "water lily",
+ "23": "bishop of llandaff",
+ "24": "pink-yellow dahlia",
+ "25": "lenten rose",
+ "26": "wallflower",
+ "27": "siam tulip",
+ "28": "pincushion flower",
+ "29": "frangipani",
+ "30": "spring crocus",
+ "31": "bolero deep blue",
+ "32": "alpine sea holly",
+ "33": "stemless gentian",
+ "34": "sunflower",
+ "35": "tiger lily",
+ "36": "clematis",
+ "37": "californian poppy",
+ "38": "foxglove",
+ "39": "pink primrose",
+ "40": "daffodil",
+ "41": "moon orchid",
+ "42": "cyclamen",
+ "43": "morning glory",
+ "44": "anthurium",
+ "45": "sweet pea",
+ "46": "rose",
+ "47": "oxeye daisy",
+ "48": "grape hyacinth",
+ "49": "fire lily",
+ "50": "globe thistle",
+ "51": "king protea",
+ "52": "tree mallow",
+ "53": "poinsettia",
+ "54": "english marigold",
+ "55": "toad lily",
+ "56": "tree poppy",
+ "57": "wild pansy",
+ "58": "fritillary",
+ "59": "pelargonium",
+ "60": "gaura",
+ "61": "hibiscus",
+ "62": "barbeton daisy",
+ "63": "windflower",
+ "64": "balloon flower",
+ "65": "bearded iris",
+ "66": "carnation",
+ "67": "great masterwort",
+ "68": "buttercup",
+ "69": "bird of paradise",
+ "70": "hard-leaved pocket orchid",
+ "71": "hippeastrum",
+ "72": "sweet william",
+ "73": "japanese anemone",
+ "74": "spear thistle",
+ "75": "bee balm",
+ "76": "camellia",
+ "77": "prince of wales feathers",
+ "78": "cape flower",
+ "79": "ruby-lipped cattleya",
+ "80": "thorn apple",
+ "81": "cautleya spicata",
+ "82": "mexican petunia",
+ "83": "peruvian lily",
+ "84": "osteospermum",
+ "85": "bromelia",
+ "86": "globe-flower",
+ "87": "giant white arum lily",
+ "88": "purple coneflower",
+ "89": "passion flower",
+ "90": "colt's foot",
+ "91": "primula",
+ "92": "blanket flower",
+ "93": "marigold",
+ "94": "canterbury bells",
+ "95": "artichoke",
+ "96": "mexican aster",
+ "97": "sword lily",
+ "98": "columbine",
+ "99": "blackberry lily",
+ "100": "garden phlox",
+ "101": "trumpet creeper"
+ },
+ "image_size": 224,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "alpine sea holly": 32,
+ "anthurium": 44,
+ "artichoke": 95,
+ "azalea": 2,
+ "ball moss": 13,
+ "balloon flower": 64,
+ "barbeton daisy": 62,
+ "bearded iris": 65,
+ "bee balm": 75,
+ "bird of paradise": 69,
+ "bishop of llandaff": 23,
+ "black-eyed susan": 6,
+ "blackberry lily": 99,
+ "blanket flower": 92,
+ "bolero deep blue": 31,
+ "bougainvillea": 0,
+ "bromelia": 85,
+ "buttercup": 68,
+ "californian poppy": 37,
+ "camellia": 76,
+ "canna lily": 7,
+ "canterbury bells": 94,
+ "cape flower": 78,
+ "carnation": 66,
+ "cautleya spicata": 81,
+ "clematis": 36,
+ "colt's foot": 90,
+ "columbine": 98,
+ "common dandelion": 21,
+ "corn poppy": 19,
+ "cyclamen": 42,
+ "daffodil": 40,
+ "desert-rose": 17,
+ "english marigold": 54,
+ "fire lily": 49,
+ "foxglove": 38,
+ "frangipani": 29,
+ "fritillary": 58,
+ "garden phlox": 100,
+ "gaura": 60,
+ "gazania": 1,
+ "geranium": 15,
+ "giant white arum lily": 87,
+ "globe thistle": 50,
+ "globe-flower": 86,
+ "grape hyacinth": 48,
+ "great masterwort": 67,
+ "hard-leaved pocket orchid": 70,
+ "hibiscus": 61,
+ "hippeastrum": 71,
+ "japanese anemone": 73,
+ "king protea": 51,
+ "lenten rose": 25,
+ "lotus lotus": 14,
+ "love in the mist": 4,
+ "magnolia": 3,
+ "mallow": 9,
+ "marigold": 93,
+ "mexican aster": 96,
+ "mexican petunia": 82,
+ "monkshood": 8,
+ "moon orchid": 41,
+ "morning glory": 43,
+ "orange dahlia": 16,
+ "osteospermum": 84,
+ "oxeye daisy": 47,
+ "passion flower": 89,
+ "pelargonium": 59,
+ "peruvian lily": 83,
+ "petunia": 5,
+ "pincushion flower": 28,
+ "pink primrose": 39,
+ "pink-yellow dahlia": 24,
+ "poinsettia": 53,
+ "primula": 91,
+ "prince of wales feathers": 77,
+ "purple coneflower": 88,
+ "red ginger": 11,
+ "rose": 46,
+ "ruby-lipped cattleya": 79,
+ "siam tulip": 27,
+ "silverbush": 12,
+ "snapdragon": 10,
+ "spear thistle": 74,
+ "spring crocus": 30,
+ "stemless gentian": 33,
+ "sunflower": 34,
+ "sweet pea": 45,
+ "sweet william": 72,
+ "sword lily": 97,
+ "thorn apple": 80,
+ "tiger lily": 35,
+ "toad lily": 55,
+ "tree mallow": 52,
+ "tree poppy": 56,
+ "trumpet creeper": 101,
+ "wallflower": 26,
+ "water lily": 22,
+ "watercress": 18,
+ "wild pansy": 57,
+ "windflower": 63,
+ "yellow iris": 20
+ },
+ "layer_norm_eps": 1e-12,
+ "model_type": "vit",
+ "num_attention_heads": 12,
+ "num_channels": 3,
+ "num_hidden_layers": 12,
+ "patch_size": 16,
+ "problem_type": "single_label_classification",
+ "qkv_bias": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.2"
+ }
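
The config above describes google/vit-base-patch16-224-in21k fine-tuned as a 102-class flower classifier; the id2label/label2id maps turn logits into flower names. As a minimal sketch (not part of this commit, and assuming the transformers and Pillow packages plus the weights and preprocessor config added below in this checkpoint folder), inference looks roughly like this:

```python
from PIL import Image
from transformers import ViTForImageClassification, ViTImageProcessor

# Hypothetical local path to the checkpoint folder from this commit.
ckpt = "checkpoint-11940"
model = ViTForImageClassification.from_pretrained(ckpt)
processor = ViTImageProcessor.from_pretrained(ckpt)

image = Image.open("flower.jpg").convert("RGB")        # any RGB flower photo
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits
pred = logits.argmax(-1).item()
print(model.config.id2label[pred])                     # e.g. "sunflower"
```
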
checkpoint-11940/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f3e930f667060501dada12483787a306e1f467ff6824f32863c2b4ff9e5f6d9
+ size 343531584
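
This three-line file is a Git LFS pointer, not the weights themselves: the ~343 MB model.safetensors blob is stored via LFS and identified by the sha256 oid above. As a small sketch (assuming the blob has already been fetched, e.g. with `git lfs pull` in a local clone), the download can be checked against the pointer:

```python
import hashlib

# oid copied from the LFS pointer above; path assumes a local clone of the repo.
expected = "4f3e930f667060501dada12483787a306e1f467ff6824f32863c2b4ff9e5f6d9"
sha = hashlib.sha256()
with open("checkpoint-11940/model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == expected, "weights do not match the LFS pointer"
```
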
checkpoint-11940/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52e27102ad9178cb00fd9a5aeaef91c1e88fbc416e6658cb2ca5c0edbbb3eec9
+ size 687183621
checkpoint-11940/preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "image_processor_type": "ViTImageProcessor",
+ "image_std": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "resample": 2,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "height": 224,
+ "width": 224
+ }
+ }
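
This preprocessor config resizes inputs to 224x224 (resample=2 is PIL bilinear), rescales pixel values by 1/255, and normalizes each channel with mean and std 0.5, mapping pixels to [-1, 1]. ViTImageProcessor.from_pretrained applies all of this automatically; a roughly equivalent torchvision transform, shown only as an illustration and assuming torchvision is installed, would be:

```python
from torchvision import transforms

# Mirrors do_resize / do_rescale / do_normalize from the config above.
vit_transform = transforms.Compose([
    transforms.Resize((224, 224)),                 # size: 224x224, bilinear resampling
    transforms.ToTensor(),                         # rescale_factor = 1/255
    transforms.Normalize(mean=[0.5, 0.5, 0.5],     # image_mean
                         std=[0.5, 0.5, 0.5]),     # image_std -> values in [-1, 1]
])
```
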
checkpoint-11940/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac2cfc9e1cb5027eca107806b733fa7cc57becb27b5fe28ddf492ab1310999e1
+ size 14575
checkpoint-11940/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb6f29cc6c7d938f9f1f2a74b92fe6093c546631eef44f9f2c5047c8a7f40088
+ size 627
checkpoint-11940/trainer_state.json ADDED
@@ -0,0 +1,362 @@
+ {
+ "best_metric": 0.08057570457458496,
+ "best_model_checkpoint": "oxford_flowers_image_detection/checkpoint-11940",
+ "epoch": 20.0,
+ "eval_steps": 500,
+ "global_step": 11940,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.605017066001892,
+ "learning_rate": 9.621530698065601e-06,
+ "loss": 4.1645,
+ "step": 500
+ },
+ {
+ "epoch": 1.0,
+ "eval_accuracy": 0.9794721407624634,
+ "eval_loss": 3.545278549194336,
+ "eval_runtime": 60.0735,
+ "eval_samples_per_second": 79.469,
+ "eval_steps_per_second": 9.938,
+ "step": 597
+ },
+ {
+ "epoch": 1.68,
+ "grad_norm": 1.842254400253296,
+ "learning_rate": 9.201009251471826e-06,
+ "loss": 3.2178,
+ "step": 1000
+ },
+ {
+ "epoch": 2.0,
+ "eval_accuracy": 0.9897360703812317,
+ "eval_loss": 2.678635597229004,
+ "eval_runtime": 59.3446,
+ "eval_samples_per_second": 80.445,
+ "eval_steps_per_second": 10.06,
+ "step": 1194
+ },
+ {
+ "epoch": 2.51,
+ "grad_norm": 1.6889910697937012,
+ "learning_rate": 8.78048780487805e-06,
+ "loss": 2.5052,
+ "step": 1500
+ },
+ {
+ "epoch": 3.0,
+ "eval_accuracy": 0.9922496857980729,
+ "eval_loss": 2.020171880722046,
+ "eval_runtime": 60.7885,
+ "eval_samples_per_second": 78.535,
+ "eval_steps_per_second": 9.821,
+ "step": 1791
+ },
+ {
+ "epoch": 3.35,
+ "grad_norm": 1.7160316705703735,
+ "learning_rate": 8.359966358284272e-06,
+ "loss": 1.9468,
+ "step": 2000
+ },
+ {
+ "epoch": 4.0,
+ "eval_accuracy": 0.9970674486803519,
+ "eval_loss": 1.4925795793533325,
+ "eval_runtime": 58.8625,
+ "eval_samples_per_second": 81.104,
+ "eval_steps_per_second": 10.142,
+ "step": 2388
+ },
+ {
+ "epoch": 4.19,
+ "grad_norm": 1.7639594078063965,
+ "learning_rate": 7.939444911690497e-06,
+ "loss": 1.4855,
+ "step": 2500
+ },
+ {
+ "epoch": 5.0,
+ "eval_accuracy": 0.9970674486803519,
+ "eval_loss": 1.0846645832061768,
+ "eval_runtime": 58.8615,
+ "eval_samples_per_second": 81.106,
+ "eval_steps_per_second": 10.142,
+ "step": 2985
+ },
+ {
+ "epoch": 5.03,
+ "grad_norm": 1.3591196537017822,
+ "learning_rate": 7.51892346509672e-06,
+ "loss": 1.1245,
+ "step": 3000
+ },
+ {
+ "epoch": 5.86,
+ "grad_norm": 1.0564796924591064,
+ "learning_rate": 7.0984020185029444e-06,
+ "loss": 0.8379,
+ "step": 3500
+ },
+ {
+ "epoch": 6.0,
+ "eval_accuracy": 0.9976958525345622,
+ "eval_loss": 0.7747777700424194,
+ "eval_runtime": 64.9338,
+ "eval_samples_per_second": 73.521,
+ "eval_steps_per_second": 9.194,
+ "step": 3582
+ },
+ {
+ "epoch": 6.7,
+ "grad_norm": 0.7882522344589233,
+ "learning_rate": 6.677880571909168e-06,
+ "loss": 0.6216,
+ "step": 4000
+ },
+ {
+ "epoch": 7.0,
+ "eval_accuracy": 0.9976958525345622,
+ "eval_loss": 0.5561748743057251,
+ "eval_runtime": 58.7465,
+ "eval_samples_per_second": 81.264,
+ "eval_steps_per_second": 10.162,
+ "step": 4179
+ },
+ {
+ "epoch": 7.54,
+ "grad_norm": 0.7526402473449707,
+ "learning_rate": 6.257359125315392e-06,
+ "loss": 0.4709,
+ "step": 4500
+ },
+ {
+ "epoch": 8.0,
+ "eval_accuracy": 0.9972769166317553,
+ "eval_loss": 0.4182124435901642,
+ "eval_runtime": 58.6111,
+ "eval_samples_per_second": 81.452,
+ "eval_steps_per_second": 10.186,
+ "step": 4776
+ },
+ {
+ "epoch": 8.38,
+ "grad_norm": 0.758459210395813,
+ "learning_rate": 5.836837678721616e-06,
+ "loss": 0.3674,
+ "step": 5000
+ },
+ {
+ "epoch": 9.0,
+ "eval_accuracy": 0.9979053204859657,
+ "eval_loss": 0.3245885372161865,
+ "eval_runtime": 58.7024,
+ "eval_samples_per_second": 81.325,
+ "eval_steps_per_second": 10.17,
+ "step": 5373
+ },
+ {
+ "epoch": 9.21,
+ "grad_norm": 0.5952056050300598,
+ "learning_rate": 5.416316232127838e-06,
+ "loss": 0.2934,
+ "step": 5500
+ },
+ {
+ "epoch": 10.0,
+ "eval_accuracy": 0.9981147884373691,
+ "eval_loss": 0.25785893201828003,
+ "eval_runtime": 58.5206,
+ "eval_samples_per_second": 81.578,
+ "eval_steps_per_second": 10.202,
+ "step": 5970
+ },
+ {
+ "epoch": 10.05,
+ "grad_norm": 0.3461519777774811,
+ "learning_rate": 4.9957947855340624e-06,
+ "loss": 0.2405,
+ "step": 6000
+ },
+ {
+ "epoch": 10.89,
+ "grad_norm": 0.34682661294937134,
+ "learning_rate": 4.575273338940287e-06,
+ "loss": 0.1989,
+ "step": 6500
+ },
+ {
+ "epoch": 11.0,
+ "eval_accuracy": 0.998533724340176,
+ "eval_loss": 0.21054226160049438,
+ "eval_runtime": 60.1082,
+ "eval_samples_per_second": 79.423,
+ "eval_steps_per_second": 9.932,
+ "step": 6567
+ },
+ {
+ "epoch": 11.73,
+ "grad_norm": 0.32442188262939453,
+ "learning_rate": 4.15475189234651e-06,
+ "loss": 0.1687,
+ "step": 7000
+ },
+ {
+ "epoch": 12.0,
+ "eval_accuracy": 0.998533724340176,
+ "eval_loss": 0.1750020831823349,
+ "eval_runtime": 61.3349,
+ "eval_samples_per_second": 77.835,
+ "eval_steps_per_second": 9.733,
+ "step": 7164
+ },
+ {
+ "epoch": 12.56,
+ "grad_norm": 0.3209201693534851,
+ "learning_rate": 3.734230445752734e-06,
+ "loss": 0.1447,
+ "step": 7500
+ },
+ {
+ "epoch": 13.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.1479656994342804,
+ "eval_runtime": 61.7701,
+ "eval_samples_per_second": 77.287,
+ "eval_steps_per_second": 9.665,
+ "step": 7761
+ },
+ {
+ "epoch": 13.4,
+ "grad_norm": 0.3844256103038788,
+ "learning_rate": 3.313708999158957e-06,
+ "loss": 0.1248,
+ "step": 8000
+ },
+ {
+ "epoch": 14.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.12714745104312897,
+ "eval_runtime": 59.2363,
+ "eval_samples_per_second": 80.593,
+ "eval_steps_per_second": 10.078,
+ "step": 8358
+ },
+ {
+ "epoch": 14.24,
+ "grad_norm": 0.2125348150730133,
+ "learning_rate": 2.893187552565181e-06,
+ "loss": 0.1106,
+ "step": 8500
+ },
+ {
+ "epoch": 15.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.11167968809604645,
+ "eval_runtime": 59.6724,
+ "eval_samples_per_second": 80.004,
+ "eval_steps_per_second": 10.005,
+ "step": 8955
+ },
+ {
+ "epoch": 15.08,
+ "grad_norm": 0.19407622516155243,
+ "learning_rate": 2.4726661059714047e-06,
+ "loss": 0.098,
+ "step": 9000
+ },
+ {
+ "epoch": 15.91,
+ "grad_norm": 0.15018388628959656,
+ "learning_rate": 2.0521446593776286e-06,
+ "loss": 0.0892,
+ "step": 9500
+ },
+ {
+ "epoch": 16.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.09966100007295609,
+ "eval_runtime": 59.4365,
+ "eval_samples_per_second": 80.321,
+ "eval_steps_per_second": 10.044,
+ "step": 9552
+ },
+ {
+ "epoch": 16.75,
+ "grad_norm": 0.15547074377536774,
+ "learning_rate": 1.6316232127838522e-06,
+ "loss": 0.0812,
+ "step": 10000
+ },
+ {
+ "epoch": 17.0,
+ "eval_accuracy": 0.9991621281943862,
+ "eval_loss": 0.09098479151725769,
+ "eval_runtime": 61.7838,
+ "eval_samples_per_second": 77.269,
+ "eval_steps_per_second": 9.663,
+ "step": 10149
+ },
+ {
+ "epoch": 17.59,
+ "grad_norm": 0.1597314476966858,
+ "learning_rate": 1.2111017661900759e-06,
+ "loss": 0.0763,
+ "step": 10500
+ },
+ {
+ "epoch": 18.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.08528301864862442,
+ "eval_runtime": 59.3195,
+ "eval_samples_per_second": 80.479,
+ "eval_steps_per_second": 10.064,
+ "step": 10746
+ },
+ {
+ "epoch": 18.43,
+ "grad_norm": 0.13518930971622467,
+ "learning_rate": 7.905803195962995e-07,
+ "loss": 0.0717,
+ "step": 11000
+ },
+ {
+ "epoch": 19.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.08179289102554321,
+ "eval_runtime": 59.5963,
+ "eval_samples_per_second": 80.106,
+ "eval_steps_per_second": 10.017,
+ "step": 11343
+ },
+ {
+ "epoch": 19.26,
+ "grad_norm": 0.12960000336170197,
+ "learning_rate": 3.700588730025231e-07,
+ "loss": 0.0691,
+ "step": 11500
+ },
+ {
+ "epoch": 20.0,
+ "eval_accuracy": 0.9987431922915794,
+ "eval_loss": 0.08057570457458496,
+ "eval_runtime": 60.953,
+ "eval_samples_per_second": 78.323,
+ "eval_steps_per_second": 9.794,
+ "step": 11940
+ }
+ ],
+ "logging_steps": 500,
+ "max_steps": 11940,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 20,
+ "save_steps": 500,
+ "total_flos": 2.961916394212049e+19,
+ "train_batch_size": 32,
+ "trial_name": null,
+ "trial_params": null
+ }
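
trainer_state.json records the training loss every 500 steps and an evaluation pass at the end of each of the 20 epochs (597 steps per epoch, 11,940 in total); the best eval_loss of about 0.0806 is reached at the final step, which is why this checkpoint is marked as best_model_checkpoint. A short sketch (assuming a local clone of the repo) to pull out the evaluation curve:

```python
import json

with open("checkpoint-11940/trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-epoch evaluation entries from the mixed log history.
evals = [e for e in state["log_history"] if "eval_accuracy" in e]
for e in evals:
    print(f"epoch {e['epoch']:>4}: accuracy={e['eval_accuracy']:.4f}  loss={e['eval_loss']:.4f}")

print("best eval_loss:", state["best_metric"], "from", state["best_model_checkpoint"])
```
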
checkpoint-11940/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f8e04ba40c6744bef65cab07f0c0f487adfa8b70b5bf1f7c176441d5a7860aa
+ size 4411
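
training_args.bin is a pickled TrainingArguments object; together with optimizer.pt, scheduler.pt, rng_state.pth, and trainer_state.json it makes this checkpoint resumable through the Trainer's resume_from_checkpoint mechanism. To simply inspect the saved arguments (a sketch; recent PyTorch versions need weights_only=False to unpickle non-tensor objects):

```python
import torch

# Path assumes a local clone; weights_only=False is required on newer PyTorch.
args = torch.load("checkpoint-11940/training_args.bin", weights_only=False)
print(type(args).__name__)    # TrainingArguments
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```
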
config.json ADDED
@@ -0,0 +1,232 @@
+ {
+ "_name_or_path": "google/vit-base-patch16-224-in21k",
+ "architectures": [
+ "ViTForImageClassification"
+ ],
+ "attention_probs_dropout_prob": 0.0,
+ "encoder_stride": 16,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "bougainvillea",
+ "1": "gazania",
+ "2": "azalea",
+ "3": "magnolia",
+ "4": "love in the mist",
+ "5": "petunia",
+ "6": "black-eyed susan",
+ "7": "canna lily",
+ "8": "monkshood",
+ "9": "mallow",
+ "10": "snapdragon",
+ "11": "red ginger",
+ "12": "silverbush",
+ "13": "ball moss",
+ "14": "lotus lotus",
+ "15": "geranium",
+ "16": "orange dahlia",
+ "17": "desert-rose",
+ "18": "watercress",
+ "19": "corn poppy",
+ "20": "yellow iris",
+ "21": "common dandelion",
+ "22": "water lily",
+ "23": "bishop of llandaff",
+ "24": "pink-yellow dahlia",
+ "25": "lenten rose",
+ "26": "wallflower",
+ "27": "siam tulip",
+ "28": "pincushion flower",
+ "29": "frangipani",
+ "30": "spring crocus",
+ "31": "bolero deep blue",
+ "32": "alpine sea holly",
+ "33": "stemless gentian",
+ "34": "sunflower",
+ "35": "tiger lily",
+ "36": "clematis",
+ "37": "californian poppy",
+ "38": "foxglove",
+ "39": "pink primrose",
+ "40": "daffodil",
+ "41": "moon orchid",
+ "42": "cyclamen",
+ "43": "morning glory",
+ "44": "anthurium",
+ "45": "sweet pea",
+ "46": "rose",
+ "47": "oxeye daisy",
+ "48": "grape hyacinth",
+ "49": "fire lily",
+ "50": "globe thistle",
+ "51": "king protea",
+ "52": "tree mallow",
+ "53": "poinsettia",
+ "54": "english marigold",
+ "55": "toad lily",
+ "56": "tree poppy",
+ "57": "wild pansy",
+ "58": "fritillary",
+ "59": "pelargonium",
+ "60": "gaura",
+ "61": "hibiscus",
+ "62": "barbeton daisy",
+ "63": "windflower",
+ "64": "balloon flower",
+ "65": "bearded iris",
+ "66": "carnation",
+ "67": "great masterwort",
+ "68": "buttercup",
+ "69": "bird of paradise",
+ "70": "hard-leaved pocket orchid",
+ "71": "hippeastrum",
+ "72": "sweet william",
+ "73": "japanese anemone",
+ "74": "spear thistle",
+ "75": "bee balm",
+ "76": "camellia",
+ "77": "prince of wales feathers",
+ "78": "cape flower",
+ "79": "ruby-lipped cattleya",
+ "80": "thorn apple",
+ "81": "cautleya spicata",
+ "82": "mexican petunia",
+ "83": "peruvian lily",
+ "84": "osteospermum",
+ "85": "bromelia",
+ "86": "globe-flower",
+ "87": "giant white arum lily",
+ "88": "purple coneflower",
+ "89": "passion flower",
+ "90": "colt's foot",
+ "91": "primula",
+ "92": "blanket flower",
+ "93": "marigold",
+ "94": "canterbury bells",
+ "95": "artichoke",
+ "96": "mexican aster",
+ "97": "sword lily",
+ "98": "columbine",
+ "99": "blackberry lily",
+ "100": "garden phlox",
+ "101": "trumpet creeper"
+ },
+ "image_size": 224,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "alpine sea holly": 32,
+ "anthurium": 44,
+ "artichoke": 95,
+ "azalea": 2,
+ "ball moss": 13,
+ "balloon flower": 64,
+ "barbeton daisy": 62,
+ "bearded iris": 65,
+ "bee balm": 75,
+ "bird of paradise": 69,
+ "bishop of llandaff": 23,
+ "black-eyed susan": 6,
+ "blackberry lily": 99,
+ "blanket flower": 92,
+ "bolero deep blue": 31,
+ "bougainvillea": 0,
+ "bromelia": 85,
+ "buttercup": 68,
+ "californian poppy": 37,
+ "camellia": 76,
+ "canna lily": 7,
+ "canterbury bells": 94,
+ "cape flower": 78,
+ "carnation": 66,
+ "cautleya spicata": 81,
+ "clematis": 36,
+ "colt's foot": 90,
+ "columbine": 98,
+ "common dandelion": 21,
+ "corn poppy": 19,
+ "cyclamen": 42,
+ "daffodil": 40,
+ "desert-rose": 17,
+ "english marigold": 54,
+ "fire lily": 49,
+ "foxglove": 38,
+ "frangipani": 29,
+ "fritillary": 58,
+ "garden phlox": 100,
+ "gaura": 60,
+ "gazania": 1,
+ "geranium": 15,
+ "giant white arum lily": 87,
+ "globe thistle": 50,
+ "globe-flower": 86,
+ "grape hyacinth": 48,
+ "great masterwort": 67,
+ "hard-leaved pocket orchid": 70,
+ "hibiscus": 61,
+ "hippeastrum": 71,
+ "japanese anemone": 73,
+ "king protea": 51,
+ "lenten rose": 25,
+ "lotus lotus": 14,
+ "love in the mist": 4,
+ "magnolia": 3,
+ "mallow": 9,
+ "marigold": 93,
+ "mexican aster": 96,
+ "mexican petunia": 82,
+ "monkshood": 8,
+ "moon orchid": 41,
+ "morning glory": 43,
+ "orange dahlia": 16,
+ "osteospermum": 84,
+ "oxeye daisy": 47,
+ "passion flower": 89,
+ "pelargonium": 59,
+ "peruvian lily": 83,
+ "petunia": 5,
+ "pincushion flower": 28,
+ "pink primrose": 39,
+ "pink-yellow dahlia": 24,
+ "poinsettia": 53,
+ "primula": 91,
+ "prince of wales feathers": 77,
+ "purple coneflower": 88,
+ "red ginger": 11,
+ "rose": 46,
+ "ruby-lipped cattleya": 79,
+ "siam tulip": 27,
+ "silverbush": 12,
+ "snapdragon": 10,
+ "spear thistle": 74,
+ "spring crocus": 30,
+ "stemless gentian": 33,
+ "sunflower": 34,
+ "sweet pea": 45,
+ "sweet william": 72,
+ "sword lily": 97,
+ "thorn apple": 80,
+ "tiger lily": 35,
+ "toad lily": 55,
+ "tree mallow": 52,
+ "tree poppy": 56,
+ "trumpet creeper": 101,
+ "wallflower": 26,
+ "water lily": 22,
+ "watercress": 18,
+ "wild pansy": 57,
+ "windflower": 63,
+ "yellow iris": 20
+ },
+ "layer_norm_eps": 1e-12,
+ "model_type": "vit",
+ "num_attention_heads": 12,
+ "num_channels": 3,
+ "num_hidden_layers": 12,
+ "patch_size": 16,
+ "problem_type": "single_label_classification",
+ "qkv_bias": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.2"
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f3e930f667060501dada12483787a306e1f467ff6824f32863c2b4ff9e5f6d9
+ size 343531584
preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "image_processor_type": "ViTImageProcessor",
+ "image_std": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "resample": 2,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "height": 224,
+ "width": 224
+ }
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f8e04ba40c6744bef65cab07f0c0f487adfa8b70b5bf1f7c176441d5a7860aa
+ size 4411
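
Because config.json, model.safetensors, and preprocessor_config.json are also duplicated at the repository root, the upload can be used directly through the pipeline API. The repo id below is only an assumption pieced together from the commit author and the training output directory; substitute the actual model repository:

```python
from transformers import pipeline

# Hypothetical repo id; replace with the real one for this upload.
classifier = pipeline("image-classification", model="lamrin8224/oxford_flowers_image_detection")
print(classifier("flower.jpg", top_k=3))    # top-3 flower labels with scores
```
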