OLD_vit_epochs5_batch64_lr5e-05_size224_tiles1_seed1_classic_image_classification/trainer_state.json
{
  "best_metric": 0.006617749575525522,
  "best_model_checkpoint": "vit_epochs5_batch64_lr5e-05_size224_tiles1_seed1_classic_image_classification/checkpoint-1410",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1410,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01773049645390071, "grad_norm": 1.1057369709014893, "learning_rate": 4.9822695035461e-05, "loss": 0.6106, "step": 5 },
    { "epoch": 0.03546099290780142, "grad_norm": 1.056671142578125, "learning_rate": 4.964539007092199e-05, "loss": 0.462, "step": 10 },
    { "epoch": 0.05319148936170213, "grad_norm": 0.9491887092590332, "learning_rate": 4.946808510638298e-05, "loss": 0.31, "step": 15 },
    { "epoch": 0.07092198581560284, "grad_norm": 1.2302058935165405, "learning_rate": 4.929078014184397e-05, "loss": 0.2197, "step": 20 },
    { "epoch": 0.08865248226950355, "grad_norm": 1.1420009136199951, "learning_rate": 4.911347517730497e-05, "loss": 0.1468, "step": 25 },
    { "epoch": 0.10638297872340426, "grad_norm": 1.3249993324279785, "learning_rate": 4.893617021276596e-05, "loss": 0.1098, "step": 30 },
    { "epoch": 0.12411347517730496, "grad_norm": 0.8271305561065674, "learning_rate": 4.875886524822695e-05, "loss": 0.0994, "step": 35 },
    { "epoch": 0.14184397163120568, "grad_norm": 1.1268686056137085, "learning_rate": 4.858156028368794e-05, "loss": 0.0718, "step": 40 },
    { "epoch": 0.1595744680851064, "grad_norm": 0.8098769187927246, "learning_rate": 4.840425531914894e-05, "loss": 0.0861, "step": 45 },
    { "epoch": 0.1773049645390071, "grad_norm": 1.5605119466781616, "learning_rate": 4.822695035460993e-05, "loss": 0.0882, "step": 50 },
    { "epoch": 0.1950354609929078, "grad_norm": 0.6594899892807007, "learning_rate": 4.804964539007092e-05, "loss": 0.0567, "step": 55 },
    { "epoch": 0.2127659574468085, "grad_norm": 0.3054638206958771, "learning_rate": 4.787234042553192e-05, "loss": 0.043, "step": 60 },
    { "epoch": 0.23049645390070922, "grad_norm": 1.3071125745773315, "learning_rate": 4.769503546099291e-05, "loss": 0.0675, "step": 65 },
    { "epoch": 0.24822695035460993, "grad_norm": 1.4442698955535889, "learning_rate": 4.751773049645391e-05, "loss": 0.0818, "step": 70 },
    { "epoch": 0.26595744680851063, "grad_norm": 1.443562388420105, "learning_rate": 4.734042553191489e-05, "loss": 0.09, "step": 75 },
    { "epoch": 0.28368794326241137, "grad_norm": 1.0713045597076416, "learning_rate": 4.716312056737589e-05, "loss": 0.0766, "step": 80 },
    { "epoch": 0.30141843971631205, "grad_norm": 0.24775153398513794, "learning_rate": 4.698581560283688e-05, "loss": 0.067, "step": 85 },
    { "epoch": 0.3191489361702128, "grad_norm": 1.436522126197815, "learning_rate": 4.680851063829788e-05, "loss": 0.0792, "step": 90 },
    { "epoch": 0.33687943262411346, "grad_norm": 0.49839749932289124, "learning_rate": 4.663120567375887e-05, "loss": 0.0595, "step": 95 },
    { "epoch": 0.3546099290780142, "grad_norm": 0.6618617177009583, "learning_rate": 4.645390070921986e-05, "loss": 0.036, "step": 100 },
    { "epoch": 0.3723404255319149, "grad_norm": 0.8074954748153687, "learning_rate": 4.627659574468085e-05, "loss": 0.0589, "step": 105 },
    { "epoch": 0.3900709219858156, "grad_norm": 1.512774109840393, "learning_rate": 4.609929078014185e-05, "loss": 0.0477, "step": 110 },
    { "epoch": 0.4078014184397163, "grad_norm": 1.3695027828216553, "learning_rate": 4.592198581560284e-05, "loss": 0.1018, "step": 115 },
    { "epoch": 0.425531914893617, "grad_norm": 0.7358906865119934, "learning_rate": 4.574468085106383e-05, "loss": 0.0689, "step": 120 },
    { "epoch": 0.4432624113475177, "grad_norm": 0.2363295555114746, "learning_rate": 4.556737588652483e-05, "loss": 0.0644, "step": 125 },
    { "epoch": 0.46099290780141844, "grad_norm": 2.4520492553710938, "learning_rate": 4.539007092198582e-05, "loss": 0.0489, "step": 130 },
    { "epoch": 0.4787234042553192, "grad_norm": 1.8659327030181885, "learning_rate": 4.5212765957446815e-05, "loss": 0.0544, "step": 135 },
    { "epoch": 0.49645390070921985, "grad_norm": 0.5137922167778015, "learning_rate": 4.50354609929078e-05, "loss": 0.0733, "step": 140 },
    { "epoch": 0.5141843971631206, "grad_norm": 0.2559826076030731, "learning_rate": 4.48581560283688e-05, "loss": 0.0349, "step": 145 },
    { "epoch": 0.5319148936170213, "grad_norm": 1.2222139835357666, "learning_rate": 4.468085106382979e-05, "loss": 0.061, "step": 150 },
    { "epoch": 0.549645390070922, "grad_norm": 0.19817987084388733, "learning_rate": 4.4503546099290785e-05, "loss": 0.0548, "step": 155 },
    { "epoch": 0.5673758865248227, "grad_norm": 0.6621349453926086, "learning_rate": 4.432624113475177e-05, "loss": 0.0409, "step": 160 },
    { "epoch": 0.5851063829787234, "grad_norm": 0.8852491974830627, "learning_rate": 4.414893617021277e-05, "loss": 0.027, "step": 165 },
    { "epoch": 0.6028368794326241, "grad_norm": 0.1130673736333847, "learning_rate": 4.3971631205673764e-05, "loss": 0.0271, "step": 170 },
    { "epoch": 0.6205673758865248, "grad_norm": 2.4764528274536133, "learning_rate": 4.3794326241134755e-05, "loss": 0.0578, "step": 175 },
    { "epoch": 0.6382978723404256, "grad_norm": 1.4849659204483032, "learning_rate": 4.3617021276595746e-05, "loss": 0.0425, "step": 180 },
    { "epoch": 0.6560283687943262, "grad_norm": 0.5148060321807861, "learning_rate": 4.343971631205674e-05, "loss": 0.036, "step": 185 },
    { "epoch": 0.6737588652482269, "grad_norm": 1.4764983654022217, "learning_rate": 4.3262411347517734e-05, "loss": 0.0815, "step": 190 },
    { "epoch": 0.6914893617021277, "grad_norm": 0.46336182951927185, "learning_rate": 4.3085106382978725e-05, "loss": 0.0225, "step": 195 },
    { "epoch": 0.7092198581560284, "grad_norm": 0.6195293664932251, "learning_rate": 4.2907801418439716e-05, "loss": 0.0228, "step": 200 },
    { "epoch": 0.7269503546099291, "grad_norm": 0.27533644437789917, "learning_rate": 4.273049645390071e-05, "loss": 0.0709, "step": 205 },
    { "epoch": 0.7446808510638298, "grad_norm": 0.6618344187736511, "learning_rate": 4.2553191489361704e-05, "loss": 0.0708, "step": 210 },
    { "epoch": 0.7624113475177305, "grad_norm": 0.9032989740371704, "learning_rate": 4.2375886524822695e-05, "loss": 0.0396, "step": 215 },
    { "epoch": 0.7801418439716312, "grad_norm": 2.292132616043091, "learning_rate": 4.219858156028369e-05, "loss": 0.0927, "step": 220 },
    { "epoch": 0.7978723404255319, "grad_norm": 2.1669278144836426, "learning_rate": 4.2021276595744684e-05, "loss": 0.1068, "step": 225 },
    { "epoch": 0.8156028368794326, "grad_norm": 1.218011498451233, "learning_rate": 4.1843971631205674e-05, "loss": 0.0444, "step": 230 },
    { "epoch": 0.8333333333333334, "grad_norm": 1.6065305471420288, "learning_rate": 4.166666666666667e-05, "loss": 0.0443, "step": 235 },
    { "epoch": 0.851063829787234, "grad_norm": 1.1235682964324951, "learning_rate": 4.148936170212766e-05, "loss": 0.0683, "step": 240 },
    { "epoch": 0.8687943262411347, "grad_norm": 1.7434436082839966, "learning_rate": 4.1312056737588654e-05, "loss": 0.0857, "step": 245 },
    { "epoch": 0.8865248226950354, "grad_norm": 0.5483965277671814, "learning_rate": 4.1134751773049644e-05, "loss": 0.0437, "step": 250 },
    { "epoch": 0.9042553191489362, "grad_norm": 1.176393985748291, "learning_rate": 4.095744680851064e-05, "loss": 0.0771, "step": 255 },
    { "epoch": 0.9219858156028369, "grad_norm": 1.270770788192749, "learning_rate": 4.078014184397163e-05, "loss": 0.0371, "step": 260 },
    { "epoch": 0.9397163120567376, "grad_norm": 1.3304009437561035, "learning_rate": 4.0602836879432624e-05, "loss": 0.0815, "step": 265 },
    { "epoch": 0.9574468085106383, "grad_norm": 0.5169529914855957, "learning_rate": 4.0425531914893614e-05, "loss": 0.0511, "step": 270 },
    { "epoch": 0.975177304964539, "grad_norm": 0.4900430142879486, "learning_rate": 4.024822695035461e-05, "loss": 0.0304, "step": 275 },
    { "epoch": 0.9929078014184397, "grad_norm": 1.8383599519729614, "learning_rate": 4.007092198581561e-05, "loss": 0.0449, "step": 280 },
    { "epoch": 1.0, "eval_accuracy": 0.9957144968228167, "eval_loss": 0.018303971737623215, "eval_runtime": 99.5406, "eval_samples_per_second": 67.982, "eval_steps_per_second": 1.065, "step": 282 },
    { "epoch": 1.0106382978723405, "grad_norm": 0.5246375203132629, "learning_rate": 3.9893617021276594e-05, "loss": 0.0246, "step": 285 },
    { "epoch": 1.0283687943262412, "grad_norm": 1.0449841022491455, "learning_rate": 3.971631205673759e-05, "loss": 0.0212, "step": 290 },
    { "epoch": 1.0460992907801419, "grad_norm": 0.26453474164009094, "learning_rate": 3.953900709219858e-05, "loss": 0.0358, "step": 295 },
    { "epoch": 1.0638297872340425, "grad_norm": 0.6182503700256348, "learning_rate": 3.936170212765958e-05, "loss": 0.0387, "step": 300 },
    { "epoch": 1.0815602836879432, "grad_norm": 0.46771693229675293, "learning_rate": 3.918439716312057e-05, "loss": 0.0285, "step": 305 },
    { "epoch": 1.099290780141844, "grad_norm": 1.8450125455856323, "learning_rate": 3.900709219858156e-05, "loss": 0.0475, "step": 310 },
    { "epoch": 1.1170212765957448, "grad_norm": 0.7594661712646484, "learning_rate": 3.882978723404255e-05, "loss": 0.0156, "step": 315 },
    { "epoch": 1.1347517730496455, "grad_norm": 0.29846104979515076, "learning_rate": 3.865248226950355e-05, "loss": 0.0375, "step": 320 },
    { "epoch": 1.1524822695035462, "grad_norm": 1.215070128440857, "learning_rate": 3.847517730496454e-05, "loss": 0.0626, "step": 325 },
    { "epoch": 1.1702127659574468, "grad_norm": 0.27688533067703247, "learning_rate": 3.829787234042553e-05, "loss": 0.0327, "step": 330 },
    { "epoch": 1.1879432624113475, "grad_norm": 1.9927847385406494, "learning_rate": 3.812056737588653e-05, "loss": 0.0557, "step": 335 },
    { "epoch": 1.2056737588652482, "grad_norm": 4.12082576751709, "learning_rate": 3.794326241134752e-05, "loss": 0.0669, "step": 340 },
    { "epoch": 1.2234042553191489, "grad_norm": 1.2266093492507935, "learning_rate": 3.776595744680852e-05, "loss": 0.0575, "step": 345 },
    { "epoch": 1.2411347517730495, "grad_norm": 0.7239733934402466, "learning_rate": 3.75886524822695e-05, "loss": 0.0316, "step": 350 },
    { "epoch": 1.2588652482269502, "grad_norm": 1.3613947629928589, "learning_rate": 3.74113475177305e-05, "loss": 0.0512, "step": 355 },
    { "epoch": 1.2765957446808511, "grad_norm": 2.1938693523406982, "learning_rate": 3.723404255319149e-05, "loss": 0.0511, "step": 360 },
    { "epoch": 1.2943262411347518, "grad_norm": 0.5384643077850342, "learning_rate": 3.705673758865249e-05, "loss": 0.0479, "step": 365 },
    { "epoch": 1.3120567375886525, "grad_norm": 0.4367295801639557, "learning_rate": 3.687943262411347e-05, "loss": 0.0595, "step": 370 },
    { "epoch": 1.3297872340425532, "grad_norm": 1.2368284463882446, "learning_rate": 3.670212765957447e-05, "loss": 0.0332, "step": 375 },
    { "epoch": 1.3475177304964538, "grad_norm": 1.422599196434021, "learning_rate": 3.6524822695035466e-05, "loss": 0.0433, "step": 380 },
    { "epoch": 1.3652482269503547, "grad_norm": 1.9355744123458862, "learning_rate": 3.634751773049646e-05, "loss": 0.0401, "step": 385 },
    { "epoch": 1.3829787234042552, "grad_norm": 0.11844053119421005, "learning_rate": 3.617021276595745e-05, "loss": 0.0308, "step": 390 },
    { "epoch": 1.400709219858156, "grad_norm": 1.5698013305664062, "learning_rate": 3.599290780141844e-05, "loss": 0.024, "step": 395 },
    { "epoch": 1.4184397163120568, "grad_norm": 0.24424462020397186, "learning_rate": 3.5815602836879437e-05, "loss": 0.0284, "step": 400 },
    { "epoch": 1.4361702127659575, "grad_norm": 0.8079735040664673, "learning_rate": 3.563829787234043e-05, "loss": 0.0289, "step": 405 },
    { "epoch": 1.4539007092198581, "grad_norm": 0.5713645815849304, "learning_rate": 3.546099290780142e-05, "loss": 0.0489, "step": 410 },
    { "epoch": 1.4716312056737588, "grad_norm": 1.418959140777588, "learning_rate": 3.528368794326241e-05, "loss": 0.0494, "step": 415 },
    { "epoch": 1.4893617021276595, "grad_norm": 1.3852524757385254, "learning_rate": 3.5106382978723407e-05, "loss": 0.0416, "step": 420 },
    { "epoch": 1.5070921985815602, "grad_norm": 1.9135431051254272, "learning_rate": 3.49290780141844e-05, "loss": 0.0244, "step": 425 },
    { "epoch": 1.524822695035461, "grad_norm": 0.7146717309951782, "learning_rate": 3.4751773049645395e-05, "loss": 0.0512, "step": 430 },
    { "epoch": 1.5425531914893615, "grad_norm": 0.1944599747657776, "learning_rate": 3.4574468085106386e-05, "loss": 0.0191, "step": 435 },
    { "epoch": 1.5602836879432624, "grad_norm": 0.32298266887664795, "learning_rate": 3.4397163120567377e-05, "loss": 0.0221, "step": 440 },
    { "epoch": 1.5780141843971631, "grad_norm": 0.7677968740463257, "learning_rate": 3.4219858156028374e-05, "loss": 0.023, "step": 445 },
    { "epoch": 1.5957446808510638, "grad_norm": 0.19664119184017181, "learning_rate": 3.4042553191489365e-05, "loss": 0.0402, "step": 450 },
    { "epoch": 1.6134751773049647, "grad_norm": 0.8602023124694824, "learning_rate": 3.3865248226950356e-05, "loss": 0.0343, "step": 455 },
    { "epoch": 1.6312056737588652, "grad_norm": 2.0019989013671875, "learning_rate": 3.3687943262411347e-05, "loss": 0.0521, "step": 460 },
    { "epoch": 1.648936170212766, "grad_norm": 0.42928043007850647, "learning_rate": 3.3510638297872344e-05, "loss": 0.0434, "step": 465 },
    { "epoch": 1.6666666666666665, "grad_norm": 0.31418949365615845, "learning_rate": 3.3333333333333335e-05, "loss": 0.0125, "step": 470 },
    { "epoch": 1.6843971631205674, "grad_norm": 0.6069945096969604, "learning_rate": 3.3156028368794326e-05, "loss": 0.055, "step": 475 },
    { "epoch": 1.702127659574468, "grad_norm": 2.3644161224365234, "learning_rate": 3.2978723404255317e-05, "loss": 0.0652, "step": 480 },
    { "epoch": 1.7198581560283688, "grad_norm": 0.054500628262758255, "learning_rate": 3.2801418439716314e-05, "loss": 0.0398, "step": 485 },
    { "epoch": 1.7375886524822695, "grad_norm": 2.016355037689209, "learning_rate": 3.262411347517731e-05, "loss": 0.0788, "step": 490 },
    { "epoch": 1.7553191489361701, "grad_norm": 0.6187741160392761, "learning_rate": 3.2446808510638296e-05, "loss": 0.0579, "step": 495 },
    { "epoch": 1.773049645390071, "grad_norm": 1.820343255996704, "learning_rate": 3.226950354609929e-05, "loss": 0.0632, "step": 500 },
    { "epoch": 1.7907801418439715, "grad_norm": 0.4283825755119324, "learning_rate": 3.2092198581560284e-05, "loss": 0.0449, "step": 505 },
    { "epoch": 1.8085106382978724, "grad_norm": 1.2107046842575073, "learning_rate": 3.191489361702128e-05, "loss": 0.0293, "step": 510 },
    { "epoch": 1.826241134751773, "grad_norm": 0.751215398311615, "learning_rate": 3.173758865248227e-05, "loss": 0.0436, "step": 515 },
    { "epoch": 1.8439716312056738, "grad_norm": 0.8796170353889465, "learning_rate": 3.156028368794326e-05, "loss": 0.052, "step": 520 },
    { "epoch": 1.8617021276595744, "grad_norm": 0.14203386008739471, "learning_rate": 3.1382978723404254e-05, "loss": 0.0103, "step": 525 },
    { "epoch": 1.8794326241134751, "grad_norm": 1.044580340385437, "learning_rate": 3.120567375886525e-05, "loss": 0.0269, "step": 530 },
    { "epoch": 1.897163120567376, "grad_norm": 2.8254494667053223, "learning_rate": 3.102836879432624e-05, "loss": 0.0662, "step": 535 },
    { "epoch": 1.9148936170212765, "grad_norm": 0.6130526065826416, "learning_rate": 3.085106382978723e-05, "loss": 0.0277, "step": 540 },
    { "epoch": 1.9326241134751774, "grad_norm": 0.12123987823724747, "learning_rate": 3.067375886524823e-05, "loss": 0.035, "step": 545 },
    { "epoch": 1.950354609929078, "grad_norm": 0.5822201371192932, "learning_rate": 3.0496453900709222e-05, "loss": 0.0294, "step": 550 },
    { "epoch": 1.9680851063829787, "grad_norm": 0.28792017698287964, "learning_rate": 3.0319148936170216e-05, "loss": 0.0543, "step": 555 },
    { "epoch": 1.9858156028368794, "grad_norm": 1.7587871551513672, "learning_rate": 3.0141843971631207e-05, "loss": 0.04, "step": 560 },
    { "epoch": 2.0, "eval_accuracy": 0.9980789123688488, "eval_loss": 0.010071107186377048, "eval_runtime": 99.7696, "eval_samples_per_second": 67.826, "eval_steps_per_second": 1.062, "step": 564 },
    { "epoch": 2.00354609929078, "grad_norm": 0.3479292690753937, "learning_rate": 2.99645390070922e-05, "loss": 0.0726, "step": 565 },
    { "epoch": 2.021276595744681, "grad_norm": 0.10121797770261765, "learning_rate": 2.9787234042553192e-05, "loss": 0.0185, "step": 570 },
    { "epoch": 2.0390070921985815, "grad_norm": 0.9174487590789795, "learning_rate": 2.9609929078014186e-05, "loss": 0.0637, "step": 575 },
    { "epoch": 2.0567375886524824, "grad_norm": 0.4720062017440796, "learning_rate": 2.9432624113475177e-05, "loss": 0.0246, "step": 580 },
    { "epoch": 2.074468085106383, "grad_norm": 0.3442896008491516, "learning_rate": 2.925531914893617e-05, "loss": 0.0159, "step": 585 },
    { "epoch": 2.0921985815602837, "grad_norm": 1.1636303663253784, "learning_rate": 2.9078014184397162e-05, "loss": 0.018, "step": 590 },
    { "epoch": 2.1099290780141846, "grad_norm": 0.7616550326347351, "learning_rate": 2.8900709219858156e-05, "loss": 0.0392, "step": 595 },
    { "epoch": 2.127659574468085, "grad_norm": 0.17084839940071106, "learning_rate": 2.8723404255319154e-05, "loss": 0.0172, "step": 600 },
    { "epoch": 2.145390070921986, "grad_norm": 0.4418025612831116, "learning_rate": 2.8546099290780144e-05, "loss": 0.0287, "step": 605 },
    { "epoch": 2.1631205673758864, "grad_norm": 0.05007610097527504, "learning_rate": 2.836879432624114e-05, "loss": 0.0311, "step": 610 },
    { "epoch": 2.1808510638297873, "grad_norm": 2.390089273452759, "learning_rate": 2.819148936170213e-05, "loss": 0.0393, "step": 615 },
    { "epoch": 2.198581560283688, "grad_norm": 1.8016951084136963, "learning_rate": 2.8014184397163124e-05, "loss": 0.0518, "step": 620 },
    { "epoch": 2.2163120567375887, "grad_norm": 0.39676979184150696, "learning_rate": 2.7836879432624114e-05, "loss": 0.0319, "step": 625 },
    { "epoch": 2.2340425531914896, "grad_norm": 0.2608576714992523, "learning_rate": 2.765957446808511e-05, "loss": 0.0148, "step": 630 },
    { "epoch": 2.25177304964539, "grad_norm": 1.5164140462875366, "learning_rate": 2.74822695035461e-05, "loss": 0.04, "step": 635 },
    { "epoch": 2.269503546099291, "grad_norm": 0.3103680908679962, "learning_rate": 2.7304964539007094e-05, "loss": 0.0234, "step": 640 },
    { "epoch": 2.2872340425531914, "grad_norm": 0.4016372561454773, "learning_rate": 2.7127659574468084e-05, "loss": 0.0171, "step": 645 },
    { "epoch": 2.3049645390070923, "grad_norm": 0.5997406840324402, "learning_rate": 2.695035460992908e-05, "loss": 0.0387, "step": 650 },
    { "epoch": 2.3226950354609928, "grad_norm": 0.8444051742553711, "learning_rate": 2.6773049645390076e-05, "loss": 0.0539, "step": 655 },
    { "epoch": 2.3404255319148937, "grad_norm": 0.108007051050663, "learning_rate": 2.6595744680851064e-05, "loss": 0.0315, "step": 660 },
    { "epoch": 2.3581560283687946, "grad_norm": 0.21093517541885376, "learning_rate": 2.641843971631206e-05, "loss": 0.0175, "step": 665 },
    { "epoch": 2.375886524822695, "grad_norm": 0.4845324754714966, "learning_rate": 2.624113475177305e-05, "loss": 0.0185, "step": 670 },
    { "epoch": 2.393617021276596, "grad_norm": 2.6781203746795654, "learning_rate": 2.6063829787234046e-05, "loss": 0.0562, "step": 675 },
    { "epoch": 2.4113475177304964, "grad_norm": 0.301878958940506, "learning_rate": 2.5886524822695034e-05, "loss": 0.0298, "step": 680 },
    { "epoch": 2.4290780141843973, "grad_norm": 0.9099262952804565, "learning_rate": 2.570921985815603e-05, "loss": 0.0421, "step": 685 },
    { "epoch": 2.4468085106382977, "grad_norm": 0.11847200244665146, "learning_rate": 2.5531914893617022e-05, "loss": 0.0174, "step": 690 },
    { "epoch": 2.4645390070921986, "grad_norm": 0.12315840274095535, "learning_rate": 2.5354609929078016e-05, "loss": 0.0129, "step": 695 },
    { "epoch": 2.482269503546099, "grad_norm": 0.955663800239563, "learning_rate": 2.5177304964539007e-05, "loss": 0.0239, "step": 700 },
    { "epoch": 2.5, "grad_norm": 0.13527634739875793, "learning_rate": 2.5e-05, "loss": 0.0217, "step": 705 },
    { "epoch": 2.5177304964539005, "grad_norm": 0.24646982550621033, "learning_rate": 2.4822695035460995e-05, "loss": 0.0388, "step": 710 },
    { "epoch": 2.5354609929078014, "grad_norm": 0.09928718209266663, "learning_rate": 2.4645390070921986e-05, "loss": 0.0117, "step": 715 },
    { "epoch": 2.5531914893617023, "grad_norm": 2.1757736206054688, "learning_rate": 2.446808510638298e-05, "loss": 0.0205, "step": 720 },
    { "epoch": 2.5709219858156027, "grad_norm": 0.4307613968849182, "learning_rate": 2.429078014184397e-05, "loss": 0.0495, "step": 725 },
    { "epoch": 2.5886524822695036, "grad_norm": 0.07029764354228973, "learning_rate": 2.4113475177304965e-05, "loss": 0.0568, "step": 730 },
    { "epoch": 2.6063829787234045, "grad_norm": 1.3018437623977661, "learning_rate": 2.393617021276596e-05, "loss": 0.0389, "step": 735 },
    { "epoch": 2.624113475177305, "grad_norm": 0.2572399973869324, "learning_rate": 2.3758865248226954e-05, "loss": 0.0392, "step": 740 },
    { "epoch": 2.6418439716312054, "grad_norm": 1.5049391984939575, "learning_rate": 2.3581560283687945e-05, "loss": 0.0273, "step": 745 },
    { "epoch": 2.6595744680851063, "grad_norm": 1.8658090829849243, "learning_rate": 2.340425531914894e-05, "loss": 0.0503, "step": 750 },
    { "epoch": 2.6773049645390072, "grad_norm": 1.4984087944030762, "learning_rate": 2.322695035460993e-05, "loss": 0.0256, "step": 755 },
    { "epoch": 2.6950354609929077, "grad_norm": 1.7155365943908691, "learning_rate": 2.3049645390070924e-05, "loss": 0.0371, "step": 760 },
    { "epoch": 2.7127659574468086, "grad_norm": 2.706315517425537, "learning_rate": 2.2872340425531915e-05, "loss": 0.0289, "step": 765 },
    { "epoch": 2.7304964539007095, "grad_norm": 0.5071002244949341, "learning_rate": 2.269503546099291e-05, "loss": 0.0144, "step": 770 },
    { "epoch": 2.74822695035461, "grad_norm": 2.5877788066864014, "learning_rate": 2.25177304964539e-05, "loss": 0.0462, "step": 775 },
    { "epoch": 2.7659574468085104, "grad_norm": 0.13342903554439545, "learning_rate": 2.2340425531914894e-05, "loss": 0.0311, "step": 780 },
    { "epoch": 2.7836879432624113, "grad_norm": 0.5681129693984985, "learning_rate": 2.2163120567375885e-05, "loss": 0.0496, "step": 785 },
    { "epoch": 2.801418439716312, "grad_norm": 1.3924553394317627, "learning_rate": 2.1985815602836882e-05, "loss": 0.0302, "step": 790 },
    { "epoch": 2.8191489361702127, "grad_norm": 0.9623014330863953, "learning_rate": 2.1808510638297873e-05, "loss": 0.0166, "step": 795 },
    { "epoch": 2.8368794326241136, "grad_norm": 1.2445993423461914, "learning_rate": 2.1631205673758867e-05, "loss": 0.0237, "step": 800 },
    { "epoch": 2.854609929078014, "grad_norm": 0.8575385212898254, "learning_rate": 2.1453900709219858e-05, "loss": 0.0753, "step": 805 },
    { "epoch": 2.872340425531915, "grad_norm": 1.026553988456726, "learning_rate": 2.1276595744680852e-05, "loss": 0.0266, "step": 810 },
    { "epoch": 2.8900709219858154, "grad_norm": 1.974367380142212, "learning_rate": 2.1099290780141846e-05, "loss": 0.0302, "step": 815 },
    { "epoch": 2.9078014184397163, "grad_norm": 1.4900615215301514, "learning_rate": 2.0921985815602837e-05, "loss": 0.0499, "step": 820 },
    { "epoch": 2.925531914893617, "grad_norm": 2.0212316513061523, "learning_rate": 2.074468085106383e-05, "loss": 0.0411, "step": 825 },
    { "epoch": 2.9432624113475176, "grad_norm": 1.2716954946517944, "learning_rate": 2.0567375886524822e-05, "loss": 0.0538, "step": 830 },
    { "epoch": 2.9609929078014185, "grad_norm": 1.8260325193405151, "learning_rate": 2.0390070921985816e-05, "loss": 0.0405, "step": 835 },
    { "epoch": 2.978723404255319, "grad_norm": 0.23081009089946747, "learning_rate": 2.0212765957446807e-05, "loss": 0.0255, "step": 840 },
    { "epoch": 2.99645390070922, "grad_norm": 1.7825428247451782, "learning_rate": 2.0035460992907805e-05, "loss": 0.0303, "step": 845 },
    { "epoch": 3.0, "eval_accuracy": 0.9985222402837298, "eval_loss": 0.008113781921565533, "eval_runtime": 100.8534, "eval_samples_per_second": 67.097, "eval_steps_per_second": 1.051, "step": 846 },
    { "epoch": 3.0141843971631204, "grad_norm": 1.1453741788864136, "learning_rate": 1.9858156028368796e-05, "loss": 0.0207, "step": 850 },
    { "epoch": 3.0319148936170213, "grad_norm": 0.37254324555397034, "learning_rate": 1.968085106382979e-05, "loss": 0.0222, "step": 855 },
    { "epoch": 3.049645390070922, "grad_norm": 2.6893270015716553, "learning_rate": 1.950354609929078e-05, "loss": 0.0606, "step": 860 },
    { "epoch": 3.0673758865248226, "grad_norm": 2.4312515258789062, "learning_rate": 1.9326241134751775e-05, "loss": 0.0251, "step": 865 },
    { "epoch": 3.0851063829787235, "grad_norm": 1.2454279661178589, "learning_rate": 1.9148936170212766e-05, "loss": 0.0173, "step": 870 },
    { "epoch": 3.102836879432624, "grad_norm": 0.6530167460441589, "learning_rate": 1.897163120567376e-05, "loss": 0.0156, "step": 875 },
    { "epoch": 3.120567375886525, "grad_norm": 0.12946569919586182, "learning_rate": 1.879432624113475e-05, "loss": 0.0225, "step": 880 },
    { "epoch": 3.1382978723404253, "grad_norm": 0.357861191034317, "learning_rate": 1.8617021276595745e-05, "loss": 0.0121, "step": 885 },
    { "epoch": 3.1560283687943262, "grad_norm": 0.1260479986667633, "learning_rate": 1.8439716312056736e-05, "loss": 0.0496, "step": 890 },
    { "epoch": 3.173758865248227, "grad_norm": 0.7033432722091675, "learning_rate": 1.8262411347517733e-05, "loss": 0.0313, "step": 895 },
    { "epoch": 3.1914893617021276, "grad_norm": 1.0441181659698486, "learning_rate": 1.8085106382978724e-05, "loss": 0.0143, "step": 900 },
    { "epoch": 3.2092198581560285, "grad_norm": 0.032710377126932144, "learning_rate": 1.7907801418439718e-05, "loss": 0.0091, "step": 905 },
    { "epoch": 3.226950354609929, "grad_norm": 1.0562541484832764, "learning_rate": 1.773049645390071e-05, "loss": 0.0642, "step": 910 },
    { "epoch": 3.24468085106383, "grad_norm": 0.16099953651428223, "learning_rate": 1.7553191489361703e-05, "loss": 0.0195, "step": 915 },
    { "epoch": 3.2624113475177303, "grad_norm": 0.868291437625885, "learning_rate": 1.7375886524822697e-05, "loss": 0.0272, "step": 920 },
    { "epoch": 3.280141843971631, "grad_norm": 0.9113226532936096, "learning_rate": 1.7198581560283688e-05, "loss": 0.0317, "step": 925 },
    { "epoch": 3.297872340425532, "grad_norm": 1.1665312051773071, "learning_rate": 1.7021276595744682e-05, "loss": 0.0301, "step": 930 },
    { "epoch": 3.3156028368794326, "grad_norm": 1.0826573371887207, "learning_rate": 1.6843971631205673e-05, "loss": 0.0129, "step": 935 },
    { "epoch": 3.3333333333333335, "grad_norm": 1.054902195930481, "learning_rate": 1.6666666666666667e-05, "loss": 0.0346, "step": 940 },
    { "epoch": 3.351063829787234, "grad_norm": 2.4785568714141846, "learning_rate": 1.6489361702127658e-05, "loss": 0.0419, "step": 945 },
    { "epoch": 3.368794326241135, "grad_norm": 0.26840367913246155, "learning_rate": 1.6312056737588656e-05, "loss": 0.0184, "step": 950 },
    { "epoch": 3.3865248226950353, "grad_norm": 1.8092659711837769, "learning_rate": 1.6134751773049647e-05, "loss": 0.0374, "step": 955 },
    { "epoch": 3.404255319148936, "grad_norm": 0.6401882767677307, "learning_rate": 1.595744680851064e-05, "loss": 0.0521, "step": 960 },
    { "epoch": 3.421985815602837, "grad_norm": 0.3769932985305786, "learning_rate": 1.578014184397163e-05, "loss": 0.0176, "step": 965 },
    { "epoch": 3.4397163120567376, "grad_norm": 1.343386173248291, "learning_rate": 1.5602836879432626e-05, "loss": 0.0187, "step": 970 },
    { "epoch": 3.4574468085106385, "grad_norm": 0.7425516247749329, "learning_rate": 1.5425531914893617e-05, "loss": 0.0334, "step": 975 },
    { "epoch": 3.475177304964539, "grad_norm": 0.5523159503936768, "learning_rate": 1.5248226950354611e-05, "loss": 0.0233, "step": 980 },
    { "epoch": 3.49290780141844, "grad_norm": 0.3636256754398346, "learning_rate": 1.5070921985815603e-05, "loss": 0.0269, "step": 985 },
    { "epoch": 3.5106382978723403, "grad_norm": 0.7461684346199036, "learning_rate": 1.4893617021276596e-05, "loss": 0.0313, "step": 990 },
    { "epoch": 3.528368794326241, "grad_norm": 0.6355846524238586, "learning_rate": 1.4716312056737588e-05, "loss": 0.0216, "step": 995 },
    { "epoch": 3.546099290780142, "grad_norm": 0.046597789973020554, "learning_rate": 1.4539007092198581e-05, "loss": 0.0247, "step": 1000 },
    { "epoch": 3.5638297872340425, "grad_norm": 0.7104584574699402, "learning_rate": 1.4361702127659577e-05, "loss": 0.0375, "step": 1005 },
    { "epoch": 3.581560283687943, "grad_norm": 0.7681288719177246, "learning_rate": 1.418439716312057e-05, "loss": 0.013, "step": 1010 },
    { "epoch": 3.599290780141844, "grad_norm": 2.825113534927368, "learning_rate": 1.4007092198581562e-05, "loss": 0.0138, "step": 1015 },
    { "epoch": 3.617021276595745, "grad_norm": 0.15184448659420013, "learning_rate": 1.3829787234042554e-05, "loss": 0.0278, "step": 1020 },
    { "epoch": 3.6347517730496453, "grad_norm": 1.885351300239563, "learning_rate": 1.3652482269503547e-05, "loss": 0.047, "step": 1025 },
    { "epoch": 3.652482269503546, "grad_norm": 1.271122932434082, "learning_rate": 1.347517730496454e-05, "loss": 0.0309, "step": 1030 },
    { "epoch": 3.670212765957447, "grad_norm": 0.02030951902270317, "learning_rate": 1.3297872340425532e-05, "loss": 0.0072, "step": 1035 },
    { "epoch": 3.6879432624113475, "grad_norm": 0.046280715614557266, "learning_rate": 1.3120567375886524e-05, "loss": 0.0219, "step": 1040 },
    { "epoch": 3.705673758865248, "grad_norm": 2.204589605331421, "learning_rate": 1.2943262411347517e-05, "loss": 0.0402, "step": 1045 },
    { "epoch": 3.723404255319149, "grad_norm": 0.6903723478317261, "learning_rate": 1.2765957446808511e-05, "loss": 0.0319, "step": 1050 },
    { "epoch": 3.7411347517730498, "grad_norm": 0.8682162165641785, "learning_rate": 1.2588652482269504e-05, "loss": 0.0296, "step": 1055 },
    { "epoch": 3.7588652482269502, "grad_norm": 0.9223999381065369, "learning_rate": 1.2411347517730498e-05, "loss": 0.0365, "step": 1060 },
    { "epoch": 3.776595744680851, "grad_norm": 0.5410795211791992, "learning_rate": 1.223404255319149e-05, "loss": 0.0121, "step": 1065 },
    { "epoch": 3.794326241134752, "grad_norm": 0.17826128005981445, "learning_rate": 1.2056737588652483e-05, "loss": 0.0231, "step": 1070 },
    { "epoch": 3.8120567375886525, "grad_norm": 0.47573089599609375, "learning_rate": 1.1879432624113477e-05, "loss": 0.0266, "step": 1075 },
    { "epoch": 3.829787234042553, "grad_norm": 0.8923744559288025, "learning_rate": 1.170212765957447e-05, "loss": 0.0345, "step": 1080 },
    { "epoch": 3.847517730496454, "grad_norm": 1.696198582649231, "learning_rate": 1.1524822695035462e-05, "loss": 0.0451, "step": 1085 },
    { "epoch": 3.8652482269503547, "grad_norm": 0.4135494828224182, "learning_rate": 1.1347517730496454e-05, "loss": 0.0248, "step": 1090 },
    { "epoch": 3.882978723404255, "grad_norm": 0.4472277760505676, "learning_rate": 1.1170212765957447e-05, "loss": 0.0125, "step": 1095 },
    { "epoch": 3.900709219858156, "grad_norm": 3.610241174697876, "learning_rate": 1.0992907801418441e-05, "loss": 0.0263, "step": 1100 },
    { "epoch": 3.918439716312057, "grad_norm": 0.06732177734375, "learning_rate": 1.0815602836879434e-05, "loss": 0.0153, "step": 1105 },
    { "epoch": 3.9361702127659575, "grad_norm": 0.8051948547363281, "learning_rate": 1.0638297872340426e-05, "loss": 0.0217, "step": 1110 },
    { "epoch": 3.953900709219858, "grad_norm": 0.24708445370197296, "learning_rate": 1.0460992907801419e-05, "loss": 0.0432, "step": 1115 },
    { "epoch": 3.971631205673759, "grad_norm": 0.0366179421544075, "learning_rate": 1.0283687943262411e-05, "loss": 0.0065, "step": 1120 },
    { "epoch": 3.9893617021276597, "grad_norm": 1.3547054529190063, "learning_rate": 1.0106382978723404e-05, "loss": 0.0489, "step": 1125 },
    { "epoch": 4.0, "eval_accuracy": 0.9988177922269839, "eval_loss": 0.0067949132062494755, "eval_runtime": 99.24, "eval_samples_per_second": 68.188, "eval_steps_per_second": 1.068, "step": 1128 },
    { "epoch": 4.00709219858156, "grad_norm": 0.2567829191684723, "learning_rate": 9.929078014184398e-06, "loss": 0.0132, "step": 1130 },
    { "epoch": 4.024822695035461, "grad_norm": 0.8916745781898499, "learning_rate": 9.75177304964539e-06, "loss": 0.0217, "step": 1135 },
    { "epoch": 4.042553191489362, "grad_norm": 0.13239595293998718, "learning_rate": 9.574468085106383e-06, "loss": 0.0171, "step": 1140 },
    { "epoch": 4.060283687943262, "grad_norm": 0.2502301335334778, "learning_rate": 9.397163120567375e-06, "loss": 0.006, "step": 1145 },
    { "epoch": 4.078014184397163, "grad_norm": 0.3878186047077179, "learning_rate": 9.219858156028368e-06, "loss": 0.0184, "step": 1150 },
    { "epoch": 4.095744680851064, "grad_norm": 1.005374789237976, "learning_rate": 9.042553191489362e-06, "loss": 0.0633, "step": 1155 },
    { "epoch": 4.113475177304965, "grad_norm": 1.9449095726013184, "learning_rate": 8.865248226950355e-06, "loss": 0.027, "step": 1160 },
    { "epoch": 4.131205673758865, "grad_norm": 0.04838603362441063, "learning_rate": 8.687943262411349e-06, "loss": 0.0077, "step": 1165 },
    { "epoch": 4.148936170212766, "grad_norm": 0.07912664860486984, "learning_rate": 8.510638297872341e-06, "loss": 0.0075, "step": 1170 },
    { "epoch": 4.166666666666667, "grad_norm": 0.24843692779541016, "learning_rate": 8.333333333333334e-06, "loss": 0.0134, "step": 1175 },
    { "epoch": 4.184397163120567, "grad_norm": 0.7264091968536377, "learning_rate": 8.156028368794328e-06, "loss": 0.0367, "step": 1180 },
    { "epoch": 4.202127659574468, "grad_norm": 1.442780613899231, "learning_rate": 7.97872340425532e-06, "loss": 0.0143, "step": 1185 },
    { "epoch": 4.219858156028369, "grad_norm": 0.4630628228187561, "learning_rate": 7.801418439716313e-06, "loss": 0.0506, "step": 1190 },
    { "epoch": 4.23758865248227, "grad_norm": 1.1415939331054688, "learning_rate": 7.6241134751773054e-06, "loss": 0.0266, "step": 1195 },
    { "epoch": 4.25531914893617, "grad_norm": 0.8882013559341431, "learning_rate": 7.446808510638298e-06, "loss": 0.0159, "step": 1200 },
    { "epoch": 4.273049645390071, "grad_norm": 0.12462817132472992, "learning_rate": 7.2695035460992904e-06, "loss": 0.0209, "step": 1205 },
    { "epoch": 4.290780141843972, "grad_norm": 1.5256775617599487, "learning_rate": 7.092198581560285e-06, "loss": 0.0241, "step": 1210 },
    { "epoch": 4.308510638297872, "grad_norm": 3.1360552310943604, "learning_rate": 6.914893617021277e-06, "loss": 0.0298, "step": 1215 },
    { "epoch": 4.326241134751773, "grad_norm": 0.7372686266899109, "learning_rate": 6.73758865248227e-06, "loss": 0.0193, "step": 1220 },
    { "epoch": 4.343971631205674, "grad_norm": 1.058393955230713, "learning_rate": 6.560283687943262e-06, "loss": 0.0293, "step": 1225 },
    { "epoch": 4.361702127659575, "grad_norm": 0.09787444770336151, "learning_rate": 6.3829787234042555e-06, "loss": 0.0098, "step": 1230 },
    { "epoch": 4.379432624113475, "grad_norm": 0.4723568856716156, "learning_rate": 6.205673758865249e-06, "loss": 0.0248, "step": 1235 },
    { "epoch": 4.397163120567376, "grad_norm": 0.3988282382488251, "learning_rate": 6.028368794326241e-06, "loss": 0.0123, "step": 1240 },
    { "epoch": 4.414893617021277, "grad_norm": 0.080900177359581, "learning_rate": 5.851063829787235e-06, "loss": 0.0361, "step": 1245 },
    { "epoch": 4.432624113475177, "grad_norm": 0.28941839933395386, "learning_rate": 5.673758865248227e-06, "loss": 0.0173, "step": 1250 },
    { "epoch": 4.450354609929078, "grad_norm": 0.16654981672763824, "learning_rate": 5.4964539007092206e-06, "loss": 0.0374, "step": 1255 },
    { "epoch": 4.468085106382979, "grad_norm": 0.5223755240440369, "learning_rate": 5.319148936170213e-06, "loss": 0.0077, "step": 1260 },
    { "epoch": 4.48581560283688, "grad_norm": 0.7028219103813171, "learning_rate": 5.1418439716312056e-06, "loss": 0.0299, "step": 1265 },
    { "epoch": 4.50354609929078, "grad_norm": 0.3213287889957428, "learning_rate": 4.964539007092199e-06, "loss": 0.0569, "step": 1270 },
    { "epoch": 4.5212765957446805, "grad_norm": 0.26597318053245544, "learning_rate": 4.787234042553191e-06, "loss": 0.0122, "step": 1275 },
    { "epoch": 4.539007092198582, "grad_norm": 1.5103939771652222, "learning_rate": 4.609929078014184e-06, "loss": 0.021, "step": 1280 },
    { "epoch": 4.556737588652482, "grad_norm": 0.4471294581890106, "learning_rate": 4.432624113475177e-06, "loss": 0.0361, "step": 1285 },
    { "epoch": 4.574468085106383, "grad_norm": 0.21643757820129395, "learning_rate": 4.255319148936171e-06, "loss": 0.0294, "step": 1290 },
    { "epoch": 4.592198581560284, "grad_norm": 2.008852958679199, "learning_rate": 4.078014184397164e-06, "loss": 0.0301, "step": 1295 },
    { "epoch": 4.609929078014185, "grad_norm": 1.9454808235168457, "learning_rate": 3.9007092198581565e-06, "loss": 0.0468, "step": 1300 },
    { "epoch": 4.627659574468085, "grad_norm": 0.3638037443161011, "learning_rate": 3.723404255319149e-06, "loss": 0.009, "step": 1305 },
    { "epoch": 4.6453900709219855, "grad_norm": 0.5267730951309204, "learning_rate": 3.5460992907801423e-06, "loss": 0.0106, "step": 1310 },
    { "epoch": 4.663120567375887, "grad_norm": 0.6058443188667297, "learning_rate": 3.368794326241135e-06, "loss": 0.0408, "step": 1315 },
    { "epoch": 4.680851063829787, "grad_norm": 0.7102274894714355, "learning_rate": 3.1914893617021277e-06, "loss": 0.0177, "step": 1320 },
    { "epoch": 4.698581560283688, "grad_norm": 0.03302247077226639, "learning_rate": 3.0141843971631207e-06, "loss": 0.0263, "step": 1325 },
    { "epoch": 4.716312056737589, "grad_norm": 0.29536840319633484, "learning_rate": 2.8368794326241136e-06, "loss": 0.0176, "step": 1330 },
    { "epoch": 4.73404255319149, "grad_norm": 0.303792268037796, "learning_rate": 2.6595744680851065e-06, "loss": 0.0149, "step": 1335 },
    { "epoch": 4.75177304964539, "grad_norm": 1.4943726062774658, "learning_rate": 2.4822695035460995e-06, "loss": 0.0324, "step": 1340 },
    { "epoch": 4.7695035460992905, "grad_norm": 0.2641482949256897, "learning_rate": 2.304964539007092e-06, "loss": 0.0258, "step": 1345 },
    { "epoch": 4.787234042553192, "grad_norm": 1.3771953582763672, "learning_rate": 2.1276595744680853e-06, "loss": 0.0325, "step": 1350 },
    { "epoch": 4.804964539007092, "grad_norm": 2.062373638153076, "learning_rate": 1.9503546099290782e-06, "loss": 0.0152, "step": 1355 },
    { "epoch": 4.822695035460993, "grad_norm": 0.5422120094299316, "learning_rate": 1.7730496453900712e-06, "loss": 0.0387, "step": 1360 },
    { "epoch": 4.840425531914894, "grad_norm": 0.02120036445558071, "learning_rate": 1.5957446808510639e-06, "loss": 0.0122, "step": 1365 },
    { "epoch": 4.858156028368795, "grad_norm": 0.2176242172718048, "learning_rate": 1.4184397163120568e-06, "loss": 0.0205, "step": 1370 },
    { "epoch": 4.875886524822695, "grad_norm": 2.1586310863494873, "learning_rate": 1.2411347517730497e-06, "loss": 0.0245, "step": 1375 },
    { "epoch": 4.8936170212765955, "grad_norm": 0.3032481074333191, "learning_rate": 1.0638297872340427e-06, "loss": 0.0317, "step": 1380 },
    { "epoch": 4.911347517730497, "grad_norm": 0.1574692726135254, "learning_rate": 8.865248226950356e-07, "loss": 0.0126, "step": 1385 },
    { "epoch": 4.929078014184397, "grad_norm": 1.4468116760253906, "learning_rate": 7.092198581560284e-07, "loss": 0.0166, "step": 1390 },
    { "epoch": 4.946808510638298, "grad_norm": 0.5175489783287048, "learning_rate": 5.319148936170213e-07, "loss": 0.0101, "step": 1395 },
    { "epoch": 4.964539007092198, "grad_norm": 0.04873587563633919, "learning_rate": 3.546099290780142e-07, "loss": 0.0167, "step": 1400 },
    { "epoch": 4.9822695035460995, "grad_norm": 3.589585304260254, "learning_rate": 1.773049645390071e-07, "loss": 0.0284, "step": 1405 },
    { "epoch": 5.0, "grad_norm": 0.1343780905008316, "learning_rate": 0.0, "loss": 0.0284, "step": 1410 },
    { "epoch": 5.0, "eval_accuracy": 0.9989655681986109, "eval_loss": 0.006617749575525522, "eval_runtime": 99.2975, "eval_samples_per_second": 68.149, "eval_steps_per_second": 1.067, "step": 1410 },
    { "epoch": 5.0, "step": 1410, "total_flos": 6.98086588443052e+18, "train_loss": 0.042465827911299596, "train_runtime": 3748.8226, "train_samples_per_second": 24.03, "train_steps_per_second": 0.376 }
  ],
"logging_steps": 5, | |
"max_steps": 1410, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 5, | |
"save_steps": 500, | |
"stateful_callbacks": { | |
"TrainerControl": { | |
"args": { | |
"should_epoch_stop": false, | |
"should_evaluate": false, | |
"should_log": false, | |
"should_save": true, | |
"should_training_stop": true | |
}, | |
"attributes": {} | |
} | |
}, | |
"total_flos": 6.98086588443052e+18, | |
"train_batch_size": 64, | |
"trial_name": null, | |
"trial_params": null | |
} | |