{
  "best_metric": 0.9714285714285714,
  "best_model_checkpoint": "videomae-small-finetuned-kinetics-finetuned-ucf101-subset/checkpoint-1200",
  "epoch": 3.25,
  "eval_steps": 500,
  "global_step": 1200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 15.566545486450195,
      "learning_rate": 4.166666666666667e-06,
      "loss": 2.3603,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 14.607881546020508,
      "learning_rate": 8.333333333333334e-06,
      "loss": 2.3165,
      "step": 20
    },
    {
      "epoch": 0.03,
      "grad_norm": 13.733409881591797,
      "learning_rate": 1.25e-05,
      "loss": 2.2965,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 11.29304027557373,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.126,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 9.99641227722168,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 2.2969,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 10.398500442504883,
      "learning_rate": 2.5e-05,
      "loss": 2.2344,
      "step": 60
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.230289459228516,
      "learning_rate": 2.916666666666667e-05,
      "loss": 2.0098,
      "step": 70
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.157608032226562,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.0368,
      "step": 80
    },
    {
      "epoch": 0.07,
      "grad_norm": 16.360591888427734,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.8206,
      "step": 90
    },
    {
      "epoch": 0.08,
      "grad_norm": 20.42489242553711,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.8453,
      "step": 100
    },
    {
      "epoch": 0.09,
      "grad_norm": 24.402835845947266,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 1.8986,
      "step": 110
    },
    {
      "epoch": 0.1,
      "grad_norm": 17.82651710510254,
      "learning_rate": 5e-05,
      "loss": 1.6124,
      "step": 120
    },
    {
      "epoch": 0.11,
      "grad_norm": 18.78799819946289,
      "learning_rate": 4.9537037037037035e-05,
      "loss": 1.5551,
      "step": 130
    },
    {
      "epoch": 0.12,
      "grad_norm": 17.022598266601562,
      "learning_rate": 4.9074074074074075e-05,
      "loss": 1.503,
      "step": 140
    },
    {
      "epoch": 0.12,
      "grad_norm": 17.275959014892578,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 1.4129,
      "step": 150
    },
    {
      "epoch": 0.13,
      "grad_norm": 24.9034423828125,
      "learning_rate": 4.814814814814815e-05,
      "loss": 1.2563,
      "step": 160
    },
    {
      "epoch": 0.14,
      "grad_norm": 14.254481315612793,
      "learning_rate": 4.768518518518519e-05,
      "loss": 1.2807,
      "step": 170
    },
    {
      "epoch": 0.15,
      "grad_norm": 22.42249298095703,
      "learning_rate": 4.722222222222222e-05,
      "loss": 1.3364,
      "step": 180
    },
    {
      "epoch": 0.16,
      "grad_norm": 13.118966102600098,
      "learning_rate": 4.675925925925926e-05,
      "loss": 1.4398,
      "step": 190
    },
    {
      "epoch": 0.17,
      "grad_norm": 10.908171653747559,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.945,
      "step": 200
    },
    {
      "epoch": 0.17,
      "grad_norm": 10.181930541992188,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 1.2498,
      "step": 210
    },
    {
      "epoch": 0.18,
      "grad_norm": 20.386661529541016,
      "learning_rate": 4.5370370370370374e-05,
      "loss": 1.0773,
      "step": 220
    },
    {
      "epoch": 0.19,
      "grad_norm": 15.680155754089355,
      "learning_rate": 4.490740740740741e-05,
      "loss": 0.9573,
      "step": 230
    },
    {
      "epoch": 0.2,
      "grad_norm": 14.58272933959961,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 1.0516,
      "step": 240
    },
    {
      "epoch": 0.21,
      "grad_norm": 8.87700080871582,
      "learning_rate": 4.3981481481481486e-05,
      "loss": 0.9562,
      "step": 250
    },
    {
      "epoch": 0.22,
      "grad_norm": 10.340452194213867,
      "learning_rate": 4.351851851851852e-05,
      "loss": 0.832,
      "step": 260
    },
    {
      "epoch": 0.23,
      "grad_norm": 20.359683990478516,
      "learning_rate": 4.305555555555556e-05,
      "loss": 0.8898,
      "step": 270
    },
    {
      "epoch": 0.23,
      "grad_norm": 11.624580383300781,
      "learning_rate": 4.259259259259259e-05,
      "loss": 0.7074,
      "step": 280
    },
    {
      "epoch": 0.24,
      "grad_norm": 12.319985389709473,
      "learning_rate": 4.212962962962963e-05,
      "loss": 0.6718,
      "step": 290
    },
    {
      "epoch": 0.25,
      "grad_norm": 10.40351390838623,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.7637,
      "step": 300
    },
    {
      "epoch": 0.25,
      "eval_accuracy": 0.9285714285714286,
      "eval_loss": 0.6597293615341187,
      "eval_runtime": 17.0748,
      "eval_samples_per_second": 4.1,
      "eval_steps_per_second": 4.1,
      "step": 300
    },
    {
      "epoch": 1.01,
      "grad_norm": 9.951855659484863,
      "learning_rate": 4.1203703703703705e-05,
      "loss": 0.7042,
      "step": 310
    },
    {
      "epoch": 1.02,
      "grad_norm": 9.25134563446045,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.3955,
      "step": 320
    },
    {
      "epoch": 1.02,
      "grad_norm": 25.354598999023438,
      "learning_rate": 4.027777777777778e-05,
      "loss": 0.6692,
      "step": 330
    },
    {
      "epoch": 1.03,
      "grad_norm": 9.671762466430664,
      "learning_rate": 3.981481481481482e-05,
      "loss": 0.5101,
      "step": 340
    },
    {
      "epoch": 1.04,
      "grad_norm": 2.988225221633911,
      "learning_rate": 3.935185185185186e-05,
      "loss": 0.3187,
      "step": 350
    },
    {
      "epoch": 1.05,
      "grad_norm": 6.8107829093933105,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.4615,
      "step": 360
    },
    {
      "epoch": 1.06,
      "grad_norm": 23.338829040527344,
      "learning_rate": 3.8425925925925924e-05,
      "loss": 0.3835,
      "step": 370
    },
    {
      "epoch": 1.07,
      "grad_norm": 7.323309421539307,
      "learning_rate": 3.7962962962962964e-05,
      "loss": 0.3547,
      "step": 380
    },
    {
      "epoch": 1.07,
      "grad_norm": 4.660543441772461,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.2491,
      "step": 390
    },
    {
      "epoch": 1.08,
      "grad_norm": 4.3633012771606445,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2851,
      "step": 400
    },
    {
      "epoch": 1.09,
      "grad_norm": 5.248176097869873,
      "learning_rate": 3.6574074074074076e-05,
      "loss": 0.1751,
      "step": 410
    },
    {
      "epoch": 1.1,
      "grad_norm": 1.8538141250610352,
      "learning_rate": 3.611111111111111e-05,
      "loss": 0.4174,
      "step": 420
    },
    {
      "epoch": 1.11,
      "grad_norm": 3.1465137004852295,
      "learning_rate": 3.564814814814815e-05,
      "loss": 0.3802,
      "step": 430
    },
    {
      "epoch": 1.12,
      "grad_norm": 5.7329912185668945,
      "learning_rate": 3.518518518518519e-05,
      "loss": 0.2329,
      "step": 440
    },
    {
      "epoch": 1.12,
      "grad_norm": 37.63209915161133,
      "learning_rate": 3.472222222222222e-05,
      "loss": 0.3905,
      "step": 450
    },
    {
      "epoch": 1.13,
      "grad_norm": 4.17129373550415,
      "learning_rate": 3.425925925925926e-05,
      "loss": 0.5252,
      "step": 460
    },
    {
      "epoch": 1.14,
      "grad_norm": 5.6671462059021,
      "learning_rate": 3.3796296296296295e-05,
      "loss": 0.2377,
      "step": 470
    },
    {
      "epoch": 1.15,
      "grad_norm": 1.0317806005477905,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.5215,
      "step": 480
    },
    {
      "epoch": 1.16,
      "grad_norm": 4.563450813293457,
      "learning_rate": 3.2870370370370375e-05,
      "loss": 0.2337,
      "step": 490
    },
    {
      "epoch": 1.17,
      "grad_norm": 44.28984069824219,
      "learning_rate": 3.240740740740741e-05,
      "loss": 0.5828,
      "step": 500
    },
    {
      "epoch": 1.18,
      "grad_norm": 4.3612260818481445,
      "learning_rate": 3.194444444444444e-05,
      "loss": 0.1757,
      "step": 510
    },
    {
      "epoch": 1.18,
      "grad_norm": 3.157717227935791,
      "learning_rate": 3.148148148148148e-05,
      "loss": 0.2369,
      "step": 520
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.7239309549331665,
      "learning_rate": 3.101851851851852e-05,
      "loss": 0.378,
      "step": 530
    },
    {
      "epoch": 1.2,
      "grad_norm": 8.372401237487793,
      "learning_rate": 3.055555555555556e-05,
      "loss": 0.131,
      "step": 540
    },
    {
      "epoch": 1.21,
      "grad_norm": 1.4280123710632324,
      "learning_rate": 3.0092592592592593e-05,
      "loss": 0.1566,
      "step": 550
    },
    {
      "epoch": 1.22,
      "grad_norm": 1.6428464651107788,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.1315,
      "step": 560
    },
    {
      "epoch": 1.23,
      "grad_norm": 1.2491836547851562,
      "learning_rate": 2.916666666666667e-05,
      "loss": 0.3698,
      "step": 570
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.8162739872932434,
      "learning_rate": 2.8703703703703706e-05,
      "loss": 0.1631,
      "step": 580
    },
    {
      "epoch": 1.24,
      "grad_norm": 3.682142972946167,
      "learning_rate": 2.824074074074074e-05,
      "loss": 0.4162,
      "step": 590
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.344789981842041,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.512,
      "step": 600
    },
    {
      "epoch": 1.25,
      "eval_accuracy": 0.9142857142857143,
      "eval_loss": 0.28013283014297485,
      "eval_runtime": 17.3712,
      "eval_samples_per_second": 4.03,
      "eval_steps_per_second": 4.03,
      "step": 600
    },
    {
      "epoch": 2.01,
      "grad_norm": 1.431007981300354,
      "learning_rate": 2.7314814814814816e-05,
      "loss": 0.104,
      "step": 610
    },
    {
      "epoch": 2.02,
      "grad_norm": 0.896948516368866,
      "learning_rate": 2.6851851851851855e-05,
      "loss": 0.078,
      "step": 620
    },
    {
      "epoch": 2.02,
      "grad_norm": 4.150156021118164,
      "learning_rate": 2.6388888888888892e-05,
      "loss": 0.0748,
      "step": 630
    },
    {
      "epoch": 2.03,
      "grad_norm": 0.5186155438423157,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.0979,
      "step": 640
    },
    {
      "epoch": 2.04,
      "grad_norm": 4.285228252410889,
      "learning_rate": 2.5462962962962965e-05,
      "loss": 0.1827,
      "step": 650
    },
    {
      "epoch": 2.05,
      "grad_norm": 60.02833557128906,
      "learning_rate": 2.5e-05,
      "loss": 0.1822,
      "step": 660
    },
    {
      "epoch": 2.06,
      "grad_norm": 5.049980640411377,
      "learning_rate": 2.4537037037037038e-05,
      "loss": 0.0794,
      "step": 670
    },
    {
      "epoch": 2.07,
      "grad_norm": 7.939412593841553,
      "learning_rate": 2.4074074074074074e-05,
      "loss": 0.0854,
      "step": 680
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.5244776606559753,
      "learning_rate": 2.361111111111111e-05,
      "loss": 0.0561,
      "step": 690
    },
    {
      "epoch": 2.08,
      "grad_norm": 1.540111780166626,
      "learning_rate": 2.314814814814815e-05,
      "loss": 0.3101,
      "step": 700
    },
    {
      "epoch": 2.09,
      "grad_norm": 1.268227458000183,
      "learning_rate": 2.2685185185185187e-05,
      "loss": 0.0772,
      "step": 710
    },
    {
      "epoch": 2.1,
      "grad_norm": 3.848170280456543,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.0756,
      "step": 720
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.4123232662677765,
      "learning_rate": 2.175925925925926e-05,
      "loss": 0.0551,
      "step": 730
    },
    {
      "epoch": 2.12,
      "grad_norm": 1.0512717962265015,
      "learning_rate": 2.1296296296296296e-05,
      "loss": 0.0531,
      "step": 740
    },
    {
      "epoch": 2.12,
      "grad_norm": 1.3583945035934448,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 0.0664,
      "step": 750
    },
    {
      "epoch": 2.13,
      "grad_norm": 0.6737596988677979,
      "learning_rate": 2.037037037037037e-05,
      "loss": 0.047,
      "step": 760
    },
    {
      "epoch": 2.14,
      "grad_norm": 2.3283467292785645,
      "learning_rate": 1.990740740740741e-05,
      "loss": 0.0639,
      "step": 770
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.4911932945251465,
      "learning_rate": 1.9444444444444445e-05,
      "loss": 0.0508,
      "step": 780
    },
    {
      "epoch": 2.16,
      "grad_norm": 12.502443313598633,
      "learning_rate": 1.8981481481481482e-05,
      "loss": 0.0937,
      "step": 790
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.463198721408844,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.0593,
      "step": 800
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.4377615749835968,
      "learning_rate": 1.8055555555555555e-05,
      "loss": 0.04,
      "step": 810
    },
    {
      "epoch": 2.18,
      "grad_norm": 0.7480078339576721,
      "learning_rate": 1.7592592592592595e-05,
      "loss": 0.0491,
      "step": 820
    },
    {
      "epoch": 2.19,
      "grad_norm": 1.6011290550231934,
      "learning_rate": 1.712962962962963e-05,
      "loss": 0.0384,
      "step": 830
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.36971336603164673,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.1024,
      "step": 840
    },
    {
      "epoch": 2.21,
      "grad_norm": 1.1993064880371094,
      "learning_rate": 1.6203703703703704e-05,
      "loss": 0.0484,
      "step": 850
    },
    {
      "epoch": 2.22,
      "grad_norm": 0.5378360152244568,
      "learning_rate": 1.574074074074074e-05,
      "loss": 0.0816,
      "step": 860
    },
    {
      "epoch": 2.23,
      "grad_norm": 0.3339988589286804,
      "learning_rate": 1.527777777777778e-05,
      "loss": 0.0409,
      "step": 870
    },
    {
      "epoch": 2.23,
      "grad_norm": 0.35232287645339966,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.05,
      "step": 880
    },
    {
      "epoch": 2.24,
      "grad_norm": 1.5304961204528809,
      "learning_rate": 1.4351851851851853e-05,
      "loss": 0.0407,
      "step": 890
    },
    {
      "epoch": 2.25,
      "grad_norm": 2.890812873840332,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.0404,
      "step": 900
    },
    {
      "epoch": 2.25,
      "eval_accuracy": 0.8571428571428571,
      "eval_loss": 0.4797164797782898,
      "eval_runtime": 19.7535,
      "eval_samples_per_second": 3.544,
      "eval_steps_per_second": 3.544,
      "step": 900
    },
    {
      "epoch": 3.01,
      "grad_norm": 0.30314165353775024,
      "learning_rate": 1.3425925925925928e-05,
      "loss": 0.0451,
      "step": 910
    },
    {
      "epoch": 3.02,
      "grad_norm": 0.8032521605491638,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 0.0301,
      "step": 920
    },
    {
      "epoch": 3.02,
      "grad_norm": 0.33515045046806335,
      "learning_rate": 1.25e-05,
      "loss": 0.0343,
      "step": 930
    },
    {
      "epoch": 3.03,
      "grad_norm": 2.0815067291259766,
      "learning_rate": 1.2037037037037037e-05,
      "loss": 0.041,
      "step": 940
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.49005648493766785,
      "learning_rate": 1.1574074074074075e-05,
      "loss": 0.0319,
      "step": 950
    },
    {
      "epoch": 3.05,
      "grad_norm": 0.4375312030315399,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.0367,
      "step": 960
    },
    {
      "epoch": 3.06,
      "grad_norm": 0.27408626675605774,
      "learning_rate": 1.0648148148148148e-05,
      "loss": 0.0273,
      "step": 970
    },
    {
      "epoch": 3.07,
      "grad_norm": 0.2981482446193695,
      "learning_rate": 1.0185185185185185e-05,
      "loss": 0.0319,
      "step": 980
    },
    {
      "epoch": 3.08,
      "grad_norm": 0.4230247735977173,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.0338,
      "step": 990
    },
    {
      "epoch": 3.08,
      "grad_norm": 0.37668561935424805,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.256,
      "step": 1000
    },
    {
      "epoch": 3.09,
      "grad_norm": 2.5758321285247803,
      "learning_rate": 8.796296296296297e-06,
      "loss": 0.0417,
      "step": 1010
    },
    {
      "epoch": 3.1,
      "grad_norm": 0.5381589531898499,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.038,
      "step": 1020
    },
    {
      "epoch": 3.11,
      "grad_norm": 0.45010364055633545,
      "learning_rate": 7.87037037037037e-06,
      "loss": 0.0422,
      "step": 1030
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.579723596572876,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.0264,
      "step": 1040
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.37265080213546753,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.0289,
      "step": 1050
    },
    {
      "epoch": 3.13,
      "grad_norm": 0.85558021068573,
      "learning_rate": 6.481481481481481e-06,
      "loss": 0.0442,
      "step": 1060
    },
    {
      "epoch": 3.14,
      "grad_norm": 0.4955598711967468,
      "learning_rate": 6.0185185185185185e-06,
      "loss": 0.0429,
      "step": 1070
    },
    {
      "epoch": 3.15,
      "grad_norm": 0.3474845886230469,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.0323,
      "step": 1080
    },
    {
      "epoch": 3.16,
      "grad_norm": 1.5767329931259155,
      "learning_rate": 5.092592592592592e-06,
      "loss": 0.0352,
      "step": 1090
    },
    {
      "epoch": 3.17,
      "grad_norm": 0.396416574716568,
      "learning_rate": 4.6296296296296296e-06,
      "loss": 0.0256,
      "step": 1100
    },
    {
      "epoch": 3.17,
      "grad_norm": 0.38486164808273315,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.0286,
      "step": 1110
    },
    {
      "epoch": 3.18,
      "grad_norm": 0.31546345353126526,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.0593,
      "step": 1120
    },
    {
      "epoch": 3.19,
      "grad_norm": 0.4107477068901062,
      "learning_rate": 3.2407407407407406e-06,
      "loss": 0.03,
      "step": 1130
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.29505640268325806,
      "learning_rate": 2.777777777777778e-06,
      "loss": 0.0289,
      "step": 1140
    },
    {
      "epoch": 3.21,
      "grad_norm": 0.41277098655700684,
      "learning_rate": 2.3148148148148148e-06,
      "loss": 0.0355,
      "step": 1150
    },
    {
      "epoch": 3.22,
      "grad_norm": 0.515819787979126,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.0273,
      "step": 1160
    },
    {
      "epoch": 3.23,
      "grad_norm": 0.34232866764068604,
      "learning_rate": 1.388888888888889e-06,
      "loss": 0.1049,
      "step": 1170
    },
    {
      "epoch": 3.23,
      "grad_norm": 0.36424246430397034,
      "learning_rate": 9.259259259259259e-07,
      "loss": 0.0722,
      "step": 1180
    },
    {
      "epoch": 3.24,
      "grad_norm": 0.5086659789085388,
      "learning_rate": 4.6296296296296297e-07,
      "loss": 0.0257,
      "step": 1190
    },
    {
      "epoch": 3.25,
      "grad_norm": 1.790651798248291,
      "learning_rate": 0.0,
      "loss": 0.4026,
      "step": 1200
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.9714285714285714,
      "eval_loss": 0.12024375051259995,
      "eval_runtime": 21.114,
      "eval_samples_per_second": 3.315,
      "eval_steps_per_second": 3.315,
      "step": 1200
    },
    {
      "epoch": 3.25,
      "step": 1200,
      "total_flos": 3.794829164937216e+17,
      "train_loss": 0.49671098495523136,
      "train_runtime": 661.0871,
      "train_samples_per_second": 1.815,
      "train_steps_per_second": 1.815
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.967741935483871,
      "eval_loss": 0.11705803126096725,
      "eval_runtime": 42.6289,
      "eval_samples_per_second": 3.636,
      "eval_steps_per_second": 3.636,
      "step": 1200
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.967741935483871,
      "eval_loss": 0.11705802381038666,
      "eval_runtime": 38.8628,
      "eval_samples_per_second": 3.988,
      "eval_steps_per_second": 3.988,
      "step": 1200
    }
  ],
  "logging_steps": 10,
  "max_steps": 1200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 500,
  "total_flos": 3.794829164937216e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}