| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 19.05, |
| "eval_steps": 500, |
| "global_step": 2000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.005, |
| "grad_norm": 5.616273880004883, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 1.9513, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.260721206665039, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 1.7789, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.015, |
| "grad_norm": 5.373858451843262, |
| "learning_rate": 5.8e-06, |
| "loss": 1.6535, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 4.073526859283447, |
| "learning_rate": 7.800000000000002e-06, |
| "loss": 1.4488, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.025, |
| "grad_norm": 4.743659973144531, |
| "learning_rate": 9.800000000000001e-06, |
| "loss": 1.4239, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 5.532003879547119, |
| "learning_rate": 1.18e-05, |
| "loss": 1.4531, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.035, |
| "grad_norm": 4.564126491546631, |
| "learning_rate": 1.38e-05, |
| "loss": 1.4279, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 4.471712112426758, |
| "learning_rate": 1.58e-05, |
| "loss": 1.3619, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.045, |
| "grad_norm": 5.032183647155762, |
| "learning_rate": 1.7800000000000002e-05, |
| "loss": 1.3614, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 4.518551349639893, |
| "learning_rate": 1.98e-05, |
| "loss": 1.3551, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.005, |
| "grad_norm": 6.518773555755615, |
| "learning_rate": 1.990526315789474e-05, |
| "loss": 0.8287, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.01, |
| "grad_norm": 3.6112170219421387, |
| "learning_rate": 1.98e-05, |
| "loss": 0.7035, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.015, |
| "grad_norm": 5.7912726402282715, |
| "learning_rate": 1.9694736842105263e-05, |
| "loss": 0.8074, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.02, |
| "grad_norm": 3.595136880874634, |
| "learning_rate": 1.9589473684210527e-05, |
| "loss": 0.7915, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.025, |
| "grad_norm": 4.417491912841797, |
| "learning_rate": 1.9484210526315793e-05, |
| "loss": 0.8106, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.03, |
| "grad_norm": 4.246488094329834, |
| "learning_rate": 1.9378947368421053e-05, |
| "loss": 0.8249, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.035, |
| "grad_norm": 5.055039882659912, |
| "learning_rate": 1.9273684210526317e-05, |
| "loss": 0.8426, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.04, |
| "grad_norm": 4.677482604980469, |
| "learning_rate": 1.916842105263158e-05, |
| "loss": 0.8494, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.045, |
| "grad_norm": 4.659126281738281, |
| "learning_rate": 1.9063157894736843e-05, |
| "loss": 0.8388, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.05, |
| "grad_norm": 4.688161849975586, |
| "learning_rate": 1.8957894736842106e-05, |
| "loss": 0.8079, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.005, |
| "grad_norm": 5.431162357330322, |
| "learning_rate": 1.885263157894737e-05, |
| "loss": 0.3679, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.01, |
| "grad_norm": 3.8187713623046875, |
| "learning_rate": 1.8747368421052633e-05, |
| "loss": 0.358, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.015, |
| "grad_norm": 3.1807172298431396, |
| "learning_rate": 1.8642105263157896e-05, |
| "loss": 0.3387, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.02, |
| "grad_norm": 4.693820953369141, |
| "learning_rate": 1.853684210526316e-05, |
| "loss": 0.3047, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.025, |
| "grad_norm": 4.142691135406494, |
| "learning_rate": 1.8431578947368423e-05, |
| "loss": 0.3039, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.03, |
| "grad_norm": 3.9841394424438477, |
| "learning_rate": 1.8326315789473686e-05, |
| "loss": 0.3011, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.035, |
| "grad_norm": 4.5235676765441895, |
| "learning_rate": 1.822105263157895e-05, |
| "loss": 0.3537, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.04, |
| "grad_norm": 4.285941123962402, |
| "learning_rate": 1.8115789473684213e-05, |
| "loss": 0.3105, |
| "step": 280 |
| }, |
| { |
| "epoch": 2.045, |
| "grad_norm": 3.71958327293396, |
| "learning_rate": 1.8010526315789476e-05, |
| "loss": 0.3346, |
| "step": 290 |
| }, |
| { |
| "epoch": 2.05, |
| "grad_norm": 3.477006196975708, |
| "learning_rate": 1.790526315789474e-05, |
| "loss": 0.3125, |
| "step": 300 |
| }, |
| { |
| "epoch": 3.005, |
| "grad_norm": 2.9285550117492676, |
| "learning_rate": 1.7800000000000002e-05, |
| "loss": 0.1397, |
| "step": 310 |
| }, |
| { |
| "epoch": 3.01, |
| "grad_norm": 2.792161703109741, |
| "learning_rate": 1.7694736842105266e-05, |
| "loss": 0.1566, |
| "step": 320 |
| }, |
| { |
| "epoch": 3.015, |
| "grad_norm": 2.6446938514709473, |
| "learning_rate": 1.758947368421053e-05, |
| "loss": 0.1583, |
| "step": 330 |
| }, |
| { |
| "epoch": 3.02, |
| "grad_norm": 3.135241985321045, |
| "learning_rate": 1.748421052631579e-05, |
| "loss": 0.1536, |
| "step": 340 |
| }, |
| { |
| "epoch": 3.025, |
| "grad_norm": 2.987708568572998, |
| "learning_rate": 1.7378947368421052e-05, |
| "loss": 0.1542, |
| "step": 350 |
| }, |
| { |
| "epoch": 3.03, |
| "grad_norm": 2.5585477352142334, |
| "learning_rate": 1.727368421052632e-05, |
| "loss": 0.1742, |
| "step": 360 |
| }, |
| { |
| "epoch": 3.035, |
| "grad_norm": 2.899765729904175, |
| "learning_rate": 1.7168421052631582e-05, |
| "loss": 0.1611, |
| "step": 370 |
| }, |
| { |
| "epoch": 3.04, |
| "grad_norm": 2.752307176589966, |
| "learning_rate": 1.7063157894736845e-05, |
| "loss": 0.2076, |
| "step": 380 |
| }, |
| { |
| "epoch": 3.045, |
| "grad_norm": 2.4184489250183105, |
| "learning_rate": 1.6957894736842105e-05, |
| "loss": 0.1744, |
| "step": 390 |
| }, |
| { |
| "epoch": 3.05, |
| "grad_norm": 2.342548131942749, |
| "learning_rate": 1.685263157894737e-05, |
| "loss": 0.1657, |
| "step": 400 |
| }, |
| { |
| "epoch": 4.005, |
| "grad_norm": 3.1114871501922607, |
| "learning_rate": 1.6747368421052632e-05, |
| "loss": 0.0931, |
| "step": 410 |
| }, |
| { |
| "epoch": 4.01, |
| "grad_norm": 2.3490405082702637, |
| "learning_rate": 1.66421052631579e-05, |
| "loss": 0.097, |
| "step": 420 |
| }, |
| { |
| "epoch": 4.015, |
| "grad_norm": 2.8796679973602295, |
| "learning_rate": 1.653684210526316e-05, |
| "loss": 0.1063, |
| "step": 430 |
| }, |
| { |
| "epoch": 4.02, |
| "grad_norm": 2.324800491333008, |
| "learning_rate": 1.643157894736842e-05, |
| "loss": 0.1073, |
| "step": 440 |
| }, |
| { |
| "epoch": 4.025, |
| "grad_norm": 2.4009687900543213, |
| "learning_rate": 1.6326315789473685e-05, |
| "loss": 0.0968, |
| "step": 450 |
| }, |
| { |
| "epoch": 4.03, |
| "grad_norm": 2.9474356174468994, |
| "learning_rate": 1.6221052631578948e-05, |
| "loss": 0.0925, |
| "step": 460 |
| }, |
| { |
| "epoch": 4.035, |
| "grad_norm": 2.1847965717315674, |
| "learning_rate": 1.611578947368421e-05, |
| "loss": 0.114, |
| "step": 470 |
| }, |
| { |
| "epoch": 4.04, |
| "grad_norm": 1.7918593883514404, |
| "learning_rate": 1.6010526315789475e-05, |
| "loss": 0.1036, |
| "step": 480 |
| }, |
| { |
| "epoch": 4.045, |
| "grad_norm": 2.242753505706787, |
| "learning_rate": 1.5905263157894738e-05, |
| "loss": 0.0939, |
| "step": 490 |
| }, |
| { |
| "epoch": 4.05, |
| "grad_norm": 4.071936130523682, |
| "learning_rate": 1.58e-05, |
| "loss": 0.1138, |
| "step": 500 |
| }, |
| { |
| "epoch": 5.005, |
| "grad_norm": 1.5614343881607056, |
| "learning_rate": 1.5694736842105264e-05, |
| "loss": 0.0469, |
| "step": 510 |
| }, |
| { |
| "epoch": 5.01, |
| "grad_norm": 2.1511878967285156, |
| "learning_rate": 1.5589473684210528e-05, |
| "loss": 0.0544, |
| "step": 520 |
| }, |
| { |
| "epoch": 5.015, |
| "grad_norm": 1.4617661237716675, |
| "learning_rate": 1.548421052631579e-05, |
| "loss": 0.0565, |
| "step": 530 |
| }, |
| { |
| "epoch": 5.02, |
| "grad_norm": 2.533205270767212, |
| "learning_rate": 1.5378947368421054e-05, |
| "loss": 0.0582, |
| "step": 540 |
| }, |
| { |
| "epoch": 5.025, |
| "grad_norm": 2.398082733154297, |
| "learning_rate": 1.5273684210526318e-05, |
| "loss": 0.065, |
| "step": 550 |
| }, |
| { |
| "epoch": 5.03, |
| "grad_norm": 1.9306875467300415, |
| "learning_rate": 1.516842105263158e-05, |
| "loss": 0.0631, |
| "step": 560 |
| }, |
| { |
| "epoch": 5.035, |
| "grad_norm": 2.3642289638519287, |
| "learning_rate": 1.5063157894736844e-05, |
| "loss": 0.0641, |
| "step": 570 |
| }, |
| { |
| "epoch": 5.04, |
| "grad_norm": 1.4011757373809814, |
| "learning_rate": 1.4957894736842107e-05, |
| "loss": 0.0773, |
| "step": 580 |
| }, |
| { |
| "epoch": 5.045, |
| "grad_norm": 2.026855945587158, |
| "learning_rate": 1.4852631578947369e-05, |
| "loss": 0.0664, |
| "step": 590 |
| }, |
| { |
| "epoch": 5.05, |
| "grad_norm": 2.1766583919525146, |
| "learning_rate": 1.4747368421052632e-05, |
| "loss": 0.0545, |
| "step": 600 |
| }, |
| { |
| "epoch": 6.005, |
| "grad_norm": 1.652336835861206, |
| "learning_rate": 1.4642105263157896e-05, |
| "loss": 0.0482, |
| "step": 610 |
| }, |
| { |
| "epoch": 6.01, |
| "grad_norm": 1.5396881103515625, |
| "learning_rate": 1.4536842105263159e-05, |
| "loss": 0.0349, |
| "step": 620 |
| }, |
| { |
| "epoch": 6.015, |
| "grad_norm": 2.64931321144104, |
| "learning_rate": 1.4431578947368424e-05, |
| "loss": 0.0491, |
| "step": 630 |
| }, |
| { |
| "epoch": 6.02, |
| "grad_norm": 1.5272787809371948, |
| "learning_rate": 1.4326315789473685e-05, |
| "loss": 0.0471, |
| "step": 640 |
| }, |
| { |
| "epoch": 6.025, |
| "grad_norm": 1.5077707767486572, |
| "learning_rate": 1.4221052631578949e-05, |
| "loss": 0.0413, |
| "step": 650 |
| }, |
| { |
| "epoch": 6.03, |
| "grad_norm": 1.2149569988250732, |
| "learning_rate": 1.4115789473684212e-05, |
| "loss": 0.0442, |
| "step": 660 |
| }, |
| { |
| "epoch": 6.035, |
| "grad_norm": 1.6021332740783691, |
| "learning_rate": 1.4010526315789475e-05, |
| "loss": 0.0386, |
| "step": 670 |
| }, |
| { |
| "epoch": 6.04, |
| "grad_norm": 1.8967393636703491, |
| "learning_rate": 1.3905263157894737e-05, |
| "loss": 0.047, |
| "step": 680 |
| }, |
| { |
| "epoch": 6.045, |
| "grad_norm": 1.5336661338806152, |
| "learning_rate": 1.38e-05, |
| "loss": 0.0381, |
| "step": 690 |
| }, |
| { |
| "epoch": 6.05, |
| "grad_norm": 2.3286962509155273, |
| "learning_rate": 1.3694736842105265e-05, |
| "loss": 0.0479, |
| "step": 700 |
| }, |
| { |
| "epoch": 7.005, |
| "grad_norm": 1.4702962636947632, |
| "learning_rate": 1.3589473684210528e-05, |
| "loss": 0.0231, |
| "step": 710 |
| }, |
| { |
| "epoch": 7.01, |
| "grad_norm": 1.7746834754943848, |
| "learning_rate": 1.3484210526315792e-05, |
| "loss": 0.0291, |
| "step": 720 |
| }, |
| { |
| "epoch": 7.015, |
| "grad_norm": 1.37478768825531, |
| "learning_rate": 1.3378947368421053e-05, |
| "loss": 0.0368, |
| "step": 730 |
| }, |
| { |
| "epoch": 7.02, |
| "grad_norm": 1.2662794589996338, |
| "learning_rate": 1.3273684210526316e-05, |
| "loss": 0.034, |
| "step": 740 |
| }, |
| { |
| "epoch": 7.025, |
| "grad_norm": 0.6514212489128113, |
| "learning_rate": 1.316842105263158e-05, |
| "loss": 0.0295, |
| "step": 750 |
| }, |
| { |
| "epoch": 7.03, |
| "grad_norm": 0.9872649908065796, |
| "learning_rate": 1.3063157894736845e-05, |
| "loss": 0.0248, |
| "step": 760 |
| }, |
| { |
| "epoch": 7.035, |
| "grad_norm": 0.9525852203369141, |
| "learning_rate": 1.2957894736842108e-05, |
| "loss": 0.0254, |
| "step": 770 |
| }, |
| { |
| "epoch": 7.04, |
| "grad_norm": 1.5137120485305786, |
| "learning_rate": 1.285263157894737e-05, |
| "loss": 0.0238, |
| "step": 780 |
| }, |
| { |
| "epoch": 7.045, |
| "grad_norm": 3.0998730659484863, |
| "learning_rate": 1.2747368421052633e-05, |
| "loss": 0.0337, |
| "step": 790 |
| }, |
| { |
| "epoch": 7.05, |
| "grad_norm": 1.361088752746582, |
| "learning_rate": 1.2642105263157896e-05, |
| "loss": 0.0399, |
| "step": 800 |
| }, |
| { |
| "epoch": 8.005, |
| "grad_norm": 0.9619424343109131, |
| "learning_rate": 1.2536842105263158e-05, |
| "loss": 0.0215, |
| "step": 810 |
| }, |
| { |
| "epoch": 8.01, |
| "grad_norm": 1.0369452238082886, |
| "learning_rate": 1.2431578947368421e-05, |
| "loss": 0.0247, |
| "step": 820 |
| }, |
| { |
| "epoch": 8.015, |
| "grad_norm": 1.344269871711731, |
| "learning_rate": 1.2326315789473686e-05, |
| "loss": 0.025, |
| "step": 830 |
| }, |
| { |
| "epoch": 8.02, |
| "grad_norm": 1.2112865447998047, |
| "learning_rate": 1.2221052631578949e-05, |
| "loss": 0.0216, |
| "step": 840 |
| }, |
| { |
| "epoch": 8.025, |
| "grad_norm": 1.7333956956863403, |
| "learning_rate": 1.2115789473684212e-05, |
| "loss": 0.0183, |
| "step": 850 |
| }, |
| { |
| "epoch": 8.03, |
| "grad_norm": 2.0652589797973633, |
| "learning_rate": 1.2010526315789474e-05, |
| "loss": 0.0241, |
| "step": 860 |
| }, |
| { |
| "epoch": 8.035, |
| "grad_norm": 0.624588131904602, |
| "learning_rate": 1.1905263157894737e-05, |
| "loss": 0.0202, |
| "step": 870 |
| }, |
| { |
| "epoch": 8.04, |
| "grad_norm": 1.0641587972640991, |
| "learning_rate": 1.18e-05, |
| "loss": 0.019, |
| "step": 880 |
| }, |
| { |
| "epoch": 8.045, |
| "grad_norm": 1.1550590991973877, |
| "learning_rate": 1.1694736842105264e-05, |
| "loss": 0.0214, |
| "step": 890 |
| }, |
| { |
| "epoch": 8.05, |
| "grad_norm": 1.3287566900253296, |
| "learning_rate": 1.1589473684210529e-05, |
| "loss": 0.0292, |
| "step": 900 |
| }, |
| { |
| "epoch": 9.005, |
| "grad_norm": 1.3364150524139404, |
| "learning_rate": 1.148421052631579e-05, |
| "loss": 0.0213, |
| "step": 910 |
| }, |
| { |
| "epoch": 9.01, |
| "grad_norm": 0.7193664908409119, |
| "learning_rate": 1.1378947368421054e-05, |
| "loss": 0.0082, |
| "step": 920 |
| }, |
| { |
| "epoch": 9.015, |
| "grad_norm": 1.0872008800506592, |
| "learning_rate": 1.1273684210526317e-05, |
| "loss": 0.0151, |
| "step": 930 |
| }, |
| { |
| "epoch": 9.02, |
| "grad_norm": 0.22245873510837555, |
| "learning_rate": 1.116842105263158e-05, |
| "loss": 0.0128, |
| "step": 940 |
| }, |
| { |
| "epoch": 9.025, |
| "grad_norm": 0.9914424419403076, |
| "learning_rate": 1.1063157894736842e-05, |
| "loss": 0.0174, |
| "step": 950 |
| }, |
| { |
| "epoch": 9.03, |
| "grad_norm": 0.9509072303771973, |
| "learning_rate": 1.0957894736842105e-05, |
| "loss": 0.0181, |
| "step": 960 |
| }, |
| { |
| "epoch": 9.035, |
| "grad_norm": 0.40795469284057617, |
| "learning_rate": 1.085263157894737e-05, |
| "loss": 0.0202, |
| "step": 970 |
| }, |
| { |
| "epoch": 9.04, |
| "grad_norm": 1.3863106966018677, |
| "learning_rate": 1.0747368421052633e-05, |
| "loss": 0.0172, |
| "step": 980 |
| }, |
| { |
| "epoch": 9.045, |
| "grad_norm": 0.6388317942619324, |
| "learning_rate": 1.0642105263157897e-05, |
| "loss": 0.0185, |
| "step": 990 |
| }, |
| { |
| "epoch": 9.05, |
| "grad_norm": 1.0237617492675781, |
| "learning_rate": 1.0536842105263158e-05, |
| "loss": 0.0193, |
| "step": 1000 |
| }, |
| { |
| "epoch": 10.005, |
| "grad_norm": 0.2478121817111969, |
| "learning_rate": 1.0431578947368421e-05, |
| "loss": 0.0111, |
| "step": 1010 |
| }, |
| { |
| "epoch": 10.01, |
| "grad_norm": 1.0051342248916626, |
| "learning_rate": 1.0326315789473685e-05, |
| "loss": 0.0102, |
| "step": 1020 |
| }, |
| { |
| "epoch": 10.015, |
| "grad_norm": 0.9585472345352173, |
| "learning_rate": 1.0221052631578948e-05, |
| "loss": 0.0141, |
| "step": 1030 |
| }, |
| { |
| "epoch": 10.02, |
| "grad_norm": 1.248799204826355, |
| "learning_rate": 1.0115789473684213e-05, |
| "loss": 0.0088, |
| "step": 1040 |
| }, |
| { |
| "epoch": 10.025, |
| "grad_norm": 1.050134301185608, |
| "learning_rate": 1.0010526315789474e-05, |
| "loss": 0.0097, |
| "step": 1050 |
| }, |
| { |
| "epoch": 10.03, |
| "grad_norm": 0.2019995152950287, |
| "learning_rate": 9.905263157894738e-06, |
| "loss": 0.011, |
| "step": 1060 |
| }, |
| { |
| "epoch": 10.035, |
| "grad_norm": 0.9100256562232971, |
| "learning_rate": 9.800000000000001e-06, |
| "loss": 0.0149, |
| "step": 1070 |
| }, |
| { |
| "epoch": 10.04, |
| "grad_norm": 0.8645588159561157, |
| "learning_rate": 9.694736842105263e-06, |
| "loss": 0.011, |
| "step": 1080 |
| }, |
| { |
| "epoch": 10.045, |
| "grad_norm": 0.9465664029121399, |
| "learning_rate": 9.589473684210528e-06, |
| "loss": 0.0116, |
| "step": 1090 |
| }, |
| { |
| "epoch": 10.05, |
| "grad_norm": 0.7357518076896667, |
| "learning_rate": 9.484210526315791e-06, |
| "loss": 0.0132, |
| "step": 1100 |
| }, |
| { |
| "epoch": 11.005, |
| "grad_norm": 0.0960564911365509, |
| "learning_rate": 9.378947368421052e-06, |
| "loss": 0.0079, |
| "step": 1110 |
| }, |
| { |
| "epoch": 11.01, |
| "grad_norm": 0.07488369941711426, |
| "learning_rate": 9.273684210526317e-06, |
| "loss": 0.0079, |
| "step": 1120 |
| }, |
| { |
| "epoch": 11.015, |
| "grad_norm": 0.4596630334854126, |
| "learning_rate": 9.168421052631579e-06, |
| "loss": 0.0073, |
| "step": 1130 |
| }, |
| { |
| "epoch": 11.02, |
| "grad_norm": 0.915014386177063, |
| "learning_rate": 9.063157894736842e-06, |
| "loss": 0.0058, |
| "step": 1140 |
| }, |
| { |
| "epoch": 11.025, |
| "grad_norm": 0.4357207119464874, |
| "learning_rate": 8.957894736842107e-06, |
| "loss": 0.0082, |
| "step": 1150 |
| }, |
| { |
| "epoch": 11.03, |
| "grad_norm": 0.6345843076705933, |
| "learning_rate": 8.852631578947369e-06, |
| "loss": 0.009, |
| "step": 1160 |
| }, |
| { |
| "epoch": 11.035, |
| "grad_norm": 0.2351154237985611, |
| "learning_rate": 8.747368421052632e-06, |
| "loss": 0.0071, |
| "step": 1170 |
| }, |
| { |
| "epoch": 11.04, |
| "grad_norm": 0.5750226378440857, |
| "learning_rate": 8.642105263157895e-06, |
| "loss": 0.0065, |
| "step": 1180 |
| }, |
| { |
| "epoch": 11.045, |
| "grad_norm": 0.49880605936050415, |
| "learning_rate": 8.536842105263159e-06, |
| "loss": 0.0061, |
| "step": 1190 |
| }, |
| { |
| "epoch": 11.05, |
| "grad_norm": 0.6473588347434998, |
| "learning_rate": 8.431578947368422e-06, |
| "loss": 0.0091, |
| "step": 1200 |
| }, |
| { |
| "epoch": 12.005, |
| "grad_norm": 0.02745138294994831, |
| "learning_rate": 8.326315789473685e-06, |
| "loss": 0.0027, |
| "step": 1210 |
| }, |
| { |
| "epoch": 12.01, |
| "grad_norm": 0.6922830939292908, |
| "learning_rate": 8.221052631578948e-06, |
| "loss": 0.0058, |
| "step": 1220 |
| }, |
| { |
| "epoch": 12.015, |
| "grad_norm": 0.6273651719093323, |
| "learning_rate": 8.115789473684212e-06, |
| "loss": 0.0034, |
| "step": 1230 |
| }, |
| { |
| "epoch": 12.02, |
| "grad_norm": 1.0830379724502563, |
| "learning_rate": 8.010526315789473e-06, |
| "loss": 0.0063, |
| "step": 1240 |
| }, |
| { |
| "epoch": 12.025, |
| "grad_norm": 0.824835479259491, |
| "learning_rate": 7.905263157894737e-06, |
| "loss": 0.0064, |
| "step": 1250 |
| }, |
| { |
| "epoch": 12.03, |
| "grad_norm": 0.5051027536392212, |
| "learning_rate": 7.800000000000002e-06, |
| "loss": 0.0029, |
| "step": 1260 |
| }, |
| { |
| "epoch": 12.035, |
| "grad_norm": 0.6191169619560242, |
| "learning_rate": 7.694736842105263e-06, |
| "loss": 0.0036, |
| "step": 1270 |
| }, |
| { |
| "epoch": 12.04, |
| "grad_norm": 0.03212520107626915, |
| "learning_rate": 7.589473684210526e-06, |
| "loss": 0.001, |
| "step": 1280 |
| }, |
| { |
| "epoch": 12.045, |
| "grad_norm": 0.027301745489239693, |
| "learning_rate": 7.4842105263157905e-06, |
| "loss": 0.0024, |
| "step": 1290 |
| }, |
| { |
| "epoch": 12.05, |
| "grad_norm": 0.6917107701301575, |
| "learning_rate": 7.378947368421053e-06, |
| "loss": 0.002, |
| "step": 1300 |
| }, |
| { |
| "epoch": 13.005, |
| "grad_norm": 0.009404812939465046, |
| "learning_rate": 7.273684210526316e-06, |
| "loss": 0.0023, |
| "step": 1310 |
| }, |
| { |
| "epoch": 13.01, |
| "grad_norm": 0.5634567737579346, |
| "learning_rate": 7.16842105263158e-06, |
| "loss": 0.001, |
| "step": 1320 |
| }, |
| { |
| "epoch": 13.015, |
| "grad_norm": 0.02036663331091404, |
| "learning_rate": 7.063157894736843e-06, |
| "loss": 0.0025, |
| "step": 1330 |
| }, |
| { |
| "epoch": 13.02, |
| "grad_norm": 0.05707384645938873, |
| "learning_rate": 6.957894736842106e-06, |
| "loss": 0.0012, |
| "step": 1340 |
| }, |
| { |
| "epoch": 13.025, |
| "grad_norm": 0.022448807954788208, |
| "learning_rate": 6.8526315789473685e-06, |
| "loss": 0.0008, |
| "step": 1350 |
| }, |
| { |
| "epoch": 13.03, |
| "grad_norm": 0.012089096009731293, |
| "learning_rate": 6.747368421052633e-06, |
| "loss": 0.0039, |
| "step": 1360 |
| }, |
| { |
| "epoch": 13.035, |
| "grad_norm": 0.14159785211086273, |
| "learning_rate": 6.642105263157895e-06, |
| "loss": 0.0005, |
| "step": 1370 |
| }, |
| { |
| "epoch": 13.04, |
| "grad_norm": 0.05650542676448822, |
| "learning_rate": 6.536842105263158e-06, |
| "loss": 0.0011, |
| "step": 1380 |
| }, |
| { |
| "epoch": 13.045, |
| "grad_norm": 0.030941806733608246, |
| "learning_rate": 6.431578947368422e-06, |
| "loss": 0.0024, |
| "step": 1390 |
| }, |
| { |
| "epoch": 13.05, |
| "grad_norm": 0.022141387686133385, |
| "learning_rate": 6.326315789473685e-06, |
| "loss": 0.0035, |
| "step": 1400 |
| }, |
| { |
| "epoch": 14.005, |
| "grad_norm": 0.010789740830659866, |
| "learning_rate": 6.221052631578947e-06, |
| "loss": 0.0002, |
| "step": 1410 |
| }, |
| { |
| "epoch": 14.01, |
| "grad_norm": 0.008089865557849407, |
| "learning_rate": 6.1157894736842106e-06, |
| "loss": 0.0002, |
| "step": 1420 |
| }, |
| { |
| "epoch": 14.015, |
| "grad_norm": 0.009247499518096447, |
| "learning_rate": 6.010526315789475e-06, |
| "loss": 0.0004, |
| "step": 1430 |
| }, |
| { |
| "epoch": 14.02, |
| "grad_norm": 0.005500028375536203, |
| "learning_rate": 5.905263157894737e-06, |
| "loss": 0.0019, |
| "step": 1440 |
| }, |
| { |
| "epoch": 14.025, |
| "grad_norm": 0.04416996240615845, |
| "learning_rate": 5.8e-06, |
| "loss": 0.0013, |
| "step": 1450 |
| }, |
| { |
| "epoch": 14.03, |
| "grad_norm": 0.10680264979600906, |
| "learning_rate": 5.694736842105264e-06, |
| "loss": 0.0012, |
| "step": 1460 |
| }, |
| { |
| "epoch": 14.035, |
| "grad_norm": 0.008884921669960022, |
| "learning_rate": 5.589473684210527e-06, |
| "loss": 0.0002, |
| "step": 1470 |
| }, |
| { |
| "epoch": 14.04, |
| "grad_norm": 0.179422989487648, |
| "learning_rate": 5.484210526315789e-06, |
| "loss": 0.0009, |
| "step": 1480 |
| }, |
| { |
| "epoch": 14.045, |
| "grad_norm": 0.7773061990737915, |
| "learning_rate": 5.3789473684210535e-06, |
| "loss": 0.0003, |
| "step": 1490 |
| }, |
| { |
| "epoch": 14.05, |
| "grad_norm": 0.04718916490674019, |
| "learning_rate": 5.273684210526317e-06, |
| "loss": 0.0009, |
| "step": 1500 |
| }, |
| { |
| "epoch": 15.005, |
| "grad_norm": 0.0084745017811656, |
| "learning_rate": 5.168421052631579e-06, |
| "loss": 0.0014, |
| "step": 1510 |
| }, |
| { |
| "epoch": 15.01, |
| "grad_norm": 0.006006155628710985, |
| "learning_rate": 5.0631578947368424e-06, |
| "loss": 0.0008, |
| "step": 1520 |
| }, |
| { |
| "epoch": 15.015, |
| "grad_norm": 0.007198017556220293, |
| "learning_rate": 4.957894736842106e-06, |
| "loss": 0.0002, |
| "step": 1530 |
| }, |
| { |
| "epoch": 15.02, |
| "grad_norm": 0.004182496573776007, |
| "learning_rate": 4.852631578947369e-06, |
| "loss": 0.0003, |
| "step": 1540 |
| }, |
| { |
| "epoch": 15.025, |
| "grad_norm": 0.00469869002699852, |
| "learning_rate": 4.747368421052632e-06, |
| "loss": 0.0001, |
| "step": 1550 |
| }, |
| { |
| "epoch": 15.03, |
| "grad_norm": 0.005362135358154774, |
| "learning_rate": 4.642105263157895e-06, |
| "loss": 0.0002, |
| "step": 1560 |
| }, |
| { |
| "epoch": 15.035, |
| "grad_norm": 0.005833903793245554, |
| "learning_rate": 4.536842105263158e-06, |
| "loss": 0.0003, |
| "step": 1570 |
| }, |
| { |
| "epoch": 15.04, |
| "grad_norm": 0.006048844661563635, |
| "learning_rate": 4.431578947368421e-06, |
| "loss": 0.0002, |
| "step": 1580 |
| }, |
| { |
| "epoch": 15.045, |
| "grad_norm": 0.005061362404376268, |
| "learning_rate": 4.3263157894736845e-06, |
| "loss": 0.0001, |
| "step": 1590 |
| }, |
| { |
| "epoch": 15.05, |
| "grad_norm": 0.007132957689464092, |
| "learning_rate": 4.221052631578948e-06, |
| "loss": 0.0004, |
| "step": 1600 |
| }, |
| { |
| "epoch": 16.005, |
| "grad_norm": 0.004316328559070826, |
| "learning_rate": 4.115789473684211e-06, |
| "loss": 0.0001, |
| "step": 1610 |
| }, |
| { |
| "epoch": 16.01, |
| "grad_norm": 0.0065157352946698666, |
| "learning_rate": 4.010526315789474e-06, |
| "loss": 0.0001, |
| "step": 1620 |
| }, |
| { |
| "epoch": 16.015, |
| "grad_norm": 0.004985947627574205, |
| "learning_rate": 3.905263157894737e-06, |
| "loss": 0.0001, |
| "step": 1630 |
| }, |
| { |
| "epoch": 16.02, |
| "grad_norm": 0.0039834328927099705, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0003, |
| "step": 1640 |
| }, |
| { |
| "epoch": 16.025, |
| "grad_norm": 0.15245139598846436, |
| "learning_rate": 3.6947368421052637e-06, |
| "loss": 0.0001, |
| "step": 1650 |
| }, |
| { |
| "epoch": 16.03, |
| "grad_norm": 0.13511842489242554, |
| "learning_rate": 3.5894736842105266e-06, |
| "loss": 0.0003, |
| "step": 1660 |
| }, |
| { |
| "epoch": 16.035, |
| "grad_norm": 0.0028432030230760574, |
| "learning_rate": 3.48421052631579e-06, |
| "loss": 0.0001, |
| "step": 1670 |
| }, |
| { |
| "epoch": 16.04, |
| "grad_norm": 0.003842442063614726, |
| "learning_rate": 3.3789473684210527e-06, |
| "loss": 0.0003, |
| "step": 1680 |
| }, |
| { |
| "epoch": 16.045, |
| "grad_norm": 0.005007196217775345, |
| "learning_rate": 3.273684210526316e-06, |
| "loss": 0.0002, |
| "step": 1690 |
| }, |
| { |
| "epoch": 16.05, |
| "grad_norm": 0.003745683468878269, |
| "learning_rate": 3.168421052631579e-06, |
| "loss": 0.0003, |
| "step": 1700 |
| }, |
| { |
| "epoch": 17.005, |
| "grad_norm": 0.0036488028708845377, |
| "learning_rate": 3.0631578947368425e-06, |
| "loss": 0.0002, |
| "step": 1710 |
| }, |
| { |
| "epoch": 17.01, |
| "grad_norm": 0.0031844789627939463, |
| "learning_rate": 2.957894736842106e-06, |
| "loss": 0.0001, |
| "step": 1720 |
| }, |
| { |
| "epoch": 17.015, |
| "grad_norm": 0.008146918378770351, |
| "learning_rate": 2.8526315789473687e-06, |
| "loss": 0.0002, |
| "step": 1730 |
| }, |
| { |
| "epoch": 17.02, |
| "grad_norm": 0.0034727228339761496, |
| "learning_rate": 2.747368421052632e-06, |
| "loss": 0.0001, |
| "step": 1740 |
| }, |
| { |
| "epoch": 17.025, |
| "grad_norm": 0.00454725231975317, |
| "learning_rate": 2.6421052631578948e-06, |
| "loss": 0.0001, |
| "step": 1750 |
| }, |
| { |
| "epoch": 17.03, |
| "grad_norm": 0.005313561297953129, |
| "learning_rate": 2.536842105263158e-06, |
| "loss": 0.0002, |
| "step": 1760 |
| }, |
| { |
| "epoch": 17.035, |
| "grad_norm": 0.1343342512845993, |
| "learning_rate": 2.4315789473684213e-06, |
| "loss": 0.0002, |
| "step": 1770 |
| }, |
| { |
| "epoch": 17.04, |
| "grad_norm": 0.004856111016124487, |
| "learning_rate": 2.326315789473684e-06, |
| "loss": 0.0001, |
| "step": 1780 |
| }, |
| { |
| "epoch": 17.045, |
| "grad_norm": 0.10679910331964493, |
| "learning_rate": 2.221052631578948e-06, |
| "loss": 0.0002, |
| "step": 1790 |
| }, |
| { |
| "epoch": 17.05, |
| "grad_norm": 0.003918065223842859, |
| "learning_rate": 2.1157894736842107e-06, |
| "loss": 0.0001, |
| "step": 1800 |
| }, |
| { |
| "epoch": 18.005, |
| "grad_norm": 0.0038473308086395264, |
| "learning_rate": 2.010526315789474e-06, |
| "loss": 0.0001, |
| "step": 1810 |
| }, |
| { |
| "epoch": 18.01, |
| "grad_norm": 0.0027081076987087727, |
| "learning_rate": 1.905263157894737e-06, |
| "loss": 0.0002, |
| "step": 1820 |
| }, |
| { |
| "epoch": 18.015, |
| "grad_norm": 0.003243462648242712, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.0002, |
| "step": 1830 |
| }, |
| { |
| "epoch": 18.02, |
| "grad_norm": 0.004891443997621536, |
| "learning_rate": 1.6947368421052632e-06, |
| "loss": 0.0001, |
| "step": 1840 |
| }, |
| { |
| "epoch": 18.025, |
| "grad_norm": 0.00323393102735281, |
| "learning_rate": 1.5894736842105265e-06, |
| "loss": 0.0001, |
| "step": 1850 |
| }, |
| { |
| "epoch": 18.03, |
| "grad_norm": 0.00286379037424922, |
| "learning_rate": 1.4842105263157897e-06, |
| "loss": 0.0001, |
| "step": 1860 |
| }, |
| { |
| "epoch": 18.035, |
| "grad_norm": 0.0032987131271511316, |
| "learning_rate": 1.3789473684210528e-06, |
| "loss": 0.0001, |
| "step": 1870 |
| }, |
| { |
| "epoch": 18.04, |
| "grad_norm": 0.003127966308966279, |
| "learning_rate": 1.2736842105263159e-06, |
| "loss": 0.0001, |
| "step": 1880 |
| }, |
| { |
| "epoch": 18.045, |
| "grad_norm": 0.0026523026172071695, |
| "learning_rate": 1.1684210526315791e-06, |
| "loss": 0.0002, |
| "step": 1890 |
| }, |
| { |
| "epoch": 18.05, |
| "grad_norm": 0.0035338886082172394, |
| "learning_rate": 1.0631578947368422e-06, |
| "loss": 0.0002, |
| "step": 1900 |
| }, |
| { |
| "epoch": 19.005, |
| "grad_norm": 0.0032154133077710867, |
| "learning_rate": 9.578947368421053e-07, |
| "loss": 0.0001, |
| "step": 1910 |
| }, |
| { |
| "epoch": 19.01, |
| "grad_norm": 0.0040828268975019455, |
| "learning_rate": 8.526315789473685e-07, |
| "loss": 0.0001, |
| "step": 1920 |
| }, |
| { |
| "epoch": 19.015, |
| "grad_norm": 0.002462048316374421, |
| "learning_rate": 7.473684210526316e-07, |
| "loss": 0.0001, |
| "step": 1930 |
| }, |
| { |
| "epoch": 19.02, |
| "grad_norm": 0.00201555248349905, |
| "learning_rate": 6.421052631578948e-07, |
| "loss": 0.0003, |
| "step": 1940 |
| }, |
| { |
| "epoch": 19.025, |
| "grad_norm": 0.0026351658161729574, |
| "learning_rate": 5.368421052631579e-07, |
| "loss": 0.0001, |
| "step": 1950 |
| }, |
| { |
| "epoch": 19.03, |
| "grad_norm": 0.002890300936996937, |
| "learning_rate": 4.3157894736842105e-07, |
| "loss": 0.0001, |
| "step": 1960 |
| }, |
| { |
| "epoch": 19.035, |
| "grad_norm": 0.002067763591185212, |
| "learning_rate": 3.263157894736842e-07, |
| "loss": 0.0001, |
| "step": 1970 |
| }, |
| { |
| "epoch": 19.04, |
| "grad_norm": 0.1408688873052597, |
| "learning_rate": 2.2105263157894736e-07, |
| "loss": 0.0003, |
| "step": 1980 |
| }, |
| { |
| "epoch": 19.045, |
| "grad_norm": 0.002841346897184849, |
| "learning_rate": 1.1578947368421054e-07, |
| "loss": 0.0001, |
| "step": 1990 |
| }, |
| { |
| "epoch": 19.05, |
| "grad_norm": 0.0034785745665431023, |
| "learning_rate": 1.0526315789473684e-08, |
| "loss": 0.0001, |
| "step": 2000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 2000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 9.010451846896026e+16, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |