{
  "best_metric": 0.6027244925498962,
  "best_model_checkpoint": "runs/deepseek_CMU-AIR2/math-deepseek_FULL_HardArith_Interm_20240424-065814/checkpoint-5000",
  "epoch": 0.25,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 4.3125,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.6839,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 6.53125,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.6587,
      "step": 20
    },
    {
      "epoch": 0.0,
      "grad_norm": 6.34375,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.6644,
      "step": 30
    },
    {
      "epoch": 0.0,
      "grad_norm": 2.359375,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.6101,
      "step": 40
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.90625,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6188,
      "step": 50
    },
    {
      "epoch": 0.0,
      "grad_norm": 3.390625,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.6524,
      "step": 60
    },
    {
      "epoch": 0.0,
      "grad_norm": 3.75,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.6322,
      "step": 70
    },
    {
      "epoch": 0.0,
      "grad_norm": 3.484375,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.676,
      "step": 80
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.40625,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.6578,
      "step": 90
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.25,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.597,
      "step": 100
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.4375,
      "learning_rate": 4.4e-06,
      "loss": 0.6543,
      "step": 110
    },
    {
      "epoch": 0.0,
      "grad_norm": 7.03125,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.6844,
      "step": 120
    },
    {
      "epoch": 0.0,
      "grad_norm": 6.09375,
      "learning_rate": 5.2e-06,
      "loss": 0.6658,
      "step": 130
    },
    {
      "epoch": 0.0,
      "grad_norm": 2.96875,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.7388,
      "step": 140
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.5625,
      "learning_rate": 6e-06,
      "loss": 0.6627,
      "step": 150
    },
    {
      "epoch": 0.0,
      "grad_norm": 8.125,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.7036,
      "step": 160
    },
    {
      "epoch": 0.0,
      "grad_norm": 4.9375,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.6136,
      "step": 170
    },
    {
      "epoch": 0.0,
      "grad_norm": 2.546875,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.5902,
      "step": 180
    },
    {
      "epoch": 0.0,
      "grad_norm": 2.265625,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.6254,
      "step": 190
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.9375,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.6322,
      "step": 200
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.546875,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.6152,
      "step": 210
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.15625,
      "learning_rate": 8.8e-06,
      "loss": 0.5843,
      "step": 220
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.96875,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.7049,
      "step": 230
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.34375,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.4978,
      "step": 240
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.6875,
      "learning_rate": 1e-05,
      "loss": 0.603,
      "step": 250
    },
    {
      "epoch": 0.01,
      "grad_norm": 7.0625,
      "learning_rate": 1.04e-05,
      "loss": 0.6845,
      "step": 260
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.9375,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.5937,
      "step": 270
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.6875,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.5744,
      "step": 280
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.3125,
      "learning_rate": 1.16e-05,
      "loss": 0.7565,
      "step": 290
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.125,
      "learning_rate": 1.2e-05,
      "loss": 0.6332,
      "step": 300
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.84375,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.5884,
      "step": 310
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.625,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.6418,
      "step": 320
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.421875,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.6532,
      "step": 330
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.125,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 0.5984,
      "step": 340
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.4375,
      "learning_rate": 1.4e-05,
      "loss": 0.7039,
      "step": 350
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.15625,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.5297,
      "step": 360
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.71875,
      "learning_rate": 1.48e-05,
      "loss": 0.6239,
      "step": 370
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.5,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.6246,
      "step": 380
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.421875,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.6023,
      "step": 390
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.671875,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.6448,
      "step": 400
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.75,
      "learning_rate": 1.64e-05,
      "loss": 0.7238,
      "step": 410
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.1875,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.6137,
      "step": 420
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.828125,
      "learning_rate": 1.72e-05,
      "loss": 0.6496,
      "step": 430
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.1875,
      "learning_rate": 1.76e-05,
      "loss": 0.5152,
      "step": 440
    },
    {
      "epoch": 0.01,
      "grad_norm": 7.5625,
      "learning_rate": 1.8e-05,
      "loss": 0.5874,
      "step": 450
    },
    {
      "epoch": 0.01,
      "grad_norm": 11.875,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.6662,
      "step": 460
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.515625,
      "learning_rate": 1.88e-05,
      "loss": 0.6845,
      "step": 470
    },
    {
      "epoch": 0.01,
      "grad_norm": 10.5625,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.6917,
      "step": 480
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.0625,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.5512,
      "step": 490
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.546875,
      "learning_rate": 2e-05,
      "loss": 0.6037,
      "step": 500
    },
    {
      "epoch": 0.01,
      "eval_loss": 0.6478177309036255,
      "eval_runtime": 38.0753,
      "eval_samples_per_second": 26.264,
      "eval_steps_per_second": 26.264,
      "step": 500
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.125,
      "learning_rate": 1.9978947368421054e-05,
      "loss": 0.6223,
      "step": 510
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.4375,
      "learning_rate": 1.9957894736842107e-05,
      "loss": 0.6301,
      "step": 520
    },
    {
      "epoch": 0.01,
      "grad_norm": 7.65625,
      "learning_rate": 1.993684210526316e-05,
      "loss": 0.6224,
      "step": 530
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.125,
      "learning_rate": 1.9915789473684212e-05,
      "loss": 0.6255,
      "step": 540
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.96875,
      "learning_rate": 1.9894736842105265e-05,
      "loss": 0.6737,
      "step": 550
    },
    {
      "epoch": 0.01,
      "grad_norm": 11.8125,
      "learning_rate": 1.9873684210526318e-05,
      "loss": 0.6218,
      "step": 560
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.15625,
      "learning_rate": 1.985263157894737e-05,
      "loss": 0.5187,
      "step": 570
    },
    {
      "epoch": 0.01,
      "grad_norm": 6.0625,
      "learning_rate": 1.9831578947368423e-05,
      "loss": 0.6786,
      "step": 580
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.125,
      "learning_rate": 1.9810526315789476e-05,
      "loss": 0.6641,
      "step": 590
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.75,
      "learning_rate": 1.9789473684210528e-05,
      "loss": 0.5541,
      "step": 600
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.4375,
      "learning_rate": 1.976842105263158e-05,
      "loss": 0.565,
      "step": 610
    },
    {
      "epoch": 0.02,
      "grad_norm": 10.375,
      "learning_rate": 1.9747368421052633e-05,
      "loss": 0.7099,
      "step": 620
    },
    {
      "epoch": 0.02,
      "grad_norm": 5.09375,
      "learning_rate": 1.9726315789473686e-05,
      "loss": 0.5988,
      "step": 630
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.0625,
      "learning_rate": 1.970526315789474e-05,
      "loss": 0.7724,
      "step": 640
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.0625,
      "learning_rate": 1.968421052631579e-05,
      "loss": 0.582,
      "step": 650
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.65625,
      "learning_rate": 1.9663157894736844e-05,
      "loss": 0.6805,
      "step": 660
    },
    {
      "epoch": 0.02,
      "grad_norm": 11.0,
      "learning_rate": 1.9642105263157897e-05,
      "loss": 0.6248,
      "step": 670
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.734375,
      "learning_rate": 1.962105263157895e-05,
      "loss": 0.5691,
      "step": 680
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.34375,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.6657,
      "step": 690
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.078125,
      "learning_rate": 1.9578947368421055e-05,
      "loss": 0.4786,
      "step": 700
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.125,
      "learning_rate": 1.9557894736842107e-05,
      "loss": 0.6368,
      "step": 710
    },
    {
      "epoch": 0.02,
      "grad_norm": 5.84375,
      "learning_rate": 1.953684210526316e-05,
      "loss": 0.6108,
      "step": 720
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.53125,
      "learning_rate": 1.9515789473684213e-05,
      "loss": 0.6331,
      "step": 730
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.96875,
      "learning_rate": 1.9494736842105265e-05,
      "loss": 0.5432,
      "step": 740
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.25,
      "learning_rate": 1.9473684210526318e-05,
      "loss": 0.7021,
      "step": 750
    },
    {
      "epoch": 0.02,
      "grad_norm": 10.375,
      "learning_rate": 1.945263157894737e-05,
      "loss": 0.68,
      "step": 760
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.359375,
      "learning_rate": 1.9431578947368423e-05,
      "loss": 0.629,
      "step": 770
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.5625,
      "learning_rate": 1.9410526315789476e-05,
      "loss": 0.6685,
      "step": 780
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.21875,
      "learning_rate": 1.9389473684210525e-05,
      "loss": 0.5552,
      "step": 790
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.765625,
      "learning_rate": 1.936842105263158e-05,
      "loss": 0.657,
      "step": 800
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.71875,
      "learning_rate": 1.9347368421052634e-05,
      "loss": 0.5431,
      "step": 810
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.5,
      "learning_rate": 1.9326315789473687e-05,
      "loss": 0.619,
      "step": 820
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.53125,
      "learning_rate": 1.930526315789474e-05,
      "loss": 0.7058,
      "step": 830
    },
    {
      "epoch": 0.02,
      "grad_norm": 8.5625,
      "learning_rate": 1.9284210526315792e-05,
      "loss": 0.631,
      "step": 840
    },
    {
      "epoch": 0.02,
      "grad_norm": 6.1875,
      "learning_rate": 1.9263157894736845e-05,
      "loss": 0.5991,
      "step": 850
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.890625,
      "learning_rate": 1.9242105263157894e-05,
      "loss": 0.6852,
      "step": 860
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.46875,
      "learning_rate": 1.922105263157895e-05,
      "loss": 0.6474,
      "step": 870
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.46875,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.5979,
      "step": 880
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.625,
      "learning_rate": 1.9178947368421055e-05,
      "loss": 0.7035,
      "step": 890
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.640625,
      "learning_rate": 1.9157894736842108e-05,
      "loss": 0.7001,
      "step": 900
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.578125,
      "learning_rate": 1.913684210526316e-05,
      "loss": 0.6453,
      "step": 910
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.4375,
      "learning_rate": 1.9115789473684213e-05,
      "loss": 0.6356,
      "step": 920
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.1875,
      "learning_rate": 1.9094736842105262e-05,
      "loss": 0.5679,
      "step": 930
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.46875,
      "learning_rate": 1.907368421052632e-05,
      "loss": 0.7062,
      "step": 940
    },
    {
      "epoch": 0.02,
      "grad_norm": 5.5625,
      "learning_rate": 1.9052631578947368e-05,
      "loss": 0.665,
      "step": 950
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.96875,
      "learning_rate": 1.9031578947368424e-05,
      "loss": 0.6772,
      "step": 960
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.09375,
      "learning_rate": 1.9010526315789476e-05,
      "loss": 0.5347,
      "step": 970
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.65625,
      "learning_rate": 1.898947368421053e-05,
      "loss": 0.6161,
      "step": 980
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.796875,
      "learning_rate": 1.8968421052631582e-05,
      "loss": 0.595,
      "step": 990
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.03125,
      "learning_rate": 1.894736842105263e-05,
      "loss": 0.6752,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "eval_loss": 0.6129263639450073,
      "eval_runtime": 38.0435,
      "eval_samples_per_second": 26.286,
      "eval_steps_per_second": 26.286,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.5,
      "learning_rate": 1.8926315789473687e-05,
      "loss": 0.6876,
      "step": 1010
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.640625,
      "learning_rate": 1.8905263157894736e-05,
      "loss": 0.6364,
      "step": 1020
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.6875,
      "learning_rate": 1.8884210526315792e-05,
      "loss": 0.6598,
      "step": 1030
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.09375,
      "learning_rate": 1.886315789473684e-05,
      "loss": 0.6473,
      "step": 1040
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.125,
      "learning_rate": 1.8842105263157898e-05,
      "loss": 0.5972,
      "step": 1050
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.765625,
      "learning_rate": 1.882105263157895e-05,
      "loss": 0.7538,
      "step": 1060
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.625,
      "learning_rate": 1.88e-05,
      "loss": 0.6282,
      "step": 1070
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.9375,
      "learning_rate": 1.8778947368421056e-05,
      "loss": 0.7304,
      "step": 1080
    },
    {
      "epoch": 0.03,
      "grad_norm": 12.625,
      "learning_rate": 1.8757894736842105e-05,
      "loss": 0.6744,
      "step": 1090
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.84375,
      "learning_rate": 1.873684210526316e-05,
      "loss": 0.7072,
      "step": 1100
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.65625,
      "learning_rate": 1.871578947368421e-05,
      "loss": 0.6935,
      "step": 1110
    },
    {
      "epoch": 0.03,
      "grad_norm": 7.125,
      "learning_rate": 1.8694736842105266e-05,
      "loss": 0.6507,
      "step": 1120
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.326171875,
      "learning_rate": 1.8673684210526316e-05,
      "loss": 0.5451,
      "step": 1130
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.0,
      "learning_rate": 1.8652631578947368e-05,
      "loss": 0.6145,
      "step": 1140
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.09375,
      "learning_rate": 1.8631578947368424e-05,
      "loss": 0.6846,
      "step": 1150
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.734375,
      "learning_rate": 1.8610526315789473e-05,
      "loss": 0.6139,
      "step": 1160
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.71875,
      "learning_rate": 1.858947368421053e-05,
      "loss": 0.5147,
      "step": 1170
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.125,
      "learning_rate": 1.856842105263158e-05,
      "loss": 0.5526,
      "step": 1180
    },
    {
      "epoch": 0.03,
      "grad_norm": 13.6875,
      "learning_rate": 1.8547368421052635e-05,
      "loss": 0.6973,
      "step": 1190
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.8125,
      "learning_rate": 1.8526315789473684e-05,
      "loss": 0.6338,
      "step": 1200
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.234375,
      "learning_rate": 1.8505263157894737e-05,
      "loss": 0.5985,
      "step": 1210
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.625,
      "learning_rate": 1.8484210526315793e-05,
      "loss": 0.6832,
      "step": 1220
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.59375,
      "learning_rate": 1.8463157894736842e-05,
      "loss": 0.6766,
      "step": 1230
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.09375,
      "learning_rate": 1.8442105263157898e-05,
      "loss": 0.6342,
      "step": 1240
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.0625,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 0.6043,
      "step": 1250
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.34375,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.5879,
      "step": 1260
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.84375,
      "learning_rate": 1.8378947368421053e-05,
      "loss": 0.6553,
      "step": 1270
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.984375,
      "learning_rate": 1.8357894736842105e-05,
      "loss": 0.6256,
      "step": 1280
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.75,
      "learning_rate": 1.8336842105263158e-05,
      "loss": 0.7604,
      "step": 1290
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.0,
      "learning_rate": 1.831578947368421e-05,
      "loss": 0.5902,
      "step": 1300
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.328125,
      "learning_rate": 1.8294736842105267e-05,
      "loss": 0.6895,
      "step": 1310
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.171875,
      "learning_rate": 1.8273684210526316e-05,
      "loss": 0.557,
      "step": 1320
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.59375,
      "learning_rate": 1.8252631578947372e-05,
      "loss": 0.5967,
      "step": 1330
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.171875,
      "learning_rate": 1.823157894736842e-05,
      "loss": 0.7342,
      "step": 1340
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.65625,
      "learning_rate": 1.8210526315789477e-05,
      "loss": 0.5225,
      "step": 1350
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.84375,
      "learning_rate": 1.8189473684210527e-05,
      "loss": 0.6079,
      "step": 1360
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.546875,
      "learning_rate": 1.816842105263158e-05,
      "loss": 0.5944,
      "step": 1370
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.0625,
      "learning_rate": 1.8147368421052632e-05,
      "loss": 0.5262,
      "step": 1380
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.7578125,
      "learning_rate": 1.8126315789473685e-05,
      "loss": 0.5117,
      "step": 1390
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.3125,
      "learning_rate": 1.810526315789474e-05,
      "loss": 0.7201,
      "step": 1400
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.171875,
      "learning_rate": 1.808421052631579e-05,
      "loss": 0.6592,
      "step": 1410
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.34375,
      "learning_rate": 1.8063157894736846e-05,
      "loss": 0.6966,
      "step": 1420
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.9375,
      "learning_rate": 1.8042105263157895e-05,
      "loss": 0.5328,
      "step": 1430
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.03125,
      "learning_rate": 1.8021052631578948e-05,
      "loss": 0.6245,
      "step": 1440
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.03125,
      "learning_rate": 1.8e-05,
      "loss": 0.5457,
      "step": 1450
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.9375,
      "learning_rate": 1.7978947368421053e-05,
      "loss": 0.6026,
      "step": 1460
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.546875,
      "learning_rate": 1.795789473684211e-05,
      "loss": 0.6519,
      "step": 1470
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.8125,
      "learning_rate": 1.793684210526316e-05,
      "loss": 0.5364,
      "step": 1480
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.9375,
      "learning_rate": 1.7915789473684214e-05,
      "loss": 0.6421,
      "step": 1490
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.03125,
      "learning_rate": 1.7894736842105264e-05,
      "loss": 0.5376,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "eval_loss": 0.6211587190628052,
      "eval_runtime": 38.148,
      "eval_samples_per_second": 26.214,
      "eval_steps_per_second": 26.214,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.125,
      "learning_rate": 1.7873684210526316e-05,
      "loss": 0.5633,
      "step": 1510
    },
    {
      "epoch": 0.04,
      "grad_norm": 14.0,
      "learning_rate": 1.785263157894737e-05,
      "loss": 0.6917,
      "step": 1520
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.59375,
      "learning_rate": 1.7831578947368422e-05,
      "loss": 0.5882,
      "step": 1530
    },
    {
      "epoch": 0.04,
      "grad_norm": 12.0625,
      "learning_rate": 1.7810526315789474e-05,
      "loss": 0.5752,
      "step": 1540
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.96875,
      "learning_rate": 1.7789473684210527e-05,
      "loss": 0.6439,
      "step": 1550
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.0625,
      "learning_rate": 1.7768421052631583e-05,
      "loss": 0.6806,
      "step": 1560
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.484375,
      "learning_rate": 1.7747368421052632e-05,
      "loss": 0.5903,
      "step": 1570
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.46875,
      "learning_rate": 1.7726315789473685e-05,
      "loss": 0.6394,
      "step": 1580
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.5,
      "learning_rate": 1.7705263157894738e-05,
      "loss": 0.53,
      "step": 1590
    },
    {
      "epoch": 0.04,
      "grad_norm": 10.625,
      "learning_rate": 1.768421052631579e-05,
      "loss": 0.7113,
      "step": 1600
    },
    {
      "epoch": 0.04,
      "grad_norm": 10.875,
      "learning_rate": 1.7663157894736843e-05,
      "loss": 0.6491,
      "step": 1610
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.96875,
      "learning_rate": 1.7642105263157896e-05,
      "loss": 0.6439,
      "step": 1620
    },
    {
      "epoch": 0.04,
      "grad_norm": 9.5625,
      "learning_rate": 1.7621052631578948e-05,
      "loss": 0.7248,
      "step": 1630
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.96875,
      "learning_rate": 1.76e-05,
      "loss": 0.6754,
      "step": 1640
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.828125,
      "learning_rate": 1.7578947368421054e-05,
      "loss": 0.6029,
      "step": 1650
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.0,
      "learning_rate": 1.7557894736842106e-05,
      "loss": 0.6225,
      "step": 1660
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.265625,
      "learning_rate": 1.753684210526316e-05,
      "loss": 0.5892,
      "step": 1670
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.390625,
      "learning_rate": 1.751578947368421e-05,
      "loss": 0.7917,
      "step": 1680
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.015625,
      "learning_rate": 1.7494736842105264e-05,
      "loss": 0.5863,
      "step": 1690
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.734375,
      "learning_rate": 1.7473684210526317e-05,
      "loss": 0.5613,
      "step": 1700
    },
    {
      "epoch": 0.04,
      "grad_norm": 11.6875,
      "learning_rate": 1.745263157894737e-05,
      "loss": 0.6582,
      "step": 1710
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.0,
      "learning_rate": 1.7431578947368422e-05,
      "loss": 0.6967,
      "step": 1720
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.21875,
      "learning_rate": 1.7410526315789475e-05,
      "loss": 0.6296,
      "step": 1730
    },
    {
      "epoch": 0.04,
      "grad_norm": 26.0,
      "learning_rate": 1.7389473684210527e-05,
      "loss": 0.5804,
      "step": 1740
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.015625,
      "learning_rate": 1.736842105263158e-05,
      "loss": 0.7102,
      "step": 1750
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.640625,
      "learning_rate": 1.7347368421052633e-05,
      "loss": 0.6108,
      "step": 1760
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.9375,
      "learning_rate": 1.7326315789473685e-05,
      "loss": 0.6847,
      "step": 1770
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.890625,
      "learning_rate": 1.7305263157894738e-05,
      "loss": 0.7885,
      "step": 1780
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.53125,
      "learning_rate": 1.728421052631579e-05,
      "loss": 0.674,
      "step": 1790
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.25,
      "learning_rate": 1.7263157894736843e-05,
      "loss": 0.6753,
      "step": 1800
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.0,
      "learning_rate": 1.7242105263157896e-05,
      "loss": 0.5909,
      "step": 1810
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.8984375,
      "learning_rate": 1.722105263157895e-05,
      "loss": 0.6107,
      "step": 1820
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.875,
      "learning_rate": 1.72e-05,
      "loss": 0.623,
      "step": 1830
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.84375,
      "learning_rate": 1.7178947368421054e-05,
      "loss": 0.5906,
      "step": 1840
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.484375,
      "learning_rate": 1.7157894736842107e-05,
      "loss": 0.5833,
      "step": 1850
    },
    {
      "epoch": 0.05,
      "grad_norm": 9.25,
      "learning_rate": 1.713684210526316e-05,
      "loss": 0.5935,
      "step": 1860
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.59375,
      "learning_rate": 1.7115789473684212e-05,
      "loss": 0.5972,
      "step": 1870
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.8046875,
      "learning_rate": 1.7094736842105265e-05,
      "loss": 0.5128,
      "step": 1880
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.9375,
      "learning_rate": 1.7073684210526317e-05,
      "loss": 0.6828,
      "step": 1890
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.671875,
      "learning_rate": 1.705263157894737e-05,
      "loss": 0.5969,
      "step": 1900
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.65625,
      "learning_rate": 1.7031578947368423e-05,
      "loss": 0.5967,
      "step": 1910
    },
    {
      "epoch": 0.05,
      "grad_norm": 8.5625,
      "learning_rate": 1.7010526315789475e-05,
      "loss": 0.6486,
      "step": 1920
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.953125,
      "learning_rate": 1.6989473684210528e-05,
      "loss": 0.6343,
      "step": 1930
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.53125,
      "learning_rate": 1.696842105263158e-05,
      "loss": 0.6137,
      "step": 1940
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.1875,
      "learning_rate": 1.6947368421052633e-05,
      "loss": 0.5584,
      "step": 1950
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.6875,
      "learning_rate": 1.6926315789473686e-05,
      "loss": 0.5924,
      "step": 1960
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.359375,
      "learning_rate": 1.690526315789474e-05,
      "loss": 0.5658,
      "step": 1970
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.921875,
      "learning_rate": 1.688421052631579e-05,
      "loss": 0.6455,
      "step": 1980
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.0625,
      "learning_rate": 1.6863157894736844e-05,
      "loss": 0.7716,
      "step": 1990
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.34375,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.6834,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "eval_loss": 0.6163671612739563,
      "eval_runtime": 38.1399,
      "eval_samples_per_second": 26.219,
      "eval_steps_per_second": 26.219,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.3125,
      "learning_rate": 1.682105263157895e-05,
      "loss": 0.655,
      "step": 2010
    },
    {
      "epoch": 0.05,
      "grad_norm": 8.625,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.6942,
      "step": 2020
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.8125,
      "learning_rate": 1.6778947368421054e-05,
      "loss": 0.6266,
      "step": 2030
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.46875,
      "learning_rate": 1.6757894736842107e-05,
      "loss": 0.6633,
      "step": 2040
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.625,
      "learning_rate": 1.673684210526316e-05,
      "loss": 0.6354,
      "step": 2050
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.203125,
      "learning_rate": 1.6715789473684212e-05,
      "loss": 0.6723,
      "step": 2060
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.734375,
      "learning_rate": 1.6694736842105265e-05,
      "loss": 0.677,
      "step": 2070
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.28125,
      "learning_rate": 1.6673684210526318e-05,
      "loss": 0.6329,
      "step": 2080
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.90625,
      "learning_rate": 1.665263157894737e-05,
      "loss": 0.6193,
      "step": 2090
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.46875,
      "learning_rate": 1.6631578947368423e-05,
      "loss": 0.6225,
      "step": 2100
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.75,
      "learning_rate": 1.6610526315789476e-05,
      "loss": 0.6449,
      "step": 2110
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.921875,
      "learning_rate": 1.658947368421053e-05,
      "loss": 0.5844,
      "step": 2120
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.59375,
      "learning_rate": 1.656842105263158e-05,
      "loss": 0.6374,
      "step": 2130
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.0,
      "learning_rate": 1.6547368421052634e-05,
      "loss": 0.6147,
      "step": 2140
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.6875,
      "learning_rate": 1.6526315789473686e-05,
      "loss": 0.6639,
      "step": 2150
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.109375,
      "learning_rate": 1.650526315789474e-05,
      "loss": 0.6715,
      "step": 2160
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.40625,
      "learning_rate": 1.648421052631579e-05,
      "loss": 0.718,
      "step": 2170
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.71875,
      "learning_rate": 1.6463157894736844e-05,
      "loss": 0.7205,
      "step": 2180
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.3125,
      "learning_rate": 1.6442105263157897e-05,
      "loss": 0.6613,
      "step": 2190
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.6875,
      "learning_rate": 1.642105263157895e-05,
      "loss": 0.7353,
      "step": 2200
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.078125,
      "learning_rate": 1.64e-05,
      "loss": 0.5747,
      "step": 2210
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.84375,
      "learning_rate": 1.6378947368421055e-05,
      "loss": 0.4669,
      "step": 2220
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.15625,
      "learning_rate": 1.6357894736842108e-05,
      "loss": 0.5741,
      "step": 2230
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.265625,
      "learning_rate": 1.633684210526316e-05,
      "loss": 0.6338,
      "step": 2240
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.96875,
      "learning_rate": 1.6315789473684213e-05,
      "loss": 0.5429,
      "step": 2250
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.34375,
      "learning_rate": 1.6294736842105265e-05,
      "loss": 0.6671,
      "step": 2260
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.59375,
      "learning_rate": 1.6273684210526318e-05,
      "loss": 0.681,
      "step": 2270
    },
    {
      "epoch": 0.06,
      "grad_norm": 10.125,
      "learning_rate": 1.6252631578947367e-05,
      "loss": 0.5757,
      "step": 2280
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.75,
      "learning_rate": 1.6231578947368423e-05,
      "loss": 0.559,
      "step": 2290
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.09375,
      "learning_rate": 1.6210526315789473e-05,
      "loss": 0.6697,
      "step": 2300
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.15625,
      "learning_rate": 1.618947368421053e-05,
      "loss": 0.6065,
      "step": 2310
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.59375,
      "learning_rate": 1.616842105263158e-05,
      "loss": 0.6676,
      "step": 2320
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.1875,
      "learning_rate": 1.6147368421052634e-05,
      "loss": 0.5642,
      "step": 2330
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.1875,
      "learning_rate": 1.6126315789473687e-05,
      "loss": 0.6657,
      "step": 2340
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.921875,
      "learning_rate": 1.6105263157894736e-05,
      "loss": 0.6927,
      "step": 2350
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.46875,
      "learning_rate": 1.6084210526315792e-05,
      "loss": 0.6291,
      "step": 2360
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.640625,
      "learning_rate": 1.606315789473684e-05,
      "loss": 0.728,
      "step": 2370
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.4375,
      "learning_rate": 1.6042105263157897e-05,
      "loss": 0.621,
      "step": 2380
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.078125,
      "learning_rate": 1.6021052631578947e-05,
      "loss": 0.6176,
      "step": 2390
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.125,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.6321,
      "step": 2400
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.25,
      "learning_rate": 1.5978947368421055e-05,
      "loss": 0.6174,
      "step": 2410
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.328125,
      "learning_rate": 1.5957894736842105e-05,
      "loss": 0.5024,
      "step": 2420
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.4375,
      "learning_rate": 1.593684210526316e-05,
      "loss": 0.5771,
      "step": 2430
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.09375,
      "learning_rate": 1.591578947368421e-05,
      "loss": 0.6005,
      "step": 2440
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.875,
      "learning_rate": 1.5894736842105266e-05,
      "loss": 0.6532,
      "step": 2450
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.21875,
      "learning_rate": 1.5873684210526315e-05,
      "loss": 0.7415,
      "step": 2460
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.84375,
      "learning_rate": 1.585263157894737e-05,
      "loss": 0.7431,
      "step": 2470
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.40625,
      "learning_rate": 1.5831578947368424e-05,
      "loss": 0.4914,
      "step": 2480
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.59375,
      "learning_rate": 1.5810526315789473e-05,
      "loss": 0.5853,
      "step": 2490
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.375,
      "learning_rate": 1.578947368421053e-05,
      "loss": 0.7048,
      "step": 2500
    },
    {
      "epoch": 0.06,
      "eval_loss": 0.637090802192688,
      "eval_runtime": 38.18,
      "eval_samples_per_second": 26.192,
      "eval_steps_per_second": 26.192,
      "step": 2500
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.25,
      "learning_rate": 1.576842105263158e-05,
      "loss": 0.6227,
      "step": 2510
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.796875,
      "learning_rate": 1.5747368421052635e-05,
      "loss": 0.5951,
      "step": 2520
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.96875,
      "learning_rate": 1.5726315789473684e-05,
      "loss": 0.6014,
      "step": 2530
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.90625,
      "learning_rate": 1.570526315789474e-05,
      "loss": 0.6057,
      "step": 2540
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.875,
      "learning_rate": 1.568421052631579e-05,
      "loss": 0.6255,
      "step": 2550
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.734375,
      "learning_rate": 1.5663157894736842e-05,
      "loss": 0.6768,
      "step": 2560
    },
    {
      "epoch": 0.06,
      "grad_norm": 7.40625,
      "learning_rate": 1.5642105263157898e-05,
      "loss": 0.6953,
      "step": 2570
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.8125,
      "learning_rate": 1.5621052631578947e-05,
      "loss": 0.6471,
      "step": 2580
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.15625,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.623,
      "step": 2590
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.34375,
      "learning_rate": 1.5578947368421052e-05,
      "loss": 0.6352,
      "step": 2600
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.46875,
      "learning_rate": 1.555789473684211e-05,
      "loss": 0.478,
      "step": 2610
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.140625,
      "learning_rate": 1.5536842105263158e-05,
      "loss": 0.553,
      "step": 2620
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.875,
      "learning_rate": 1.551578947368421e-05,
      "loss": 0.5354,
      "step": 2630
    },
    {
      "epoch": 0.07,
      "grad_norm": 10.0,
      "learning_rate": 1.5494736842105263e-05,
      "loss": 0.6313,
      "step": 2640
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.09375,
      "learning_rate": 1.5473684210526316e-05,
      "loss": 0.6313,
      "step": 2650
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.84375,
      "learning_rate": 1.545263157894737e-05,
      "loss": 0.5761,
      "step": 2660
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.046875,
      "learning_rate": 1.543157894736842e-05,
      "loss": 0.6076,
      "step": 2670
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.875,
      "learning_rate": 1.5410526315789477e-05,
      "loss": 0.5827,
      "step": 2680
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.890625,
      "learning_rate": 1.5389473684210526e-05,
      "loss": 0.5284,
      "step": 2690
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.875,
      "learning_rate": 1.536842105263158e-05,
      "loss": 0.6868,
      "step": 2700
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.90625,
      "learning_rate": 1.534736842105263e-05,
      "loss": 0.6856,
      "step": 2710
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.8125,
      "learning_rate": 1.5326315789473684e-05,
      "loss": 0.6687,
      "step": 2720
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.34375,
      "learning_rate": 1.530526315789474e-05,
      "loss": 0.5599,
      "step": 2730
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.21875,
      "learning_rate": 1.528421052631579e-05,
      "loss": 0.5339,
      "step": 2740
    },
    {
      "epoch": 0.07,
      "grad_norm": 8.1875,
      "learning_rate": 1.5263157894736846e-05,
      "loss": 0.5145,
      "step": 2750
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.75,
      "learning_rate": 1.5242105263157897e-05,
      "loss": 0.559,
      "step": 2760
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.796875,
      "learning_rate": 1.5221052631578948e-05,
      "loss": 0.6415,
      "step": 2770
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.8125,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.6152,
      "step": 2780
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.71875,
      "learning_rate": 1.5178947368421053e-05,
      "loss": 0.5812,
      "step": 2790
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.0625,
      "learning_rate": 1.5157894736842107e-05,
      "loss": 0.5348,
      "step": 2800
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.546875,
      "learning_rate": 1.5136842105263158e-05,
      "loss": 0.5899,
      "step": 2810
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.578125,
      "learning_rate": 1.5115789473684212e-05,
      "loss": 0.5686,
      "step": 2820
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.59375,
      "learning_rate": 1.5094736842105263e-05,
      "loss": 0.4602,
      "step": 2830
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.4375,
      "learning_rate": 1.5073684210526316e-05,
      "loss": 0.6561,
      "step": 2840
    },
    {
      "epoch": 0.07,
      "grad_norm": 8.75,
      "learning_rate": 1.505263157894737e-05,
      "loss": 0.5948,
      "step": 2850
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.1875,
      "learning_rate": 1.5031578947368421e-05,
      "loss": 0.6053,
      "step": 2860
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.0625,
      "learning_rate": 1.5010526315789476e-05,
      "loss": 0.5915,
      "step": 2870
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.671875,
      "learning_rate": 1.4989473684210527e-05,
      "loss": 0.6329,
      "step": 2880
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.421875,
      "learning_rate": 1.4968421052631581e-05,
      "loss": 0.6849,
      "step": 2890
    },
    {
      "epoch": 0.07,
      "grad_norm": 11.0625,
      "learning_rate": 1.4947368421052632e-05,
      "loss": 0.6583,
      "step": 2900
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.25,
      "learning_rate": 1.4926315789473686e-05,
      "loss": 0.6508,
      "step": 2910
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.5,
      "learning_rate": 1.4905263157894739e-05,
      "loss": 0.5244,
      "step": 2920
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.03125,
      "learning_rate": 1.488421052631579e-05,
      "loss": 0.6146,
      "step": 2930
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.1875,
      "learning_rate": 1.4863157894736844e-05,
      "loss": 0.6245,
      "step": 2940
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.8125,
      "learning_rate": 1.4842105263157895e-05,
      "loss": 0.6862,
      "step": 2950
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.40625,
      "learning_rate": 1.482105263157895e-05,
      "loss": 0.5638,
      "step": 2960
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.0625,
      "learning_rate": 1.48e-05,
      "loss": 0.4932,
      "step": 2970
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.3125,
      "learning_rate": 1.4778947368421055e-05,
      "loss": 0.626,
      "step": 2980
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.125,
      "learning_rate": 1.4757894736842106e-05,
      "loss": 0.5843,
      "step": 2990
    },
    {
      "epoch": 0.07,
      "grad_norm": 9.1875,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 0.5773,
      "step": 3000
    },
    {
      "epoch": 0.07,
      "eval_loss": 0.613276481628418,
      "eval_runtime": 38.1471,
      "eval_samples_per_second": 26.214,
      "eval_steps_per_second": 26.214,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.8125,
      "learning_rate": 1.4715789473684213e-05,
      "loss": 0.475,
      "step": 3010
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.21875,
      "learning_rate": 1.4694736842105264e-05,
      "loss": 0.592,
      "step": 3020
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.78125,
      "learning_rate": 1.4673684210526318e-05,
      "loss": 0.6149,
      "step": 3030
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.859375,
      "learning_rate": 1.465263157894737e-05,
      "loss": 0.5379,
      "step": 3040
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.109375,
      "learning_rate": 1.4631578947368424e-05,
      "loss": 0.5233,
      "step": 3050
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.375,
      "learning_rate": 1.4610526315789474e-05,
      "loss": 0.6428,
      "step": 3060
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.46875,
      "learning_rate": 1.4589473684210527e-05,
      "loss": 0.56,
      "step": 3070
    },
    {
      "epoch": 0.08,
      "grad_norm": 13.0625,
      "learning_rate": 1.456842105263158e-05,
      "loss": 0.6643,
      "step": 3080
    },
    {
      "epoch": 0.08,
      "grad_norm": 9.5625,
      "learning_rate": 1.4547368421052632e-05,
      "loss": 0.6606,
      "step": 3090
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.953125,
      "learning_rate": 1.4526315789473687e-05,
      "loss": 0.6734,
      "step": 3100
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.328125,
      "learning_rate": 1.4505263157894738e-05,
      "loss": 0.6948,
      "step": 3110
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.8125,
      "learning_rate": 1.4484210526315792e-05,
      "loss": 0.6735,
      "step": 3120
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.96875,
      "learning_rate": 1.4463157894736843e-05,
      "loss": 0.6107,
      "step": 3130
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.09375,
      "learning_rate": 1.4442105263157896e-05,
      "loss": 0.5645,
      "step": 3140
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.28125,
      "learning_rate": 1.4421052631578948e-05,
      "loss": 0.6415,
      "step": 3150
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.875,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.4961,
      "step": 3160
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.484375,
      "learning_rate": 1.4378947368421054e-05,
      "loss": 0.609,
      "step": 3170
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.59375,
      "learning_rate": 1.4357894736842106e-05,
      "loss": 0.6139,
      "step": 3180
    },
    {
      "epoch": 0.08,
      "grad_norm": 7.5,
      "learning_rate": 1.433684210526316e-05,
      "loss": 0.701,
      "step": 3190
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.4375,
      "learning_rate": 1.4315789473684212e-05,
      "loss": 0.5645,
      "step": 3200
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.0,
      "learning_rate": 1.4294736842105263e-05,
      "loss": 0.6149,
      "step": 3210
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.25,
      "learning_rate": 1.4273684210526317e-05,
      "loss": 0.6544,
      "step": 3220
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.21875,
      "learning_rate": 1.425263157894737e-05,
      "loss": 0.7131,
      "step": 3230
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.359375,
      "learning_rate": 1.4231578947368422e-05,
      "loss": 0.5974,
      "step": 3240
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.109375,
      "learning_rate": 1.4210526315789475e-05,
      "loss": 0.5418,
      "step": 3250
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.96875,
      "learning_rate": 1.418947368421053e-05,
      "loss": 0.5797,
      "step": 3260
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.5,
      "learning_rate": 1.416842105263158e-05,
      "loss": 0.5284,
      "step": 3270
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.21875,
      "learning_rate": 1.4147368421052631e-05,
      "loss": 0.6928,
      "step": 3280
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.90625,
      "learning_rate": 1.4126315789473686e-05,
      "loss": 0.5994,
      "step": 3290
    },
    {
      "epoch": 0.08,
      "grad_norm": 7.09375,
      "learning_rate": 1.4105263157894738e-05,
      "loss": 0.4793,
      "step": 3300
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.984375,
      "learning_rate": 1.4084210526315791e-05,
      "loss": 0.6549,
      "step": 3310
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.078125,
      "learning_rate": 1.4063157894736844e-05,
      "loss": 0.6774,
      "step": 3320
    },
    {
      "epoch": 0.08,
      "grad_norm": 18.5,
      "learning_rate": 1.4042105263157896e-05,
      "loss": 0.7274,
      "step": 3330
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.0625,
      "learning_rate": 1.4021052631578949e-05,
      "loss": 0.625,
      "step": 3340
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.5625,
      "learning_rate": 1.4e-05,
      "loss": 0.6704,
      "step": 3350
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.71875,
      "learning_rate": 1.3978947368421054e-05,
      "loss": 0.717,
      "step": 3360
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.53125,
      "learning_rate": 1.3957894736842105e-05,
      "loss": 0.5063,
      "step": 3370
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.875,
      "learning_rate": 1.393684210526316e-05,
      "loss": 0.5717,
      "step": 3380
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.03125,
      "learning_rate": 1.3915789473684212e-05,
      "loss": 0.5619,
      "step": 3390
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.75,
      "learning_rate": 1.3894736842105265e-05,
      "loss": 0.5526,
      "step": 3400
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.875,
      "learning_rate": 1.3873684210526317e-05,
      "loss": 0.5975,
      "step": 3410
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.890625,
      "learning_rate": 1.3852631578947368e-05,
      "loss": 0.6742,
      "step": 3420
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.4375,
      "learning_rate": 1.3831578947368423e-05,
      "loss": 0.5027,
      "step": 3430
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.8125,
      "learning_rate": 1.3810526315789474e-05,
      "loss": 0.6286,
      "step": 3440
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.1875,
      "learning_rate": 1.3789473684210528e-05,
      "loss": 0.5871,
      "step": 3450
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.0625,
      "learning_rate": 1.3768421052631579e-05,
      "loss": 0.5191,
      "step": 3460
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.4375,
      "learning_rate": 1.3747368421052633e-05,
      "loss": 0.6679,
      "step": 3470
    },
    {
      "epoch": 0.09,
      "grad_norm": 42.5,
      "learning_rate": 1.3726315789473686e-05,
      "loss": 0.6627,
      "step": 3480
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.375,
      "learning_rate": 1.3705263157894737e-05,
      "loss": 0.5305,
      "step": 3490
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.6875,
      "learning_rate": 1.3684210526315791e-05,
      "loss": 0.6408,
      "step": 3500
    },
    {
      "epoch": 0.09,
      "eval_loss": 0.6289076209068298,
      "eval_runtime": 38.1752,
      "eval_samples_per_second": 26.195,
      "eval_steps_per_second": 26.195,
      "step": 3500
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.0625,
      "learning_rate": 1.3663157894736842e-05,
      "loss": 0.6025,
      "step": 3510
    },
    {
      "epoch": 0.09,
      "grad_norm": 11.875,
      "learning_rate": 1.3642105263157897e-05,
      "loss": 0.6137,
      "step": 3520
    },
    {
      "epoch": 0.09,
      "grad_norm": 22.25,
      "learning_rate": 1.3621052631578948e-05,
      "loss": 0.5813,
      "step": 3530
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.125,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 0.4769,
      "step": 3540
    },
    {
      "epoch": 0.09,
      "grad_norm": 7.03125,
      "learning_rate": 1.3578947368421055e-05,
      "loss": 0.7036,
      "step": 3550
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.5625,
      "learning_rate": 1.3557894736842106e-05,
      "loss": 0.6237,
      "step": 3560
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.796875,
      "learning_rate": 1.353684210526316e-05,
      "loss": 0.5743,
      "step": 3570
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.34375,
      "learning_rate": 1.3515789473684211e-05,
      "loss": 0.6427,
      "step": 3580
    },
    {
      "epoch": 0.09,
      "grad_norm": 12.5,
      "learning_rate": 1.3494736842105265e-05,
      "loss": 0.5552,
      "step": 3590
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.0625,
      "learning_rate": 1.3473684210526316e-05,
      "loss": 0.6391,
      "step": 3600
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.125,
      "learning_rate": 1.345263157894737e-05,
      "loss": 0.5062,
      "step": 3610
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.375,
      "learning_rate": 1.3431578947368421e-05,
      "loss": 0.6708,
      "step": 3620
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.078125,
      "learning_rate": 1.3410526315789474e-05,
      "loss": 0.5664,
      "step": 3630
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.90625,
      "learning_rate": 1.3389473684210528e-05,
      "loss": 0.6353,
      "step": 3640
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.0,
      "learning_rate": 1.336842105263158e-05,
      "loss": 0.7542,
      "step": 3650
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.859375,
      "learning_rate": 1.3347368421052634e-05,
      "loss": 0.5818,
      "step": 3660
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.03125,
      "learning_rate": 1.3326315789473685e-05,
      "loss": 0.5839,
      "step": 3670
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.421875,
      "learning_rate": 1.3305263157894739e-05,
      "loss": 0.5676,
      "step": 3680
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.9375,
      "learning_rate": 1.328421052631579e-05,
      "loss": 0.6292,
      "step": 3690
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.34375,
      "learning_rate": 1.3263157894736843e-05,
      "loss": 0.5808,
      "step": 3700
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.125,
      "learning_rate": 1.3242105263157895e-05,
      "loss": 0.7451,
      "step": 3710
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.15625,
      "learning_rate": 1.3221052631578948e-05,
      "loss": 0.4822,
      "step": 3720
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.671875,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.5966,
      "step": 3730
    },
    {
      "epoch": 0.09,
      "grad_norm": 7.71875,
      "learning_rate": 1.3178947368421053e-05,
      "loss": 0.6074,
      "step": 3740
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.703125,
      "learning_rate": 1.3157894736842108e-05,
      "loss": 0.5465,
      "step": 3750
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.71875,
      "learning_rate": 1.3136842105263159e-05,
      "loss": 0.6226,
      "step": 3760
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.9375,
      "learning_rate": 1.3115789473684211e-05,
      "loss": 0.6031,
      "step": 3770
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.703125,
      "learning_rate": 1.3094736842105264e-05,
      "loss": 0.6693,
      "step": 3780
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.5,
      "learning_rate": 1.3073684210526317e-05,
      "loss": 0.6126,
      "step": 3790
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.96875,
      "learning_rate": 1.305263157894737e-05,
      "loss": 0.615,
      "step": 3800
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.78125,
      "learning_rate": 1.3031578947368422e-05,
      "loss": 0.6213,
      "step": 3810
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.9375,
      "learning_rate": 1.3010526315789476e-05,
      "loss": 0.5922,
      "step": 3820
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.453125,
      "learning_rate": 1.2989473684210527e-05,
      "loss": 0.5783,
      "step": 3830
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.03125,
      "learning_rate": 1.2968421052631578e-05,
      "loss": 0.5909,
      "step": 3840
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.1875,
      "learning_rate": 1.2947368421052633e-05,
      "loss": 0.6543,
      "step": 3850
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.625,
      "learning_rate": 1.2926315789473685e-05,
      "loss": 0.5652,
      "step": 3860
    },
    {
      "epoch": 0.1,
      "grad_norm": 11.75,
      "learning_rate": 1.2905263157894738e-05,
      "loss": 0.6078,
      "step": 3870
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.828125,
      "learning_rate": 1.288421052631579e-05,
      "loss": 0.6246,
      "step": 3880
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.15625,
      "learning_rate": 1.2863157894736845e-05,
      "loss": 0.5988,
      "step": 3890
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.21875,
      "learning_rate": 1.2842105263157896e-05,
      "loss": 0.7113,
      "step": 3900
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.15625,
      "learning_rate": 1.2821052631578947e-05,
      "loss": 0.5497,
      "step": 3910
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.09375,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.6188,
      "step": 3920
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.3125,
      "learning_rate": 1.2778947368421054e-05,
      "loss": 0.6139,
      "step": 3930
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.859375,
      "learning_rate": 1.2757894736842106e-05,
      "loss": 0.6298,
      "step": 3940
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.953125,
      "learning_rate": 1.2736842105263159e-05,
      "loss": 0.5542,
      "step": 3950
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.734375,
      "learning_rate": 1.2715789473684212e-05,
      "loss": 0.4915,
      "step": 3960
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.8125,
      "learning_rate": 1.2694736842105264e-05,
      "loss": 0.6733,
      "step": 3970
    },
    {
      "epoch": 0.1,
      "grad_norm": 8.1875,
      "learning_rate": 1.2673684210526315e-05,
      "loss": 0.5441,
      "step": 3980
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.28125,
      "learning_rate": 1.265263157894737e-05,
      "loss": 0.6336,
      "step": 3990
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.09375,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.5396,
      "step": 4000
    },
    {
      "epoch": 0.1,
      "eval_loss": 0.6335766315460205,
      "eval_runtime": 38.1481,
      "eval_samples_per_second": 26.214,
      "eval_steps_per_second": 26.214,
      "step": 4000
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.015625,
      "learning_rate": 1.2610526315789475e-05,
      "loss": 0.5444,
      "step": 4010
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.8125,
      "learning_rate": 1.2589473684210528e-05,
      "loss": 0.5945,
      "step": 4020
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.90625,
      "learning_rate": 1.256842105263158e-05,
      "loss": 0.5489,
      "step": 4030
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.8125,
      "learning_rate": 1.2547368421052633e-05,
      "loss": 0.6619,
      "step": 4040
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.15625,
      "learning_rate": 1.2526315789473684e-05,
      "loss": 0.6392,
      "step": 4050
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.515625,
      "learning_rate": 1.2505263157894738e-05,
      "loss": 0.6275,
      "step": 4060
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.0,
      "learning_rate": 1.248421052631579e-05,
      "loss": 0.6209,
      "step": 4070
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.78125,
      "learning_rate": 1.2463157894736844e-05,
      "loss": 0.6417,
      "step": 4080
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.59375,
      "learning_rate": 1.2442105263157895e-05,
      "loss": 0.5294,
      "step": 4090
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.921875,
      "learning_rate": 1.2421052631578949e-05,
      "loss": 0.7045,
      "step": 4100
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.328125,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.5133,
      "step": 4110
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.21875,
      "learning_rate": 1.2378947368421053e-05,
      "loss": 0.5812,
      "step": 4120
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.21875,
      "learning_rate": 1.2357894736842107e-05,
      "loss": 0.5628,
      "step": 4130
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.828125,
      "learning_rate": 1.2336842105263158e-05,
      "loss": 0.5895,
      "step": 4140
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.734375,
      "learning_rate": 1.2315789473684212e-05,
      "loss": 0.6542,
      "step": 4150
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.3125,
      "learning_rate": 1.2294736842105263e-05,
      "loss": 0.6245,
      "step": 4160
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.90625,
      "learning_rate": 1.2273684210526317e-05,
      "loss": 0.6157,
      "step": 4170
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.65625,
      "learning_rate": 1.225263157894737e-05,
      "loss": 0.602,
      "step": 4180
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.875,
      "learning_rate": 1.2231578947368421e-05,
      "loss": 0.6496,
      "step": 4190
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.34375,
      "learning_rate": 1.2210526315789475e-05,
      "loss": 0.6249,
      "step": 4200
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.3125,
      "learning_rate": 1.2189473684210526e-05,
      "loss": 0.6666,
      "step": 4210
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.6875,
      "learning_rate": 1.216842105263158e-05,
      "loss": 0.6132,
      "step": 4220
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.59375,
      "learning_rate": 1.2147368421052632e-05,
      "loss": 0.5799,
      "step": 4230
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.5625,
      "learning_rate": 1.2126315789473686e-05,
      "loss": 0.6745,
      "step": 4240
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.140625,
      "learning_rate": 1.2105263157894737e-05,
|
"loss": 0.6618, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 1.208421052631579e-05, |
|
"loss": 0.5125, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.75, |
|
"learning_rate": 1.2063157894736844e-05, |
|
"loss": 0.6907, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 1.2042105263157895e-05, |
|
"loss": 0.5876, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.96875, |
|
"learning_rate": 1.202105263157895e-05, |
|
"loss": 0.5825, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.624, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 1.1978947368421055e-05, |
|
"loss": 0.6119, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 1.1957894736842106e-05, |
|
"loss": 0.6331, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.59375, |
|
"learning_rate": 1.1936842105263158e-05, |
|
"loss": 0.5641, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 1.1915789473684211e-05, |
|
"loss": 0.4946, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 1.1894736842105264e-05, |
|
"loss": 0.5547, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.1873684210526318e-05, |
|
"loss": 0.8141, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.390625, |
|
"learning_rate": 1.1852631578947369e-05, |
|
"loss": 0.5756, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 1.1831578947368423e-05, |
|
"loss": 0.5353, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.15625, |
|
"learning_rate": 1.1810526315789474e-05, |
|
"loss": 0.7419, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.5, |
|
"learning_rate": 1.1789473684210527e-05, |
|
"loss": 0.72, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.84375, |
|
"learning_rate": 1.176842105263158e-05, |
|
"loss": 0.6064, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 1.1747368421052632e-05, |
|
"loss": 0.5641, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 9.125, |
|
"learning_rate": 1.1726315789473685e-05, |
|
"loss": 0.6017, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 1.1705263157894737e-05, |
|
"loss": 0.5863, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.03125, |
|
"learning_rate": 1.1684210526315792e-05, |
|
"loss": 0.6267, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 1.1663157894736843e-05, |
|
"loss": 0.5873, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 9.25, |
|
"learning_rate": 1.1642105263157897e-05, |
|
"loss": 0.5499, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 1.1621052631578948e-05, |
|
"loss": 0.621, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 1.16e-05, |
|
"loss": 0.5808, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 1.1578947368421053e-05, |
|
"loss": 0.7199, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"eval_loss": 0.6083159446716309, |
|
"eval_runtime": 38.1826, |
|
"eval_samples_per_second": 26.19, |
|
"eval_steps_per_second": 26.19, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.1557894736842106e-05, |
|
"loss": 0.5804, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 1.153684210526316e-05, |
|
"loss": 0.7276, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 1.1515789473684211e-05, |
|
"loss": 0.6052, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 14.6875, |
|
"learning_rate": 1.1494736842105266e-05, |
|
"loss": 0.6354, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.03125, |
|
"learning_rate": 1.1473684210526317e-05, |
|
"loss": 0.7017, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 23.875, |
|
"learning_rate": 1.145263157894737e-05, |
|
"loss": 0.6074, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.5, |
|
"learning_rate": 1.1431578947368422e-05, |
|
"loss": 0.395, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 7.0, |
|
"learning_rate": 1.1410526315789475e-05, |
|
"loss": 0.5674, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 1.1389473684210527e-05, |
|
"loss": 0.5567, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 1.136842105263158e-05, |
|
"loss": 0.607, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.703125, |
|
"learning_rate": 1.1347368421052634e-05, |
|
"loss": 0.6487, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 1.1326315789473685e-05, |
|
"loss": 0.5341, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 1.1305263157894736e-05, |
|
"loss": 0.6164, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.375, |
|
"learning_rate": 1.128421052631579e-05, |
|
"loss": 0.4827, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 1.1263157894736843e-05, |
|
"loss": 0.647, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 1.1242105263157896e-05, |
|
"loss": 0.5681, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.890625, |
|
"learning_rate": 1.1221052631578949e-05, |
|
"loss": 0.5795, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.0, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 0.6383, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 1.1178947368421054e-05, |
|
"loss": 0.5925, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 10.625, |
|
"learning_rate": 1.1157894736842105e-05, |
|
"loss": 0.5283, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 1.1136842105263159e-05, |
|
"loss": 0.7346, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 1.111578947368421e-05, |
|
"loss": 0.6452, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.546875, |
|
"learning_rate": 1.1094736842105264e-05, |
|
"loss": 0.6553, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 1.1073684210526317e-05, |
|
"loss": 0.5828, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 1.105263157894737e-05, |
|
"loss": 0.5143, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 1.1031578947368422e-05, |
|
"loss": 0.6772, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 1.1010526315789473e-05, |
|
"loss": 0.6578, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.1875, |
|
"learning_rate": 1.0989473684210528e-05, |
|
"loss": 0.5947, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 1.0968421052631579e-05, |
|
"loss": 0.6039, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 1.0947368421052633e-05, |
|
"loss": 0.6658, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 1.0926315789473686e-05, |
|
"loss": 0.4581, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 1.0905263157894738e-05, |
|
"loss": 0.5935, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.25, |
|
"learning_rate": 1.0884210526315791e-05, |
|
"loss": 0.6028, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.859375, |
|
"learning_rate": 1.0863157894736842e-05, |
|
"loss": 0.6113, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 21.5, |
|
"learning_rate": 1.0842105263157896e-05, |
|
"loss": 0.575, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.0, |
|
"learning_rate": 1.0821052631578947e-05, |
|
"loss": 0.5985, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 1.0800000000000002e-05, |
|
"loss": 0.643, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 10.6875, |
|
"learning_rate": 1.0778947368421053e-05, |
|
"loss": 0.6964, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 1.0757894736842107e-05, |
|
"loss": 0.655, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 1.073684210526316e-05, |
|
"loss": 0.6014, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.875, |
|
"learning_rate": 1.071578947368421e-05, |
|
"loss": 0.6139, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.890625, |
|
"learning_rate": 1.0694736842105265e-05, |
|
"loss": 0.5815, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.0, |
|
"learning_rate": 1.0673684210526316e-05, |
|
"loss": 0.6578, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 1.065263157894737e-05, |
|
"loss": 0.5326, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.0, |
|
"learning_rate": 1.0631578947368421e-05, |
|
"loss": 0.5406, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 1.0610526315789476e-05, |
|
"loss": 0.6739, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.8125, |
|
"learning_rate": 1.0589473684210526e-05, |
|
"loss": 0.5474, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 1.0568421052631579e-05, |
|
"loss": 0.5629, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 1.0547368421052633e-05, |
|
"loss": 0.7017, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 0.4835, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"eval_loss": 0.6027244925498962, |
|
"eval_runtime": 38.1439, |
|
"eval_samples_per_second": 26.216, |
|
"eval_steps_per_second": 26.216, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 1.0505263157894739e-05, |
|
"loss": 0.7573, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.048421052631579e-05, |
|
"loss": 0.5092, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.9375, |
|
"learning_rate": 1.0463157894736844e-05, |
|
"loss": 0.642, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 1.0442105263157895e-05, |
|
"loss": 0.6185, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 1.0421052631578948e-05, |
|
"loss": 0.6183, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.125, |
|
"learning_rate": 1.04e-05, |
|
"loss": 0.5496, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 1.0378947368421053e-05, |
|
"loss": 0.5861, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 1.0357894736842107e-05, |
|
"loss": 0.6365, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.75, |
|
"learning_rate": 1.0336842105263158e-05, |
|
"loss": 0.6688, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 1.0315789473684213e-05, |
|
"loss": 0.5361, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.125, |
|
"learning_rate": 1.0294736842105264e-05, |
|
"loss": 0.6006, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.890625, |
|
"learning_rate": 1.0273684210526316e-05, |
|
"loss": 0.6353, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.25, |
|
"learning_rate": 1.0252631578947369e-05, |
|
"loss": 0.6475, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.625, |
|
"learning_rate": 1.0231578947368422e-05, |
|
"loss": 0.498, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.75, |
|
"learning_rate": 1.0210526315789476e-05, |
|
"loss": 0.6467, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.1875, |
|
"learning_rate": 1.0189473684210527e-05, |
|
"loss": 0.611, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 9.75, |
|
"learning_rate": 1.0168421052631581e-05, |
|
"loss": 0.5392, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 13.25, |
|
"learning_rate": 1.0147368421052632e-05, |
|
"loss": 0.645, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.125, |
|
"learning_rate": 1.0126315789473685e-05, |
|
"loss": 0.6287, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 1.0105263157894738e-05, |
|
"loss": 0.5719, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 1.008421052631579e-05, |
|
"loss": 0.621, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 1.0063157894736843e-05, |
|
"loss": 0.5169, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 8.375, |
|
"learning_rate": 1.0042105263157896e-05, |
|
"loss": 0.6793, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 117.0, |
|
"learning_rate": 1.002105263157895e-05, |
|
"loss": 0.6005, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.5625, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6674, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.9375, |
|
"learning_rate": 9.978947368421053e-06, |
|
"loss": 0.6105, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.75, |
|
"learning_rate": 9.957894736842106e-06, |
|
"loss": 0.5876, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 9.936842105263159e-06, |
|
"loss": 0.5551, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 9.915789473684211e-06, |
|
"loss": 0.5699, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.5625, |
|
"learning_rate": 9.894736842105264e-06, |
|
"loss": 0.5652, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 9.873684210526317e-06, |
|
"loss": 0.6494, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 9.85263157894737e-06, |
|
"loss": 0.6, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 9.831578947368422e-06, |
|
"loss": 0.603, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.546875, |
|
"learning_rate": 9.810526315789475e-06, |
|
"loss": 0.6117, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.625, |
|
"learning_rate": 9.789473684210527e-06, |
|
"loss": 0.5883, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 9.76842105263158e-06, |
|
"loss": 0.5722, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 9.747368421052633e-06, |
|
"loss": 0.5692, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 9.726315789473685e-06, |
|
"loss": 0.7724, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.25, |
|
"learning_rate": 9.705263157894738e-06, |
|
"loss": 0.6017, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 9.68421052631579e-06, |
|
"loss": 0.6049, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 9.663157894736843e-06, |
|
"loss": 0.5158, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 9.642105263157896e-06, |
|
"loss": 0.6487, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 9.621052631578947e-06, |
|
"loss": 0.5599, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.5035, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 9.578947368421054e-06, |
|
"loss": 0.5768, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 48.25, |
|
"learning_rate": 9.557894736842107e-06, |
|
"loss": 0.5815, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 9.53684210526316e-06, |
|
"loss": 0.6133, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 9.515789473684212e-06, |
|
"loss": 0.5997, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 9.494736842105265e-06, |
|
"loss": 0.5689, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 0.6511, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 0.6153059005737305, |
|
"eval_runtime": 38.2125, |
|
"eval_samples_per_second": 26.169, |
|
"eval_steps_per_second": 26.169, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 9.452631578947368e-06, |
|
"loss": 0.5449, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.40625, |
|
"learning_rate": 9.43157894736842e-06, |
|
"loss": 0.6634, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 7.875, |
|
"learning_rate": 9.410526315789475e-06, |
|
"loss": 0.6222, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.4375, |
|
"learning_rate": 9.389473684210528e-06, |
|
"loss": 0.5707, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.828125, |
|
"learning_rate": 9.36842105263158e-06, |
|
"loss": 0.4959, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 9.347368421052633e-06, |
|
"loss": 0.5941, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 9.326315789473684e-06, |
|
"loss": 0.6101, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 9.305263157894737e-06, |
|
"loss": 0.5916, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 9.28421052631579e-06, |
|
"loss": 0.5927, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 9.263157894736842e-06, |
|
"loss": 0.5575, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.242105263157896e-06, |
|
"loss": 0.5709, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.5, |
|
"learning_rate": 9.221052631578949e-06, |
|
"loss": 0.6237, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.25, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.6183, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 9.178947368421053e-06, |
|
"loss": 0.579, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 9.157894736842105e-06, |
|
"loss": 0.5801, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 9.136842105263158e-06, |
|
"loss": 0.5568, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 9.11578947368421e-06, |
|
"loss": 0.6277, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.125, |
|
"learning_rate": 9.094736842105263e-06, |
|
"loss": 0.5189, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.073684210526316e-06, |
|
"loss": 0.6304, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 9.05263157894737e-06, |
|
"loss": 0.5956, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 9.031578947368423e-06, |
|
"loss": 0.6483, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.09375, |
|
"learning_rate": 9.010526315789474e-06, |
|
"loss": 0.601, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.5, |
|
"learning_rate": 8.989473684210527e-06, |
|
"loss": 0.5887, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 8.96842105263158e-06, |
|
"loss": 0.63, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 8.947368421052632e-06, |
|
"loss": 0.5988, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 8.926315789473685e-06, |
|
"loss": 0.5692, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 8.905263157894737e-06, |
|
"loss": 0.5917, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.40625, |
|
"learning_rate": 8.884210526315792e-06, |
|
"loss": 0.565, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.15625, |
|
"learning_rate": 8.863157894736842e-06, |
|
"loss": 0.5388, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 7.875, |
|
"learning_rate": 8.842105263157895e-06, |
|
"loss": 0.6622, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.25, |
|
"learning_rate": 8.821052631578948e-06, |
|
"loss": 0.6043, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.953125, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.6431, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 8.778947368421053e-06, |
|
"loss": 0.563, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 8.757894736842106e-06, |
|
"loss": 0.6342, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 8.736842105263158e-06, |
|
"loss": 0.6206, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 8.715789473684211e-06, |
|
"loss": 0.5046, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 13.75, |
|
"learning_rate": 8.694736842105264e-06, |
|
"loss": 0.5405, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 8.673684210526316e-06, |
|
"loss": 0.6021, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.0, |
|
"learning_rate": 8.652631578947369e-06, |
|
"loss": 0.7437, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.4375, |
|
"learning_rate": 8.631578947368422e-06, |
|
"loss": 0.6618, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 8.610526315789474e-06, |
|
"loss": 0.6088, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 8.589473684210527e-06, |
|
"loss": 0.5808, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.25, |
|
"learning_rate": 8.56842105263158e-06, |
|
"loss": 0.6744, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 8.547368421052632e-06, |
|
"loss": 0.5432, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 8.526315789473685e-06, |
|
"loss": 0.6601, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 8.505263157894738e-06, |
|
"loss": 0.4648, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 8.48421052631579e-06, |
|
"loss": 0.5452, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.09375, |
|
"learning_rate": 8.463157894736843e-06, |
|
"loss": 0.5761, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 8.442105263157896e-06, |
|
"loss": 0.5394, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 0.6339, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"eval_loss": 0.6167545318603516, |
|
"eval_runtime": 38.2022, |
|
"eval_samples_per_second": 26.177, |
|
"eval_steps_per_second": 26.177, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.5043, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 8.378947368421054e-06, |
|
"loss": 0.6012, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 8.357894736842106e-06, |
|
"loss": 0.5963, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 8.336842105263159e-06, |
|
"loss": 0.6281, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 8.315789473684212e-06, |
|
"loss": 0.4643, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 8.294736842105264e-06, |
|
"loss": 0.6371, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 8.273684210526317e-06, |
|
"loss": 0.5769, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.703125, |
|
"learning_rate": 8.25263157894737e-06, |
|
"loss": 0.7226, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.375, |
|
"learning_rate": 8.231578947368422e-06, |
|
"loss": 0.541, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 8.210526315789475e-06, |
|
"loss": 0.5965, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 8.189473684210527e-06, |
|
"loss": 0.5999, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 8.16842105263158e-06, |
|
"loss": 0.6388, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 8.147368421052633e-06, |
|
"loss": 0.5696, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 8.126315789473684e-06, |
|
"loss": 0.4886, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 8.105263157894736e-06, |
|
"loss": 0.5406, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 8.08421052631579e-06, |
|
"loss": 0.6134, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.875, |
|
"learning_rate": 8.063157894736843e-06, |
|
"loss": 0.6944, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 8.042105263157896e-06, |
|
"loss": 0.5848, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 8.021052631578949e-06, |
|
"loss": 0.5441, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.678, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.625, |
|
"learning_rate": 7.978947368421052e-06, |
|
"loss": 0.639, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 7.957894736842105e-06, |
|
"loss": 0.6824, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 7.936842105263158e-06, |
|
"loss": 0.6219, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.796875, |
|
"learning_rate": 7.915789473684212e-06, |
|
"loss": 0.6267, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 7.894736842105265e-06, |
|
"loss": 0.5348, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 7.873684210526317e-06, |
|
"loss": 0.6523, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 8.25, |
|
"learning_rate": 7.85263157894737e-06, |
|
"loss": 0.6024, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.875, |
|
"learning_rate": 7.831578947368421e-06, |
|
"loss": 0.5896, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 7.810526315789474e-06, |
|
"loss": 0.7023, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 7.789473684210526e-06, |
|
"loss": 0.5793, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.375, |
|
"learning_rate": 7.768421052631579e-06, |
|
"loss": 0.6332, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.3125, |
|
"learning_rate": 7.747368421052631e-06, |
|
"loss": 0.5124, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.726315789473686e-06, |
|
"loss": 0.6329, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 9.0, |
|
"learning_rate": 7.705263157894738e-06, |
|
"loss": 0.7025, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 7.68421052631579e-06, |
|
"loss": 0.5197, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 7.663157894736842e-06, |
|
"loss": 0.6173, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 7.642105263157895e-06, |
|
"loss": 0.5852, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 7.621052631578948e-06, |
|
"loss": 0.5688, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 0.5457, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 7.578947368421054e-06, |
|
"loss": 0.6564, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.557894736842106e-06, |
|
"loss": 0.5177, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 7.536842105263158e-06, |
|
"loss": 0.6287, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 7.515789473684211e-06, |
|
"loss": 0.6026, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 7.494736842105263e-06, |
|
"loss": 0.5795, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 7.473684210526316e-06, |
|
"loss": 0.6848, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 7.4526315789473695e-06, |
|
"loss": 0.4139, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.0, |
|
"learning_rate": 7.431578947368422e-06, |
|
"loss": 0.5696, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.953125, |
|
"learning_rate": 7.410526315789475e-06, |
|
"loss": 0.6651, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 7.3894736842105275e-06, |
|
"loss": 0.6557, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 7.368421052631579e-06, |
|
"loss": 0.5214, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"eval_loss": 0.6133315563201904, |
|
"eval_runtime": 38.1535, |
|
"eval_samples_per_second": 26.21, |
|
"eval_steps_per_second": 26.21, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 7.347368421052632e-06, |
|
"loss": 0.6287, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.75, |
|
"learning_rate": 7.326315789473685e-06, |
|
"loss": 0.6126, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 7.305263157894737e-06, |
|
"loss": 0.68, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.96875, |
|
"learning_rate": 7.28421052631579e-06, |
|
"loss": 0.4831, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 7.263157894736843e-06, |
|
"loss": 0.6983, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 7.242105263157896e-06, |
|
"loss": 0.5214, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 7.221052631578948e-06, |
|
"loss": 0.5789, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 11.3125, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 0.6369, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 7.178947368421053e-06, |
|
"loss": 0.608, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.25, |
|
"learning_rate": 7.157894736842106e-06, |
|
"loss": 0.6514, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 7.1368421052631585e-06, |
|
"loss": 0.4964, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.40625, |
|
"learning_rate": 7.115789473684211e-06, |
|
"loss": 0.6726, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 7.094736842105265e-06, |
|
"loss": 0.636, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 7.073684210526316e-06, |
|
"loss": 0.69, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 15.375, |
|
"learning_rate": 7.052631578947369e-06, |
|
"loss": 0.5622, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 7.031578947368422e-06, |
|
"loss": 0.5597, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 7.010526315789474e-06, |
|
"loss": 0.6246, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 6.989473684210527e-06, |
|
"loss": 0.5107, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.84375, |
|
"learning_rate": 6.96842105263158e-06, |
|
"loss": 0.646, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.5625, |
|
"learning_rate": 6.947368421052632e-06, |
|
"loss": 0.659, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 6.926315789473684e-06, |
|
"loss": 0.4536, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 6.905263157894737e-06, |
|
"loss": 0.6416, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 6.8842105263157895e-06, |
|
"loss": 0.6546, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 6.863157894736843e-06, |
|
"loss": 0.6834, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.53125, |
|
"learning_rate": 6.842105263157896e-06, |
|
"loss": 0.6329, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.75, |
|
"learning_rate": 6.821052631578948e-06, |
|
"loss": 0.5559, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 0.5685, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.03125, |
|
"learning_rate": 6.778947368421053e-06, |
|
"loss": 0.6151, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.296875, |
|
"learning_rate": 6.7578947368421054e-06, |
|
"loss": 0.6197, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 6.736842105263158e-06, |
|
"loss": 0.565, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 6.715789473684211e-06, |
|
"loss": 0.5766, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 6.694736842105264e-06, |
|
"loss": 0.5622, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 13.125, |
|
"learning_rate": 6.673684210526317e-06, |
|
"loss": 0.6691, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 9.375, |
|
"learning_rate": 6.6526315789473695e-06, |
|
"loss": 0.5872, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.296875, |
|
"learning_rate": 6.631578947368421e-06, |
|
"loss": 0.5883, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 12.0625, |
|
"learning_rate": 6.610526315789474e-06, |
|
"loss": 0.6339, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 6.589473684210527e-06, |
|
"loss": 0.612, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 6.568421052631579e-06, |
|
"loss": 0.5187, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 6.547368421052632e-06, |
|
"loss": 0.5306, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.671875, |
|
"learning_rate": 6.526315789473685e-06, |
|
"loss": 0.5485, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 9.5625, |
|
"learning_rate": 6.505263157894738e-06, |
|
"loss": 0.6115, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.9375, |
|
"learning_rate": 6.484210526315789e-06, |
|
"loss": 0.5446, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 6.463157894736843e-06, |
|
"loss": 0.6284, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 6.442105263157895e-06, |
|
"loss": 0.526, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 6.421052631578948e-06, |
|
"loss": 0.6324, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 0.575, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 6.378947368421053e-06, |
|
"loss": 0.5664, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.75, |
|
"learning_rate": 6.357894736842106e-06, |
|
"loss": 0.5779, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.15625, |
|
"learning_rate": 6.336842105263158e-06, |
|
"loss": 0.5258, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.0625, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 0.5891, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 0.6335042119026184, |
|
"eval_runtime": 38.1328, |
|
"eval_samples_per_second": 26.224, |
|
"eval_steps_per_second": 26.224, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.53125, |
|
"learning_rate": 6.294736842105264e-06, |
|
"loss": 0.524, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 6.2736842105263165e-06, |
|
"loss": 0.6672, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.252631578947369e-06, |
|
"loss": 0.5906, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.703125, |
|
"learning_rate": 6.231578947368422e-06, |
|
"loss": 0.6795, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.09375, |
|
"learning_rate": 6.2105263157894745e-06, |
|
"loss": 0.574, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.921875, |
|
"learning_rate": 6.189473684210526e-06, |
|
"loss": 0.6008, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 6.168421052631579e-06, |
|
"loss": 0.5116, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 6.1473684210526316e-06, |
|
"loss": 0.6047, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.126315789473685e-06, |
|
"loss": 0.3864, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 6.105263157894738e-06, |
|
"loss": 0.5869, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 9.875, |
|
"learning_rate": 6.08421052631579e-06, |
|
"loss": 0.5377, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 6.063157894736843e-06, |
|
"loss": 0.5649, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.875, |
|
"learning_rate": 6.042105263157895e-06, |
|
"loss": 0.5955, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.53125, |
|
"learning_rate": 6.0210526315789475e-06, |
|
"loss": 0.5352, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 9.625, |
|
"learning_rate": 6e-06, |
|
"loss": 0.5801, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.375, |
|
"learning_rate": 5.978947368421053e-06, |
|
"loss": 0.4599, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 5.9578947368421055e-06, |
|
"loss": 0.5337, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 5.936842105263159e-06, |
|
"loss": 0.5677, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 5.915789473684212e-06, |
|
"loss": 0.6926, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 5.8947368421052634e-06, |
|
"loss": 0.6243, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.5, |
|
"learning_rate": 5.873684210526316e-06, |
|
"loss": 0.5837, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 5.852631578947369e-06, |
|
"loss": 0.6096, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 5.831578947368421e-06, |
|
"loss": 0.5598, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.40625, |
|
"learning_rate": 5.810526315789474e-06, |
|
"loss": 0.5448, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.671875, |
|
"learning_rate": 5.789473684210527e-06, |
|
"loss": 0.5487, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 10.5625, |
|
"learning_rate": 5.76842105263158e-06, |
|
"loss": 0.5739, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 5.747368421052633e-06, |
|
"loss": 0.6278, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 5.726315789473685e-06, |
|
"loss": 0.5801, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.796875, |
|
"learning_rate": 5.705263157894737e-06, |
|
"loss": 0.4992, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 5.68421052631579e-06, |
|
"loss": 0.607, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.0, |
|
"learning_rate": 5.663157894736843e-06, |
|
"loss": 0.5189, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 5.642105263157895e-06, |
|
"loss": 0.6886, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 5.621052631578948e-06, |
|
"loss": 0.6127, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.875, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 0.625, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 16.25, |
|
"learning_rate": 5.578947368421052e-06, |
|
"loss": 0.537, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.90625, |
|
"learning_rate": 5.557894736842105e-06, |
|
"loss": 0.5933, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 13.375, |
|
"learning_rate": 5.5368421052631586e-06, |
|
"loss": 0.6001, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 14.75, |
|
"learning_rate": 5.515789473684211e-06, |
|
"loss": 0.6809, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 5.494736842105264e-06, |
|
"loss": 0.5997, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.03125, |
|
"learning_rate": 5.4736842105263165e-06, |
|
"loss": 0.6418, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 5.452631578947369e-06, |
|
"loss": 0.5285, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 10.0625, |
|
"learning_rate": 5.431578947368421e-06, |
|
"loss": 0.6465, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 5.410526315789474e-06, |
|
"loss": 0.6697, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 5.389473684210526e-06, |
|
"loss": 0.5943, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.875, |
|
"learning_rate": 5.36842105263158e-06, |
|
"loss": 0.6122, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 5.3473684210526325e-06, |
|
"loss": 0.5825, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 27.75, |
|
"learning_rate": 5.326315789473685e-06, |
|
"loss": 0.6151, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 5.305263157894738e-06, |
|
"loss": 0.621, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 5.2842105263157896e-06, |
|
"loss": 0.5397, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 0.6428, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"eval_loss": 0.6119253039360046, |
|
"eval_runtime": 38.1677, |
|
"eval_samples_per_second": 26.2, |
|
"eval_steps_per_second": 26.2, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 5.242105263157895e-06, |
|
"loss": 0.5938, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.0625, |
|
"learning_rate": 5.2210526315789475e-06, |
|
"loss": 0.6286, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 5.2e-06, |
|
"loss": 0.7065, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 5.178947368421054e-06, |
|
"loss": 0.5862, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.5, |
|
"learning_rate": 5.157894736842106e-06, |
|
"loss": 0.5015, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 5.136842105263158e-06, |
|
"loss": 0.6041, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 5.115789473684211e-06, |
|
"loss": 0.6845, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 5.0947368421052635e-06, |
|
"loss": 0.698, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 5.073684210526316e-06, |
|
"loss": 0.4452, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 5.052631578947369e-06, |
|
"loss": 0.6485, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 5.0315789473684214e-06, |
|
"loss": 0.5703, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.953125, |
|
"learning_rate": 5.010526315789475e-06, |
|
"loss": 0.5871, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 4.989473684210527e-06, |
|
"loss": 0.6024, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 4.968421052631579e-06, |
|
"loss": 0.5723, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 4.947368421052632e-06, |
|
"loss": 0.584, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 4.926315789473685e-06, |
|
"loss": 0.6387, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 4.905263157894737e-06, |
|
"loss": 0.6574, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 4.88421052631579e-06, |
|
"loss": 0.5907, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.5, |
|
"learning_rate": 4.863157894736843e-06, |
|
"loss": 0.6319, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 4.842105263157895e-06, |
|
"loss": 0.5053, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 4.821052631578948e-06, |
|
"loss": 0.5135, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 17.375, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 0.6893, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 4.778947368421053e-06, |
|
"loss": 0.6257, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 4.757894736842106e-06, |
|
"loss": 0.5808, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 4.736842105263158e-06, |
|
"loss": 0.518, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 4.71578947368421e-06, |
|
"loss": 0.5991, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 4.694736842105264e-06, |
|
"loss": 0.6514, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 4.6736842105263166e-06, |
|
"loss": 0.489, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 4.652631578947368e-06, |
|
"loss": 0.5181, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 4.631578947368421e-06, |
|
"loss": 0.5767, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 4.6105263157894745e-06, |
|
"loss": 0.4968, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 4.589473684210526e-06, |
|
"loss": 0.6562, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 4.568421052631579e-06, |
|
"loss": 0.6273, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.75, |
|
"learning_rate": 4.547368421052632e-06, |
|
"loss": 0.6351, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 4.526315789473685e-06, |
|
"loss": 0.5495, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 10.6875, |
|
"learning_rate": 4.505263157894737e-06, |
|
"loss": 0.6603, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.25, |
|
"learning_rate": 4.48421052631579e-06, |
|
"loss": 0.5525, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 4.463157894736842e-06, |
|
"loss": 0.6219, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 4.442105263157896e-06, |
|
"loss": 0.5518, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 4.4210526315789476e-06, |
|
"loss": 0.5462, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 4.4e-06, |
|
"loss": 0.5776, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 4.378947368421053e-06, |
|
"loss": 0.4804, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 4.3578947368421055e-06, |
|
"loss": 0.5858, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 4.336842105263158e-06, |
|
"loss": 0.6847, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 4.315789473684211e-06, |
|
"loss": 0.6626, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 4.2947368421052635e-06, |
|
"loss": 0.5526, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.625, |
|
"learning_rate": 4.273684210526316e-06, |
|
"loss": 0.5661, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.28125, |
|
"learning_rate": 4.252631578947369e-06, |
|
"loss": 0.6182, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 4.2315789473684215e-06, |
|
"loss": 0.629, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 4.210526315789474e-06, |
|
"loss": 0.7233, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 0.6029936671257019, |
|
"eval_runtime": 38.1479, |
|
"eval_samples_per_second": 26.214, |
|
"eval_steps_per_second": 26.214, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 4.189473684210527e-06, |
|
"loss": 0.5451, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.0, |
|
"learning_rate": 4.1684210526315794e-06, |
|
"loss": 0.5498, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.875, |
|
"learning_rate": 4.147368421052632e-06, |
|
"loss": 0.5518, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 4.126315789473685e-06, |
|
"loss": 0.6088, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 41.0, |
|
"learning_rate": 4.105263157894737e-06, |
|
"loss": 0.6399, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 4.08421052631579e-06, |
|
"loss": 0.5412, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 4.063157894736842e-06, |
|
"loss": 0.4933, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 4.042105263157895e-06, |
|
"loss": 0.6182, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 4.021052631578948e-06, |
|
"loss": 0.5342, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.5838, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 3.9789473684210525e-06, |
|
"loss": 0.6717, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 7.0, |
|
"learning_rate": 3.957894736842106e-06, |
|
"loss": 0.6484, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 3.936842105263159e-06, |
|
"loss": 0.6253, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 3.9157894736842104e-06, |
|
"loss": 0.5562, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.4375, |
|
"learning_rate": 3.894736842105263e-06, |
|
"loss": 0.6162, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 3.873684210526316e-06, |
|
"loss": 0.5602, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 3.852631578947369e-06, |
|
"loss": 0.5414, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.40625, |
|
"learning_rate": 3.831578947368421e-06, |
|
"loss": 0.5783, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 3.810526315789474e-06, |
|
"loss": 0.583, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 3.789473684210527e-06, |
|
"loss": 0.632, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.625, |
|
"learning_rate": 3.768421052631579e-06, |
|
"loss": 0.5573, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 3.7473684210526317e-06, |
|
"loss": 0.5176, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 3.7263157894736848e-06, |
|
"loss": 0.4684, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 3.7052631578947374e-06, |
|
"loss": 0.6114, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 3.6842105263157896e-06, |
|
"loss": 0.5437, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 3.6631578947368423e-06, |
|
"loss": 0.5119, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 3.642105263157895e-06, |
|
"loss": 0.5742, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.46875, |
|
"learning_rate": 3.621052631578948e-06, |
|
"loss": 0.6704, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.25, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 0.5655, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 3.578947368421053e-06, |
|
"loss": 0.5382, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.875, |
|
"learning_rate": 3.5578947368421056e-06, |
|
"loss": 0.5485, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 3.536842105263158e-06, |
|
"loss": 0.5124, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.625, |
|
"learning_rate": 3.515789473684211e-06, |
|
"loss": 0.4819, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 3.4947368421052635e-06, |
|
"loss": 0.5999, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 3.473684210526316e-06, |
|
"loss": 0.5241, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 3.4526315789473684e-06, |
|
"loss": 0.6193, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.09375, |
|
"learning_rate": 3.4315789473684215e-06, |
|
"loss": 0.6701, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 3.410526315789474e-06, |
|
"loss": 0.4458, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 3.3894736842105264e-06, |
|
"loss": 0.5441, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 3.368421052631579e-06, |
|
"loss": 0.5787, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 3.347368421052632e-06, |
|
"loss": 0.5972, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.828125, |
|
"learning_rate": 3.3263157894736848e-06, |
|
"loss": 0.5784, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 3.305263157894737e-06, |
|
"loss": 0.6005, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.875, |
|
"learning_rate": 3.2842105263157897e-06, |
|
"loss": 0.5343, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.0, |
|
"learning_rate": 3.2631578947368423e-06, |
|
"loss": 0.5407, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 3.2421052631578945e-06, |
|
"loss": 0.5047, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 3.2210526315789476e-06, |
|
"loss": 0.4705, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 0.5838, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 3.178947368421053e-06, |
|
"loss": 0.547, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.0625, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 0.6311, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"eval_loss": 0.6170388460159302, |
|
"eval_runtime": 38.1693, |
|
"eval_samples_per_second": 26.199, |
|
"eval_steps_per_second": 26.199, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 3.1368421052631582e-06, |
|
"loss": 0.6415, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 3.115789473684211e-06, |
|
"loss": 0.5877, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 3.8125, |
|
"learning_rate": 3.094736842105263e-06, |
|
"loss": 0.5951, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 3.0736842105263158e-06, |
|
"loss": 0.6047, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 3.052631578947369e-06, |
|
"loss": 0.5694, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 3.0315789473684215e-06, |
|
"loss": 0.6126, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.75, |
|
"learning_rate": 3.0105263157894737e-06, |
|
"loss": 0.521, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 2.9894736842105264e-06, |
|
"loss": 0.6389, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.75, |
|
"learning_rate": 2.9684210526315795e-06, |
|
"loss": 0.5909, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 2.9473684210526317e-06, |
|
"loss": 0.6578, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.0, |
|
"learning_rate": 2.9263157894736844e-06, |
|
"loss": 0.5277, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 2.905263157894737e-06, |
|
"loss": 0.5313, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 2.88421052631579e-06, |
|
"loss": 0.5201, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.0, |
|
"learning_rate": 2.8631578947368423e-06, |
|
"loss": 0.7751, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 2.842105263157895e-06, |
|
"loss": 0.6536, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 2.8210526315789476e-06, |
|
"loss": 0.6661, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 0.6284, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 2.7789473684210525e-06, |
|
"loss": 0.673, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 2.7578947368421056e-06, |
|
"loss": 0.646, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 2.7368421052631583e-06, |
|
"loss": 0.5917, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 2.7157894736842105e-06, |
|
"loss": 0.5706, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 2.694736842105263e-06, |
|
"loss": 0.477, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.875, |
|
"learning_rate": 2.6736842105263162e-06, |
|
"loss": 0.646, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.75, |
|
"learning_rate": 2.652631578947369e-06, |
|
"loss": 0.638, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.4375, |
|
"learning_rate": 2.631578947368421e-06, |
|
"loss": 0.6282, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 2.6105263157894738e-06, |
|
"loss": 0.6418, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 2.589473684210527e-06, |
|
"loss": 0.6228, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.765625, |
|
"learning_rate": 2.568421052631579e-06, |
|
"loss": 0.5794, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 2.5473684210526317e-06, |
|
"loss": 0.5207, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.75, |
|
"learning_rate": 2.5263157894736844e-06, |
|
"loss": 0.623, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.40625, |
|
"learning_rate": 2.5052631578947375e-06, |
|
"loss": 0.6086, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 2.4842105263157897e-06, |
|
"loss": 0.579, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 2.4631578947368424e-06, |
|
"loss": 0.4531, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.75, |
|
"learning_rate": 2.442105263157895e-06, |
|
"loss": 0.5167, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 2.4210526315789477e-06, |
|
"loss": 0.6362, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.53125, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 0.6628, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 2.378947368421053e-06, |
|
"loss": 0.722, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 2.357894736842105e-06, |
|
"loss": 0.5313, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.25, |
|
"learning_rate": 2.3368421052631583e-06, |
|
"loss": 0.6277, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.0, |
|
"learning_rate": 2.3157894736842105e-06, |
|
"loss": 0.5938, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.546875, |
|
"learning_rate": 2.294736842105263e-06, |
|
"loss": 0.6851, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.0, |
|
"learning_rate": 2.273684210526316e-06, |
|
"loss": 0.4746, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.0, |
|
"learning_rate": 2.2526315789473685e-06, |
|
"loss": 0.5109, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 2.231578947368421e-06, |
|
"loss": 0.4732, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 2.2105263157894738e-06, |
|
"loss": 0.7736, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 2.1894736842105264e-06, |
|
"loss": 0.6251, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.75, |
|
"learning_rate": 2.168421052631579e-06, |
|
"loss": 0.6428, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 11.5625, |
|
"learning_rate": 2.1473684210526317e-06, |
|
"loss": 0.5886, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.8125, |
|
"learning_rate": 2.1263157894736844e-06, |
|
"loss": 0.597, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 2.105263157894737e-06, |
|
"loss": 0.527, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"eval_loss": 0.6106051802635193, |
|
"eval_runtime": 38.1285, |
|
"eval_samples_per_second": 26.227, |
|
"eval_steps_per_second": 26.227, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.375, |
|
"learning_rate": 2.0842105263157897e-06, |
|
"loss": 0.5752, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 2.0631578947368424e-06, |
|
"loss": 0.5578, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.9375, |
|
"learning_rate": 2.042105263157895e-06, |
|
"loss": 0.6715, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 2.0210526315789477e-06, |
|
"loss": 0.5559, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.5839, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.375, |
|
"learning_rate": 1.978947368421053e-06, |
|
"loss": 0.6546, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 1.9578947368421052e-06, |
|
"loss": 0.4581, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.936842105263158e-06, |
|
"loss": 0.6111, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 1.9157894736842105e-06, |
|
"loss": 0.569, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.25, |
|
"learning_rate": 1.8947368421052634e-06, |
|
"loss": 0.6089, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 1.8736842105263158e-06, |
|
"loss": 0.5164, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.875, |
|
"learning_rate": 1.8526315789473687e-06, |
|
"loss": 0.5259, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 1.8315789473684211e-06, |
|
"loss": 0.6318, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.625, |
|
"learning_rate": 1.810526315789474e-06, |
|
"loss": 0.5954, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.25, |
|
"learning_rate": 1.7894736842105265e-06, |
|
"loss": 0.5146, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 1.768421052631579e-06, |
|
"loss": 0.6194, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 1.7473684210526318e-06, |
|
"loss": 0.5031, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 1.7263157894736842e-06, |
|
"loss": 0.5199, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 12.75, |
|
"learning_rate": 1.705263157894737e-06, |
|
"loss": 0.6261, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.640625, |
|
"learning_rate": 1.6842105263157895e-06, |
|
"loss": 0.6349, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 1.6631578947368424e-06, |
|
"loss": 0.5244, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.15625, |
|
"learning_rate": 1.6421052631578948e-06, |
|
"loss": 0.5428, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.625, |
|
"learning_rate": 1.6210526315789473e-06, |
|
"loss": 0.6296, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.125, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 0.5923, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.875, |
|
"learning_rate": 1.5789473684210526e-06, |
|
"loss": 0.6146, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 1.5578947368421054e-06, |
|
"loss": 0.6558, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 1.5368421052631579e-06, |
|
"loss": 0.5426, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 1.5157894736842108e-06, |
|
"loss": 0.611, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 1.4947368421052632e-06, |
|
"loss": 0.5013, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 1.4736842105263159e-06, |
|
"loss": 0.599, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 1.4526315789473685e-06, |
|
"loss": 0.5549, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 7.375, |
|
"learning_rate": 1.4315789473684212e-06, |
|
"loss": 0.5597, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 1.4105263157894738e-06, |
|
"loss": 0.5402, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 1.3894736842105263e-06, |
|
"loss": 0.56, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.5, |
|
"learning_rate": 1.3684210526315791e-06, |
|
"loss": 0.5991, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.625, |
|
"learning_rate": 1.3473684210526316e-06, |
|
"loss": 0.499, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.125, |
|
"learning_rate": 1.3263157894736844e-06, |
|
"loss": 0.5946, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.59375, |
|
"learning_rate": 1.3052631578947369e-06, |
|
"loss": 0.678, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 1.2842105263157895e-06, |
|
"loss": 0.7289, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.953125, |
|
"learning_rate": 1.2631578947368422e-06, |
|
"loss": 0.6926, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 1.2421052631578948e-06, |
|
"loss": 0.6079, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 1.2210526315789475e-06, |
|
"loss": 0.6642, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.28125, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 0.5805, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 1.1789473684210526e-06, |
|
"loss": 0.564, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 1.1578947368421053e-06, |
|
"loss": 0.571, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.625, |
|
"learning_rate": 1.136842105263158e-06, |
|
"loss": 0.538, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 1.1157894736842106e-06, |
|
"loss": 0.5777, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 1.0947368421052632e-06, |
|
"loss": 0.683, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 1.0736842105263159e-06, |
|
"loss": 0.5143, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 1.0526315789473685e-06, |
|
"loss": 0.4899, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"eval_loss": 0.6154074668884277, |
|
"eval_runtime": 38.1761, |
|
"eval_samples_per_second": 26.194, |
|
"eval_steps_per_second": 26.194, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 7.125, |
|
"learning_rate": 1.0315789473684212e-06, |
|
"loss": 0.7389, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.734375, |
|
"learning_rate": 1.0105263157894738e-06, |
|
"loss": 0.5922, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 9.894736842105265e-07, |
|
"loss": 0.5658, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.765625, |
|
"learning_rate": 9.68421052631579e-07, |
|
"loss": 0.608, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 9.473684210526317e-07, |
|
"loss": 0.5337, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 9.263157894736844e-07, |
|
"loss": 0.5736, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.03125, |
|
"learning_rate": 9.05263157894737e-07, |
|
"loss": 0.5319, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.859375, |
|
"learning_rate": 8.842105263157895e-07, |
|
"loss": 0.5011, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 8.631578947368421e-07, |
|
"loss": 0.6734, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.625, |
|
"learning_rate": 8.421052631578948e-07, |
|
"loss": 0.5045, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.78125, |
|
"learning_rate": 8.210526315789474e-07, |
|
"loss": 0.5957, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 0.6944, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 7.789473684210527e-07, |
|
"loss": 0.5249, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 12.9375, |
|
"learning_rate": 7.578947368421054e-07, |
|
"loss": 0.5981, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 7.25, |
|
"learning_rate": 7.368421052631579e-07, |
|
"loss": 0.633, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 7.157894736842106e-07, |
|
"loss": 0.5535, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 6.947368421052631e-07, |
|
"loss": 0.6233, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 6.736842105263158e-07, |
|
"loss": 0.5603, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.890625, |
|
"learning_rate": 6.526315789473684e-07, |
|
"loss": 0.5977, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.25, |
|
"learning_rate": 6.315789473684211e-07, |
|
"loss": 0.5737, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 6.105263157894738e-07, |
|
"loss": 0.5797, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 5.894736842105263e-07, |
|
"loss": 0.6256, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 5.68421052631579e-07, |
|
"loss": 0.4984, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.28125, |
|
"learning_rate": 5.473684210526316e-07, |
|
"loss": 0.6489, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 5.263157894736843e-07, |
|
"loss": 0.5204, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 5.052631578947369e-07, |
|
"loss": 0.6044, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 4.842105263157895e-07, |
|
"loss": 0.6061, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 4.631578947368422e-07, |
|
"loss": 0.5491, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 4.421052631578947e-07, |
|
"loss": 0.5567, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.671875, |
|
"learning_rate": 4.210526315789474e-07, |
|
"loss": 0.6661, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.875, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 0.672, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 3.789473684210527e-07, |
|
"loss": 0.5887, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 3.578947368421053e-07, |
|
"loss": 0.4978, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 3.368421052631579e-07, |
|
"loss": 0.5254, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 3.1578947368421055e-07, |
|
"loss": 0.5738, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.828125, |
|
"learning_rate": 2.9473684210526315e-07, |
|
"loss": 0.5846, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 2.736842105263158e-07, |
|
"loss": 0.6591, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.0, |
|
"learning_rate": 2.5263157894736846e-07, |
|
"loss": 0.6697, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 4.25, |
|
"learning_rate": 2.315789473684211e-07, |
|
"loss": 0.4831, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 2.105263157894737e-07, |
|
"loss": 0.5504, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 1.8947368421052634e-07, |
|
"loss": 0.5576, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.875, |
|
"learning_rate": 1.6842105263157895e-07, |
|
"loss": 0.4733, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 1.4736842105263158e-07, |
|
"loss": 0.5077, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 4.5, |
|
"learning_rate": 1.2631578947368423e-07, |
|
"loss": 0.5068, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 1.0526315789473685e-07, |
|
"loss": 0.4754, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 8.421052631578947e-08, |
|
"loss": 0.5751, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 6.315789473684211e-08, |
|
"loss": 0.5133, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 4.2105263157894737e-08, |
|
"loss": 0.6084, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 16.5, |
|
"learning_rate": 2.1052631578947368e-08, |
|
"loss": 0.6159, |
|
"step": 9990 |
|
}, |
|
{ |
"epoch": 0.25, |
"grad_norm": 4.46875, |
"learning_rate": 0.0, |
"loss": 0.6108, |
"step": 10000 |
}, |
{ |
"epoch": 0.25, |
"eval_loss": 0.6137469410896301, |
"eval_runtime": 38.1278, |
"eval_samples_per_second": 26.228, |
"eval_steps_per_second": 26.228, |
"step": 10000 |
} |
], |
"logging_steps": 10, |
"max_steps": 10000, |
"num_input_tokens_seen": 0, |
"num_train_epochs": 1, |
"save_steps": 2500, |
"total_flos": 1.5733698330624e+17, |
"train_batch_size": 1, |
"trial_name": null, |
"trial_params": null |
} |