{
  "best_metric": 0.018313532695174217,
  "best_model_checkpoint": "runs/deepseek_lora_20240423-133229/checkpoint-2000",
  "epoch": 1.2532309861361322,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 1.5461480617523193, "learning_rate": 4.0000000000000003e-07, "loss": 1.5239, "step": 10},
    {"epoch": 0.01, "grad_norm": 2.797250747680664, "learning_rate": 8.000000000000001e-07, "loss": 1.4966, "step": 20},
    {"epoch": 0.01, "grad_norm": 4.274460315704346, "learning_rate": 1.2000000000000002e-06, "loss": 1.4788, "step": 30},
    {"epoch": 0.01, "grad_norm": 1.9981650114059448, "learning_rate": 1.6000000000000001e-06, "loss": 1.4001, "step": 40},
    {"epoch": 0.02, "grad_norm": 1.2322547435760498, "learning_rate": 2.0000000000000003e-06, "loss": 1.4683, "step": 50},
    {"epoch": 0.02, "grad_norm": 1.250878930091858, "learning_rate": 2.4000000000000003e-06, "loss": 1.1961, "step": 60},
    {"epoch": 0.02, "grad_norm": 4.917982578277588, "learning_rate": 2.8000000000000003e-06, "loss": 1.4261, "step": 70},
    {"epoch": 0.03, "grad_norm": 3.885340929031372, "learning_rate": 3.2000000000000003e-06, "loss": 1.2742, "step": 80},
    {"epoch": 0.03, "grad_norm": 1.170296311378479, "learning_rate": 3.6000000000000003e-06, "loss": 1.253, "step": 90},
    {"epoch": 0.03, "grad_norm": 6.992724895477295, "learning_rate": 4.000000000000001e-06, "loss": 1.2521, "step": 100},
    {"epoch": 0.03, "grad_norm": 0.9540568590164185, "learning_rate": 4.4e-06, "loss": 1.0828, "step": 110},
    {"epoch": 0.04, "grad_norm": 0.9779148697853088, "learning_rate": 4.800000000000001e-06, "loss": 1.1255, "step": 120},
    {"epoch": 0.04, "grad_norm": 1.2271130084991455, "learning_rate": 5.2e-06, "loss": 0.9496, "step": 130},
    {"epoch": 0.04, "grad_norm": 2.1939122676849365, "learning_rate": 5.600000000000001e-06, "loss": 0.953, "step": 140},
    {"epoch": 0.05, "grad_norm": 1.5918376445770264, "learning_rate": 6e-06, "loss": 0.9348, "step": 150},
    {"epoch": 0.05, "grad_norm": 0.772602379322052, "learning_rate": 6.4000000000000006e-06, "loss": 0.7762, "step": 160},
    {"epoch": 0.05, "grad_norm": 0.8226258158683777, "learning_rate": 6.800000000000001e-06, "loss": 0.6798, "step": 170},
    {"epoch": 0.06, "grad_norm": 1.0945135354995728, "learning_rate": 7.2000000000000005e-06, "loss": 0.5936, "step": 180},
    {"epoch": 0.06, "grad_norm": 0.6972842812538147, "learning_rate": 7.600000000000001e-06, "loss": 0.4161, "step": 190},
    {"epoch": 0.06, "grad_norm": 3.9560859203338623, "learning_rate": 8.000000000000001e-06, "loss": 0.3959, "step": 200},
    {"epoch": 0.07, "grad_norm": 0.4297197759151459, "learning_rate": 8.400000000000001e-06, "loss": 0.3166, "step": 210},
    {"epoch": 0.07, "grad_norm": 0.4494841992855072, "learning_rate": 8.8e-06, "loss": 0.2056, "step": 220},
    {"epoch": 0.07, "grad_norm": 0.6981032490730286, "learning_rate": 9.200000000000002e-06, "loss": 0.3024, "step": 230},
    {"epoch": 0.08, "grad_norm": 0.5783653259277344, "learning_rate": 9.600000000000001e-06, "loss": 0.4392, "step": 240},
    {"epoch": 0.08, "grad_norm": 0.33407163619995117, "learning_rate": 1e-05, "loss": 0.3708, "step": 250},
    {"epoch": 0.08, "grad_norm": 0.3096323311328888, "learning_rate": 1.04e-05, "loss": 0.1637, "step": 260},
    {"epoch": 0.08, "grad_norm": 1.6187375783920288, "learning_rate": 1.0800000000000002e-05, "loss": 0.1078, "step": 270},
    {"epoch": 0.09, "grad_norm": 0.27954748272895813, "learning_rate": 1.1200000000000001e-05, "loss": 0.2668, "step": 280},
    {"epoch": 0.09, "grad_norm": 0.4082590937614441, "learning_rate": 1.16e-05, "loss": 0.1843, "step": 290},
    {"epoch": 0.09, "grad_norm": 0.2993428707122803, "learning_rate": 1.2e-05, "loss": 0.1969, "step": 300},
    {"epoch": 0.1, "grad_norm": 3.0335004329681396, "learning_rate": 1.2400000000000002e-05, "loss": 0.122, "step": 310},
    {"epoch": 0.1, "grad_norm": 0.0972365066409111, "learning_rate": 1.2800000000000001e-05, "loss": 0.1354, "step": 320},
    {"epoch": 0.1, "grad_norm": 0.26203760504722595, "learning_rate": 1.3200000000000002e-05, "loss": 0.17, "step": 330},
    {"epoch": 0.11, "grad_norm": 5.179258346557617, "learning_rate": 1.3600000000000002e-05, "loss": 0.1409, "step": 340},
    {"epoch": 0.11, "grad_norm": 0.032190825790166855, "learning_rate": 1.4e-05, "loss": 0.076, "step": 350},
    {"epoch": 0.11, "grad_norm": 0.02556140348315239, "learning_rate": 1.4400000000000001e-05, "loss": 0.0468, "step": 360},
    {"epoch": 0.12, "grad_norm": 5.667189121246338, "learning_rate": 1.48e-05, "loss": 0.1345, "step": 370},
    {"epoch": 0.12, "grad_norm": 0.07244217395782471, "learning_rate": 1.5200000000000002e-05, "loss": 0.0411, "step": 380},
    {"epoch": 0.12, "grad_norm": 0.11255475878715515, "learning_rate": 1.5600000000000003e-05, "loss": 0.1907, "step": 390},
    {"epoch": 0.13, "grad_norm": 1.9602845907211304, "learning_rate": 1.6000000000000003e-05, "loss": 0.0717, "step": 400},
    {"epoch": 0.13, "grad_norm": 0.013551376760005951, "learning_rate": 1.64e-05, "loss": 0.0808, "step": 410},
    {"epoch": 0.13, "grad_norm": 0.14421305060386658, "learning_rate": 1.6800000000000002e-05, "loss": 0.0604, "step": 420},
    {"epoch": 0.13, "grad_norm": 0.01705547235906124, "learning_rate": 1.72e-05, "loss": 0.1742, "step": 430},
    {"epoch": 0.14, "grad_norm": 0.021360402926802635, "learning_rate": 1.76e-05, "loss": 0.0886, "step": 440},
    {"epoch": 0.14, "grad_norm": 0.04369840770959854, "learning_rate": 1.8e-05, "loss": 0.1063, "step": 450},
    {"epoch": 0.14, "grad_norm": 2.2816083431243896, "learning_rate": 1.8400000000000003e-05, "loss": 0.1061, "step": 460},
    {"epoch": 0.15, "grad_norm": 0.00989863183349371, "learning_rate": 1.88e-05, "loss": 0.2317, "step": 470},
    {"epoch": 0.15, "grad_norm": 0.014644264243543148, "learning_rate": 1.9200000000000003e-05, "loss": 0.1054, "step": 480},
    {"epoch": 0.15, "grad_norm": 0.13385985791683197, "learning_rate": 1.9600000000000002e-05, "loss": 0.1196, "step": 490},
    {"epoch": 0.16, "grad_norm": 2.099637985229492, "learning_rate": 2e-05, "loss": 0.1584, "step": 500},
    {"epoch": 0.16, "eval_loss": 0.05546905845403671, "eval_runtime": 62.1757, "eval_samples_per_second": 16.083, "eval_steps_per_second": 16.083, "step": 500},
    {"epoch": 0.16, "grad_norm": 0.013259253464639187, "learning_rate": 1.9955555555555557e-05, "loss": 0.0373, "step": 510},
    {"epoch": 0.16, "grad_norm": 0.1753215193748474, "learning_rate": 1.9911111111111112e-05, "loss": 0.0986, "step": 520},
    {"epoch": 0.17, "grad_norm": 0.006998735014349222, "learning_rate": 1.9866666666666667e-05, "loss": 0.1372, "step": 530},
    {"epoch": 0.17, "grad_norm": 0.3990759253501892, "learning_rate": 1.9822222222222226e-05, "loss": 0.127, "step": 540},
    {"epoch": 0.17, "grad_norm": 1.8248735666275024, "learning_rate": 1.977777777777778e-05, "loss": 0.1239, "step": 550},
    {"epoch": 0.18, "grad_norm": 0.005832708440721035, "learning_rate": 1.9733333333333336e-05, "loss": 0.1181, "step": 560},
    {"epoch": 0.18, "grad_norm": 0.10705375671386719, "learning_rate": 1.968888888888889e-05, "loss": 0.0972, "step": 570},
    {"epoch": 0.18, "grad_norm": 3.7429046630859375, "learning_rate": 1.9644444444444447e-05, "loss": 0.1543, "step": 580},
    {"epoch": 0.18, "grad_norm": 0.010058793239295483, "learning_rate": 1.9600000000000002e-05, "loss": 0.1146, "step": 590},
    {"epoch": 0.19, "grad_norm": 0.012124909088015556, "learning_rate": 1.9555555555555557e-05, "loss": 0.1582, "step": 600},
    {"epoch": 0.19, "grad_norm": 0.012700803577899933, "learning_rate": 1.9511111111111113e-05, "loss": 0.0711, "step": 610},
    {"epoch": 0.19, "grad_norm": 0.2153177261352539, "learning_rate": 1.9466666666666668e-05, "loss": 0.0452, "step": 620},
    {"epoch": 0.2, "grad_norm": 0.01289213914424181, "learning_rate": 1.9422222222222223e-05, "loss": 0.067, "step": 630},
    {"epoch": 0.2, "grad_norm": 0.007021366152912378, "learning_rate": 1.9377777777777778e-05, "loss": 0.0776, "step": 640},
    {"epoch": 0.2, "grad_norm": 0.005274974275380373, "learning_rate": 1.9333333333333333e-05, "loss": 0.2159, "step": 650},
    {"epoch": 0.21, "grad_norm": 1.6235843896865845, "learning_rate": 1.928888888888889e-05, "loss": 0.0664, "step": 660},
    {"epoch": 0.21, "grad_norm": 0.009710369631648064, "learning_rate": 1.9244444444444444e-05, "loss": 0.1125, "step": 670},
    {"epoch": 0.21, "grad_norm": 0.0918051227927208, "learning_rate": 1.9200000000000003e-05, "loss": 0.0755, "step": 680},
    {"epoch": 0.22, "grad_norm": 0.012988925911486149, "learning_rate": 1.9155555555555558e-05, "loss": 0.095, "step": 690},
    {"epoch": 0.22, "grad_norm": 0.09063850343227386, "learning_rate": 1.9111111111111113e-05, "loss": 0.0419, "step": 700},
    {"epoch": 0.22, "grad_norm": 3.1540229320526123, "learning_rate": 1.9066666666666668e-05, "loss": 0.0338, "step": 710},
    {"epoch": 0.23, "grad_norm": 0.009780442342162132, "learning_rate": 1.9022222222222223e-05, "loss": 0.0935, "step": 720},
    {"epoch": 0.23, "grad_norm": 0.011248274706304073, "learning_rate": 1.897777777777778e-05, "loss": 0.017, "step": 730},
    {"epoch": 0.23, "grad_norm": 0.03275947645306587, "learning_rate": 1.8933333333333334e-05, "loss": 0.1128, "step": 740},
    {"epoch": 0.23, "grad_norm": 2.224815607070923, "learning_rate": 1.888888888888889e-05, "loss": 0.1213, "step": 750},
    {"epoch": 0.24, "grad_norm": 1.9777376651763916, "learning_rate": 1.8844444444444444e-05, "loss": 0.1128, "step": 760},
    {"epoch": 0.24, "grad_norm": 7.6119561195373535, "learning_rate": 1.88e-05, "loss": 0.0557, "step": 770},
    {"epoch": 0.24, "grad_norm": 3.7092978954315186, "learning_rate": 1.8755555555555558e-05, "loss": 0.1196, "step": 780},
    {"epoch": 0.25, "grad_norm": 0.08959495276212692, "learning_rate": 1.8711111111111113e-05, "loss": 0.0231, "step": 790},
    {"epoch": 0.25, "grad_norm": 2.716322183609009, "learning_rate": 1.866666666666667e-05, "loss": 0.1253, "step": 800},
    {"epoch": 0.25, "grad_norm": 0.006216592621058226, "learning_rate": 1.8622222222222224e-05, "loss": 0.0377, "step": 810},
    {"epoch": 0.26, "grad_norm": 0.005503402091562748, "learning_rate": 1.857777777777778e-05, "loss": 0.1072, "step": 820},
    {"epoch": 0.26, "grad_norm": 0.4769572913646698, "learning_rate": 1.8533333333333334e-05, "loss": 0.0888, "step": 830},
    {"epoch": 0.26, "grad_norm": 0.034854818135499954, "learning_rate": 1.848888888888889e-05, "loss": 0.0424, "step": 840},
    {"epoch": 0.27, "grad_norm": 0.00536635797470808, "learning_rate": 1.8444444444444448e-05, "loss": 0.0161, "step": 850},
    {"epoch": 0.27, "grad_norm": 0.002790963975712657, "learning_rate": 1.8400000000000003e-05, "loss": 0.025, "step": 860},
    {"epoch": 0.27, "grad_norm": 0.003568426240235567, "learning_rate": 1.835555555555556e-05, "loss": 0.0597, "step": 870},
    {"epoch": 0.28, "grad_norm": 0.005008562933653593, "learning_rate": 1.8311111111111114e-05, "loss": 0.1817, "step": 880},
    {"epoch": 0.28, "grad_norm": 0.006756258197128773, "learning_rate": 1.826666666666667e-05, "loss": 0.0084, "step": 890},
    {"epoch": 0.28, "grad_norm": 2.3732385635375977, "learning_rate": 1.8222222222222224e-05, "loss": 0.072, "step": 900},
    {"epoch": 0.29, "grad_norm": 0.003554440103471279, "learning_rate": 1.817777777777778e-05, "loss": 0.0507, "step": 910},
    {"epoch": 0.29, "grad_norm": 0.06785377860069275, "learning_rate": 1.8133333333333335e-05, "loss": 0.0428, "step": 920},
    {"epoch": 0.29, "grad_norm": 0.009546133689582348, "learning_rate": 1.808888888888889e-05, "loss": 0.0179, "step": 930},
    {"epoch": 0.29, "grad_norm": 4.595461368560791, "learning_rate": 1.8044444444444445e-05, "loss": 0.0506, "step": 940},
    {"epoch": 0.3, "grad_norm": 2.310696601867676, "learning_rate": 1.8e-05, "loss": 0.0719, "step": 950},
    {"epoch": 0.3, "grad_norm": 1.8953649997711182, "learning_rate": 1.7955555555555556e-05, "loss": 0.1718, "step": 960},
    {"epoch": 0.3, "grad_norm": 1.706213116645813, "learning_rate": 1.791111111111111e-05, "loss": 0.0807, "step": 970},
    {"epoch": 0.31, "grad_norm": 0.06022366136312485, "learning_rate": 1.7866666666666666e-05, "loss": 0.0792, "step": 980},
    {"epoch": 0.31, "grad_norm": 0.033781811594963074, "learning_rate": 1.782222222222222e-05, "loss": 0.0864, "step": 990},
    {"epoch": 0.31, "grad_norm": 0.014879009686410427, "learning_rate": 1.7777777777777777e-05, "loss": 0.0527, "step": 1000},
    {"epoch": 0.31, "eval_loss": 0.03033074364066124, "eval_runtime": 62.1716, "eval_samples_per_second": 16.085, "eval_steps_per_second": 16.085, "step": 1000},
    {"epoch": 0.32, "grad_norm": 0.003431650809943676, "learning_rate": 1.7733333333333335e-05, "loss": 0.0425, "step": 1010},
    {"epoch": 0.32, "grad_norm": 0.005604119040071964, "learning_rate": 1.768888888888889e-05, "loss": 0.0727, "step": 1020},
    {"epoch": 0.32, "grad_norm": 0.058546032756567, "learning_rate": 1.7644444444444446e-05, "loss": 0.1304, "step": 1030},
    {"epoch": 0.33, "grad_norm": 1.0267212390899658, "learning_rate": 1.76e-05, "loss": 0.0496, "step": 1040},
    {"epoch": 0.33, "grad_norm": 3.5381667613983154, "learning_rate": 1.7555555555555556e-05, "loss": 0.0935, "step": 1050},
    {"epoch": 0.33, "grad_norm": 0.006851164624094963, "learning_rate": 1.751111111111111e-05, "loss": 0.0344, "step": 1060},
    {"epoch": 0.34, "grad_norm": 0.005180824548006058, "learning_rate": 1.7466666666666667e-05, "loss": 0.0195, "step": 1070},
    {"epoch": 0.34, "grad_norm": 1.586403489112854, "learning_rate": 1.7422222222222222e-05, "loss": 0.0868, "step": 1080},
    {"epoch": 0.34, "grad_norm": 0.14490236341953278, "learning_rate": 1.737777777777778e-05, "loss": 0.0489, "step": 1090},
    {"epoch": 0.34, "grad_norm": 0.005752285942435265, "learning_rate": 1.7333333333333336e-05, "loss": 0.0625, "step": 1100},
    {"epoch": 0.35, "grad_norm": 0.8000261187553406, "learning_rate": 1.728888888888889e-05, "loss": 0.0923, "step": 1110},
    {"epoch": 0.35, "grad_norm": 0.024827931076288223, "learning_rate": 1.7244444444444446e-05, "loss": 0.0355, "step": 1120},
    {"epoch": 0.35, "grad_norm": 3.424243927001953, "learning_rate": 1.72e-05, "loss": 0.0659, "step": 1130},
    {"epoch": 0.36, "grad_norm": 3.4055392742156982, "learning_rate": 1.7155555555555557e-05, "loss": 0.0681, "step": 1140},
    {"epoch": 0.36, "grad_norm": 0.0077874124981462955, "learning_rate": 1.7111111111111112e-05, "loss": 0.106, "step": 1150},
    {"epoch": 0.36, "grad_norm": 0.006299824919551611, "learning_rate": 1.706666666666667e-05, "loss": 0.0438, "step": 1160},
    {"epoch": 0.37, "grad_norm": 0.004886701237410307, "learning_rate": 1.7022222222222226e-05, "loss": 0.0403, "step": 1170},
    {"epoch": 0.37, "grad_norm": 0.0028305843006819487, "learning_rate": 1.697777777777778e-05, "loss": 0.0299, "step": 1180},
    {"epoch": 0.37, "grad_norm": 0.36411717534065247, "learning_rate": 1.6933333333333336e-05, "loss": 0.0639, "step": 1190},
    {"epoch": 0.38, "grad_norm": 0.004984024912118912, "learning_rate": 1.688888888888889e-05, "loss": 0.1363, "step": 1200},
    {"epoch": 0.38, "grad_norm": 2.4117214679718018, "learning_rate": 1.6844444444444447e-05, "loss": 0.018, "step": 1210},
    {"epoch": 0.38, "grad_norm": 1.2183905839920044, "learning_rate": 1.6800000000000002e-05, "loss": 0.0062, "step": 1220},
    {"epoch": 0.39, "grad_norm": 0.05338621512055397, "learning_rate": 1.6755555555555557e-05, "loss": 0.0343, "step": 1230},
    {"epoch": 0.39, "grad_norm": 0.009272832423448563, "learning_rate": 1.6711111111111112e-05, "loss": 0.073, "step": 1240},
    {"epoch": 0.39, "grad_norm": 0.0033628540113568306, "learning_rate": 1.6666666666666667e-05, "loss": 0.0272, "step": 1250},
    {"epoch": 0.39, "grad_norm": 0.055562809109687805, "learning_rate": 1.6622222222222223e-05, "loss": 0.0821, "step": 1260},
    {"epoch": 0.4, "grad_norm": 1.6196753978729248, "learning_rate": 1.6577777777777778e-05, "loss": 0.0153, "step": 1270},
    {"epoch": 0.4, "grad_norm": 0.004278264008462429, "learning_rate": 1.6533333333333333e-05, "loss": 0.022, "step": 1280},
    {"epoch": 0.4, "grad_norm": 0.044023897498846054, "learning_rate": 1.648888888888889e-05, "loss": 0.0652, "step": 1290},
    {"epoch": 0.41, "grad_norm": 0.08157920837402344, "learning_rate": 1.6444444444444444e-05, "loss": 0.0745, "step": 1300},
    {"epoch": 0.41, "grad_norm": 2.1145012378692627, "learning_rate": 1.64e-05, "loss": 0.0497, "step": 1310},
    {"epoch": 0.41, "grad_norm": 0.00495466822758317, "learning_rate": 1.6355555555555557e-05, "loss": 0.0688, "step": 1320},
    {"epoch": 0.42, "grad_norm": 2.055875778198242, "learning_rate": 1.6311111111111113e-05, "loss": 0.0456, "step": 1330},
    {"epoch": 0.42, "grad_norm": 7.260204315185547, "learning_rate": 1.6266666666666668e-05, "loss": 0.0968, "step": 1340},
    {"epoch": 0.42, "grad_norm": 0.00300275138579309, "learning_rate": 1.6222222222222223e-05, "loss": 0.0031, "step": 1350},
    {"epoch": 0.43, "grad_norm": 0.6148861050605774, "learning_rate": 1.617777777777778e-05, "loss": 0.1008, "step": 1360},
    {"epoch": 0.43, "grad_norm": 0.005163553170859814, "learning_rate": 1.6133333333333334e-05, "loss": 0.0454, "step": 1370},
    {"epoch": 0.43, "grad_norm": 3.313544988632202, "learning_rate": 1.608888888888889e-05, "loss": 0.0843, "step": 1380},
    {"epoch": 0.44, "grad_norm": 0.061028964817523956, "learning_rate": 1.6044444444444444e-05, "loss": 0.0467, "step": 1390},
    {"epoch": 0.44, "grad_norm": 0.011183706112205982, "learning_rate": 1.6000000000000003e-05, "loss": 0.0539, "step": 1400},
    {"epoch": 0.44, "grad_norm": 0.00391357159242034, "learning_rate": 1.5955555555555558e-05, "loss": 0.007, "step": 1410},
    {"epoch": 0.44, "grad_norm": 0.21193568408489227, "learning_rate": 1.5911111111111113e-05, "loss": 0.0723, "step": 1420},
    {"epoch": 0.45, "grad_norm": 1.344232201576233, "learning_rate": 1.586666666666667e-05, "loss": 0.0093, "step": 1430},
    {"epoch": 0.45, "grad_norm": 2.125556707382202, "learning_rate": 1.5822222222222224e-05, "loss": 0.1089, "step": 1440},
    {"epoch": 0.45, "grad_norm": 1.9358251094818115, "learning_rate": 1.577777777777778e-05, "loss": 0.129, "step": 1450},
    {"epoch": 0.46, "grad_norm": 0.25835612416267395, "learning_rate": 1.5733333333333334e-05, "loss": 0.0225, "step": 1460},
    {"epoch": 0.46, "grad_norm": 0.004413430579006672, "learning_rate": 1.5688888888888893e-05, "loss": 0.0184, "step": 1470},
    {"epoch": 0.46, "grad_norm": 2.6624996662139893, "learning_rate": 1.5644444444444448e-05, "loss": 0.0924, "step": 1480},
    {"epoch": 0.47, "grad_norm": 0.023669052869081497, "learning_rate": 1.5600000000000003e-05, "loss": 0.0468, "step": 1490},
    {"epoch": 0.47, "grad_norm": 0.9958231449127197, "learning_rate": 1.555555555555556e-05, "loss": 0.0602, "step": 1500},
    {"epoch": 0.47, "eval_loss": 0.023086324334144592, "eval_runtime": 62.044, "eval_samples_per_second": 16.118, "eval_steps_per_second": 16.118, "step": 1500},
    {"epoch": 0.47, "grad_norm": 0.0457289032638073, "learning_rate": 1.5511111111111114e-05, "loss": 0.0725, "step": 1510},
    {"epoch": 0.48, "grad_norm": 2.0417847633361816, "learning_rate": 1.546666666666667e-05, "loss": 0.0446, "step": 1520},
    {"epoch": 0.48, "grad_norm": 0.007491718512028456, "learning_rate": 1.5422222222222224e-05, "loss": 0.0693, "step": 1530},
    {"epoch": 0.48, "grad_norm": 0.0023406276013702154, "learning_rate": 1.537777777777778e-05, "loss": 0.0398, "step": 1540},
    {"epoch": 0.49, "grad_norm": 0.03808426856994629, "learning_rate": 1.5333333333333334e-05, "loss": 0.0827, "step": 1550},
    {"epoch": 0.49, "grad_norm": 0.04878142476081848, "learning_rate": 1.528888888888889e-05, "loss": 0.0094, "step": 1560},
    {"epoch": 0.49, "grad_norm": 0.002342456951737404, "learning_rate": 1.5244444444444447e-05, "loss": 0.0308, "step": 1570},
    {"epoch": 0.5, "grad_norm": 0.002332750940695405, "learning_rate": 1.5200000000000002e-05, "loss": 0.0703, "step": 1580},
    {"epoch": 0.5, "grad_norm": 0.027806026861071587, "learning_rate": 1.5155555555555557e-05, "loss": 0.0577, "step": 1590},
    {"epoch": 0.5, "grad_norm": 1.853770136833191, "learning_rate": 1.5111111111111112e-05, "loss": 0.0872, "step": 1600},
    {"epoch": 0.5, "grad_norm": 0.02809343859553337, "learning_rate": 1.5066666666666668e-05, "loss": 0.0651, "step": 1610},
    {"epoch": 0.51, "grad_norm": 0.29635584354400635, "learning_rate": 1.5022222222222223e-05, "loss": 0.0784, "step": 1620},
    {"epoch": 0.51, "grad_norm": 0.006036743987351656, "learning_rate": 1.497777777777778e-05, "loss": 0.0702, "step": 1630},
    {"epoch": 0.51, "grad_norm": 0.0020562768913805485, "learning_rate": 1.4933333333333335e-05, "loss": 0.1133, "step": 1640},
    {"epoch": 0.52, "grad_norm": 2.0853707790374756, "learning_rate": 1.488888888888889e-05, "loss": 0.0717, "step": 1650},
    {"epoch": 0.52, "grad_norm": 2.024653434753418, "learning_rate": 1.4844444444444445e-05, "loss": 0.0978, "step": 1660},
    {"epoch": 0.52, "grad_norm": 0.0029632914811372757, "learning_rate": 1.48e-05, "loss": 0.0269, "step": 1670},
    {"epoch": 0.53, "grad_norm": 3.2619192600250244, "learning_rate": 1.4755555555555556e-05, "loss": 0.088, "step": 1680},
    {"epoch": 0.53, "grad_norm": 0.9590596556663513, "learning_rate": 1.4711111111111111e-05, "loss": 0.0348, "step": 1690},
    {"epoch": 0.53, "grad_norm": 0.01058279536664486, "learning_rate": 1.4666666666666666e-05, "loss": 0.0473, "step": 1700},
    {"epoch": 0.54, "grad_norm": 4.378236293792725, "learning_rate": 1.4622222222222225e-05, "loss": 0.0839, "step": 1710},
    {"epoch": 0.54, "grad_norm": 0.04728490114212036, "learning_rate": 1.457777777777778e-05, "loss": 0.1131, "step": 1720},
    {"epoch": 0.54, "grad_norm": 0.03082725591957569, "learning_rate": 1.4533333333333335e-05, "loss": 0.058, "step": 1730},
    {"epoch": 0.55, "grad_norm": 1.4159008264541626, "learning_rate": 1.448888888888889e-05, "loss": 0.027, "step": 1740},
    {"epoch": 0.55, "grad_norm": 0.09299231320619583, "learning_rate": 1.4444444444444446e-05, "loss": 0.0152, "step": 1750},
    {"epoch": 0.55, "grad_norm": 0.0018398365937173367, "learning_rate": 1.4400000000000001e-05, "loss": 0.0005, "step": 1760},
    {"epoch": 0.55, "grad_norm": 0.0032970942556858063, "learning_rate": 1.4355555555555556e-05, "loss": 0.068, "step": 1770},
    {"epoch": 0.56, "grad_norm": 1.9869989156723022, "learning_rate": 1.4311111111111111e-05, "loss": 0.0802, "step": 1780},
    {"epoch": 0.56, "grad_norm": 0.4349244236946106, "learning_rate": 1.4266666666666668e-05, "loss": 0.0022, "step": 1790},
    {"epoch": 0.56, "grad_norm": 6.195014476776123, "learning_rate": 1.4222222222222224e-05, "loss": 0.075, "step": 1800},
    {"epoch": 0.57, "grad_norm": 4.658276557922363, "learning_rate": 1.4177777777777779e-05, "loss": 0.0341, "step": 1810},
    {"epoch": 0.57, "grad_norm": 2.3556618690490723, "learning_rate": 1.4133333333333334e-05, "loss": 0.0735, "step": 1820},
    {"epoch": 0.57, "grad_norm": 0.00235918490216136, "learning_rate": 1.408888888888889e-05, "loss": 0.0133, "step": 1830},
    {"epoch": 0.58, "grad_norm": 0.0019710322376340628, "learning_rate": 1.4044444444444445e-05, "loss": 0.0714, "step": 1840},
    {"epoch": 0.58, "grad_norm": 0.0038756057620048523, "learning_rate": 1.4e-05, "loss": 0.058, "step": 1850},
    {"epoch": 0.58, "grad_norm": 0.13870251178741455, "learning_rate": 1.3955555555555558e-05, "loss": 0.0598, "step": 1860},
    {"epoch": 0.59, "grad_norm": 0.0035449981223791838, "learning_rate": 1.3911111111111114e-05, "loss": 0.0825, "step": 1870},
    {"epoch": 0.59, "grad_norm": 0.005458540748804808, "learning_rate": 1.3866666666666669e-05, "loss": 0.0485, "step": 1880},
    {"epoch": 0.59, "grad_norm": 0.0037788543850183487, "learning_rate": 1.3822222222222224e-05, "loss": 0.0279, "step": 1890},
    {"epoch": 0.6, "grad_norm": 2.4127650260925293, "learning_rate": 1.377777777777778e-05, "loss": 0.0414, "step": 1900},
    {"epoch": 0.6, "grad_norm": 1.9315361976623535, "learning_rate": 1.3733333333333335e-05, "loss": 0.0541, "step": 1910},
    {"epoch": 0.6, "grad_norm": 0.001827953034080565, "learning_rate": 1.368888888888889e-05, "loss": 0.0607, "step": 1920},
    {"epoch": 0.6, "grad_norm": 3.0561037063598633, "learning_rate": 1.3644444444444445e-05, "loss": 0.0489, "step": 1930},
    {"epoch": 0.61, "grad_norm": 1.7366918325424194, "learning_rate": 1.3600000000000002e-05, "loss": 0.0107, "step": 1940},
    {"epoch": 0.61, "grad_norm": 0.0025562909431755543, "learning_rate": 1.3555555555555557e-05, "loss": 0.0305, "step": 1950},
    {"epoch": 0.61, "grad_norm": 0.006456757429987192, "learning_rate": 1.3511111111111112e-05, "loss": 0.0741, "step": 1960},
    {"epoch": 0.62, "grad_norm": 0.003276415169239044, "learning_rate": 1.3466666666666668e-05, "loss": 0.0276, "step": 1970},
    {"epoch": 0.62, "grad_norm": 0.0036139176227152348, "learning_rate": 1.3422222222222223e-05, "loss": 0.0886, "step": 1980},
    {"epoch": 0.62, "grad_norm": 0.006649693939834833, "learning_rate": 1.3377777777777778e-05, "loss": 0.0179, "step": 1990},
    {"epoch": 0.63, "grad_norm": 0.002991742454469204, "learning_rate": 1.3333333333333333e-05, "loss": 0.0372, "step": 2000},
    {"epoch": 0.63, "eval_loss": 0.018313532695174217, "eval_runtime": 62.0835, "eval_samples_per_second": 16.107, "eval_steps_per_second": 16.107, "step": 2000},
    {"epoch": 0.63, "grad_norm": 0.0025979657657444477, "learning_rate": 1.3288888888888889e-05, "loss": 0.0485, "step": 2010},
    {"epoch": 0.63, "grad_norm": 0.00765944505110383, "learning_rate": 1.3244444444444447e-05, "loss": 0.0292, "step": 2020},
    {"epoch": 0.64, "grad_norm": 0.9524846076965332, "learning_rate": 1.3200000000000002e-05, "loss": 0.0545, "step": 2030},
    {"epoch": 0.64, "grad_norm": 0.0015762520488351583, "learning_rate": 1.3155555555555558e-05, "loss": 0.0003, "step": 2040},
    {"epoch": 0.64, "grad_norm": 0.12439166754484177, "learning_rate": 1.3111111111111113e-05, "loss": 0.0346, "step": 2050},
    {"epoch": 0.65, "grad_norm": 0.002841875422745943, "learning_rate": 1.3066666666666668e-05, "loss": 0.0785, "step": 2060},
    {"epoch": 0.65, "grad_norm": 1.3388820886611938, "learning_rate": 1.3022222222222223e-05, "loss": 0.0748, "step": 2070},
    {"epoch": 0.65, "grad_norm": 0.003183074528351426, "learning_rate": 1.2977777777777779e-05, "loss": 0.0653, "step": 2080},
    {"epoch": 0.65, "grad_norm": 1.0637716054916382, "learning_rate": 1.2933333333333334e-05, "loss": 0.0522, "step": 2090},
    {"epoch": 0.66, "grad_norm": 1.1531606912612915, "learning_rate": 1.288888888888889e-05, "loss": 0.0968, "step": 2100},
    {"epoch": 0.66, "grad_norm": 0.0031077053863555193, "learning_rate": 1.2844444444444446e-05, "loss": 0.0419, "step": 2110},
    {"epoch": 0.66, "grad_norm": 0.011700381524860859, "learning_rate": 1.2800000000000001e-05, "loss": 0.0525, "step": 2120},
    {"epoch": 0.67, "grad_norm": 0.0015547301154583693, "learning_rate": 1.2755555555555556e-05, "loss": 0.0473, "step": 2130},
    {"epoch": 0.67, "grad_norm": 0.0020142821595072746, "learning_rate": 1.2711111111111112e-05, "loss": 0.044, "step": 2140},
    {"epoch": 0.67, "grad_norm": 0.005582904908806086, "learning_rate": 1.2666666666666667e-05, "loss": 0.0789, "step": 2150},
    {"epoch": 0.68, "grad_norm": 0.0064437431283295155, "learning_rate": 1.2622222222222222e-05, "loss": 0.099, "step": 2160},
    {"epoch": 0.68, "grad_norm": 0.0012363777495920658, "learning_rate": 1.257777777777778e-05, "loss": 0.0411, "step": 2170},
    {"epoch": 0.68, "grad_norm": 0.14603936672210693, "learning_rate": 1.2533333333333336e-05, "loss": 0.006, "step": 2180},
    {"epoch": 0.69, "grad_norm": 0.001298425835557282, "learning_rate": 1.2488888888888891e-05, "loss": 0.1263, "step": 2190},
    {"epoch": 0.69, "grad_norm": 0.001547910855151713, "learning_rate": 1.2444444444444446e-05, "loss": 0.0342, "step": 2200},
    {"epoch": 0.69, "grad_norm": 0.0013805734924972057, "learning_rate": 1.2400000000000002e-05, "loss": 0.0824, "step": 2210},
    {"epoch": 0.7, "grad_norm": 1.123624563217163, "learning_rate": 1.2355555555555557e-05, "loss": 0.0202, "step": 2220},
    {"epoch": 0.7, "grad_norm": 0.0014894693158566952, "learning_rate": 1.2311111111111112e-05, "loss": 0.0313, "step": 2230},
    {"epoch": 0.7, "grad_norm": 2.194322109222412, "learning_rate": 1.2266666666666667e-05, "loss": 0.123, "step": 2240},
    {"epoch": 0.7, "grad_norm": 2.4329869747161865, "learning_rate": 1.2222222222222224e-05, "loss": 0.0619, "step": 2250},
    {"epoch": 0.71, "grad_norm": 0.0048148720525205135, "learning_rate": 1.217777777777778e-05, "loss": 0.0105, "step": 2260},
    {"epoch": 0.71, "grad_norm": 0.8393607139587402, "learning_rate": 1.2133333333333335e-05, "loss": 0.0763, "step": 2270},
    {"epoch": 0.71, "grad_norm": 0.0013370034284889698, "learning_rate": 1.208888888888889e-05, "loss": 0.0263, "step": 2280},
    {"epoch": 0.72, "grad_norm": 0.0011019165394827724, "learning_rate": 1.2044444444444445e-05, "loss": 0.0451, "step": 2290},
    {"epoch": 0.72, "grad_norm": 0.0017341958591714501, "learning_rate": 1.2e-05, "loss": 0.0612, "step": 2300},
    {"epoch": 0.72, "grad_norm": 0.0012070784578099847, "learning_rate": 1.1955555555555556e-05, "loss": 0.0383, "step": 2310},
    {"epoch": 0.73, "grad_norm": 0.004143261816352606, "learning_rate": 1.191111111111111e-05, "loss": 0.0523, "step": 2320},
    {"epoch": 0.73, "grad_norm": 0.0022921899799257517, "learning_rate": 1.186666666666667e-05, "loss": 0.054, "step": 2330},
    {"epoch": 0.73, "grad_norm": 1.052677869796753, "learning_rate": 1.1822222222222225e-05, "loss": 0.0329, "step": 2340},
    {"epoch": 0.74, "grad_norm": 0.5296663641929626, "learning_rate": 1.177777777777778e-05, "loss": 0.075, "step": 2350},
    {"epoch": 0.74, "grad_norm": 0.001526731881313026, "learning_rate": 1.1733333333333335e-05, "loss": 0.0386, "step": 2360},
    {"epoch": 0.74, "grad_norm": 4.293755531311035, "learning_rate": 1.168888888888889e-05, "loss": 0.0576, "step": 2370},
    {"epoch": 0.75, "grad_norm": 0.002120732795447111, "learning_rate": 1.1644444444444446e-05, "loss": 0.0398, "step": 2380},
    {"epoch": 0.75, "grad_norm": 0.001393395708873868, "learning_rate": 1.16e-05, "loss": 0.0335, "step": 2390},
    {"epoch": 0.75, "grad_norm": 0.001357159111648798, "learning_rate": 1.1555555555555556e-05, "loss": 0.0186, "step": 2400},
    {"epoch": 0.76, "grad_norm": 2.845559597015381, "learning_rate": 1.1511111111111113e-05, "loss": 0.0231, "step": 2410},
    {"epoch": 0.76, "grad_norm": 0.001933318912051618, "learning_rate": 1.1466666666666668e-05, "loss": 0.0572, "step": 2420},
    {"epoch": 0.76, "grad_norm": 0.08605944365262985, "learning_rate": 1.1422222222222223e-05, "loss": 0.0361, "step": 2430},
    {"epoch": 0.76, "grad_norm": 2.6697850227355957, "learning_rate": 1.1377777777777779e-05, "loss": 0.1098, "step": 2440},
    {"epoch": 0.77, "grad_norm": 1.6600360870361328, "learning_rate": 1.1333333333333334e-05, "loss": 0.0599, "step": 2450},
    {"epoch": 0.77, "grad_norm": 2.8962271213531494, "learning_rate": 1.1288888888888889e-05, "loss": 0.0333, "step": 2460},
    {"epoch": 0.77, "grad_norm": 0.3499470055103302, "learning_rate": 1.1244444444444444e-05, "loss": 0.0276, "step": 2470},
    {"epoch": 0.78, "grad_norm": 0.0011357500916346908, "learning_rate": 1.1200000000000001e-05, "loss": 0.0662, "step": 2480},
    {"epoch": 0.78, "grad_norm": 0.9571266174316406, "learning_rate": 1.1155555555555556e-05, "loss": 0.0403, "step": 2490},
    {"epoch": 0.78, "grad_norm": 0.0018001727294176817, "learning_rate": 1.1111111111111113e-05, "loss": 0.0549, "step": 2500},
    {"epoch": 0.78, "eval_loss": 0.019961679354310036, "eval_runtime": 62.0505, "eval_samples_per_second": 16.116, "eval_steps_per_second": 16.116, "step": 2500},
    {"epoch": 0.79, "grad_norm": 0.002932826289907098, "learning_rate": 1.1066666666666669e-05, "loss": 0.0785, "step": 2510},
    {"epoch": 0.79, "grad_norm": 0.0028327910695225, "learning_rate": 1.1022222222222224e-05, "loss": 0.0283, "step": 2520},
    {"epoch": 0.79, "grad_norm": 1.9044318199157715, "learning_rate": 1.0977777777777779e-05, "loss": 0.1637, "step": 2530},
    {"epoch": 0.8, "grad_norm": 0.0019232028862461448, "learning_rate": 1.0933333333333334e-05, "loss": 0.004, "step": 2540},
    {"epoch": 0.8, "grad_norm": 0.002454034984111786, "learning_rate": 1.088888888888889e-05, "loss": 0.0091, "step": 2550},
    {"epoch": 0.8, "grad_norm": 0.0015948887448757887, "learning_rate": 1.0844444444444446e-05, "loss": 0.0276, "step": 2560},
    {"epoch": 0.81, "grad_norm": 1.5870649814605713, "learning_rate": 1.0800000000000002e-05, "loss": 0.0262, "step": 2570},
    {"epoch": 0.81, "grad_norm": 2.2778539657592773, "learning_rate": 1.0755555555555557e-05, "loss": 0.0424, "step": 2580},
    {"epoch": 0.81, "grad_norm": 2.6451878547668457, "learning_rate": 1.0711111111111112e-05, "loss": 0.0679, "step": 2590},
    {"epoch": 0.81, "grad_norm": 0.001484246808104217, "learning_rate": 1.0666666666666667e-05, "loss": 0.0373, "step": 2600},
    {"epoch": 0.82, "grad_norm": 0.001308325445279479, "learning_rate": 1.0622222222222223e-05, "loss": 0.0191, "step": 2610},
    {"epoch": 0.82, "grad_norm": 0.0027939951978623867, "learning_rate": 1.0577777777777778e-05, "loss": 0.0459, "step": 2620},
    {"epoch": 0.82, "grad_norm": 0.009125540032982826, "learning_rate": 1.0533333333333333e-05, "loss": 0.0704, "step": 2630},
    {"epoch": 0.83, "grad_norm": 0.0012461950536817312, "learning_rate": 1.048888888888889e-05, "loss": 0.0463, "step": 2640},
    {"epoch": 0.83, "grad_norm": 2.906261920928955, "learning_rate": 1.0444444444444445e-05, "loss": 0.0923, "step": 2650},
    {"epoch": 0.83, "grad_norm": 3.149310827255249, "learning_rate": 1.04e-05, "loss": 0.0558, "step": 2660},
    {"epoch": 0.84, "grad_norm": 0.07791195064783096, "learning_rate": 1.0355555555555557e-05, "loss": 0.0148, "step": 2670},
    {"epoch": 0.84, "grad_norm": 0.0020532067865133286, "learning_rate": 1.0311111111111113e-05, "loss": 0.062, "step": 2680},
    {"epoch": 0.84, "grad_norm": 0.0010498109040781856, "learning_rate": 1.0266666666666668e-05, "loss": 0.0577, "step": 2690},
    {"epoch": 0.85, "grad_norm": 0.1568715125322342, "learning_rate": 1.0222222222222223e-05, "loss": 0.0025, "step": 2700},
    {"epoch": 0.85, "grad_norm": 0.4488503932952881, "learning_rate": 1.0177777777777778e-05, "loss": 0.0395, "step": 2710},
    {"epoch": 0.85, "grad_norm": 1.2777860164642334, "learning_rate": 1.0133333333333335e-05, "loss": 0.0293, "step": 2720},
    {"epoch": 0.86, "grad_norm": 0.08910083770751953, "learning_rate": 1.008888888888889e-05, "loss": 0.0319, "step": 2730},
    {"epoch": 0.86, "grad_norm": 0.0010414342395961285, "learning_rate": 1.0044444444444446e-05, "loss": 0.0082, "step": 2740},
    {"epoch": 0.86, "grad_norm": 0.0031285579316318035, "learning_rate": 1e-05, "loss": 0.0415, "step": 2750},
    {"epoch": 0.86, "grad_norm": 0.005843820981681347, "learning_rate": 9.955555555555556e-06, "loss": 0.0557, "step": 2760},
    {"epoch": 0.87, "grad_norm": 0.0012017801636829972, "learning_rate": 9.911111111111113e-06, "loss": 0.052, "step": 2770},
    {"epoch": 0.87, "grad_norm": 0.001439902582205832, "learning_rate": 9.866666666666668e-06, "loss": 0.0853, "step": 2780},
    {"epoch": 0.87, "grad_norm": 0.0035338387824594975, "learning_rate": 9.822222222222223e-06, "loss": 0.0227, "step": 2790},
    {"epoch": 0.88, "grad_norm": 0.0015976278809830546, "learning_rate": 9.777777777777779e-06, "loss": 0.0647, "step": 2800},
    {"epoch": 0.88, "grad_norm": 0.0012226419057697058, "learning_rate": 9.733333333333334e-06, "loss": 0.1104, "step": 2810},
    {"epoch": 0.88, "grad_norm": 0.5588458776473999, "learning_rate": 9.688888888888889e-06, "loss": 0.0732, "step": 2820},
    {"epoch": 0.89, "grad_norm": 0.45813611149787903, "learning_rate": 9.644444444444444e-06, "loss": 0.0565, "step": 2830},
    {"epoch": 0.89, "grad_norm": 0.5053278207778931, "learning_rate": 9.600000000000001e-06, "loss": 0.0676, "step": 2840},
    {"epoch": 0.89, "grad_norm": 0.002007542410865426, "learning_rate": 9.555555555555556e-06, "loss": 0.0256, "step": 2850},
    {"epoch": 0.9, "grad_norm": 0.0025233286432921886, "learning_rate": 9.511111111111112e-06, "loss": 0.057, "step": 2860},
    {"epoch": 0.9, "grad_norm": 0.0035814358852803707, "learning_rate": 9.466666666666667e-06, "loss": 0.1282, "step": 2870},
    {"epoch": 0.9, "grad_norm": 0.0031847648788243532, "learning_rate": 9.422222222222222e-06, "loss": 0.0126, "step": 2880},
    {"epoch": 0.91, "grad_norm": 0.0034481678158044815, "learning_rate": 9.377777777777779e-06, "loss": 0.0627, "step": 2890},
    {"epoch": 0.91, "grad_norm": 0.18580594658851624, "learning_rate": 9.333333333333334e-06, "loss": 0.035, "step": 2900},
    {"epoch": 0.91, "grad_norm": 0.0014439361402764916, "learning_rate": 9.28888888888889e-06, "loss": 0.0766, "step": 2910},
    {"epoch": 0.91, "grad_norm": 0.001558059360831976, "learning_rate": 9.244444444444445e-06, "loss": 0.023, "step": 2920},
    {"epoch": 0.92, "grad_norm": 0.0029459816869348288, "learning_rate": 9.200000000000002e-06, "loss": 0.041, "step": 2930},
    {"epoch": 0.92, "grad_norm": 0.0017032440518960357, "learning_rate": 9.155555555555557e-06, "loss": 0.0191, "step": 2940},
    {"epoch": 0.92, "grad_norm": 0.002062348648905754, "learning_rate": 9.111111111111112e-06, "loss": 0.0263, "step": 2950},
    {"epoch": 0.93, "grad_norm": 0.001492173527367413, "learning_rate": 9.066666666666667e-06, "loss": 0.0419, "step": 2960},
    {"epoch": 0.93, "grad_norm": 0.903357982635498, "learning_rate": 9.022222222222223e-06, "loss": 0.0557, "step": 2970},
    {"epoch": 0.93, "grad_norm": 0.0011335255112498999, "learning_rate": 8.977777777777778e-06, "loss": 0.0065, "step": 2980},
    {"epoch": 0.94, "grad_norm": 0.0009899291908368468, "learning_rate": 8.933333333333333e-06, "loss": 0.0339, "step": 2990},
    {"epoch": 0.94, "grad_norm": 0.0012059457367286086, "learning_rate": 8.888888888888888e-06, "loss": 0.0374, "step": 3000},
    {"epoch": 0.94, "eval_loss": 0.022419294342398643, "eval_runtime": 62.1056, "eval_samples_per_second": 16.102, "eval_steps_per_second": 16.102, "step": 3000},
    {"epoch": 0.94, "grad_norm": 1.89267098903656, "learning_rate": 8.844444444444445e-06, "loss": 0.1307, "step": 3010},
    {"epoch": 0.95, "grad_norm": 0.0008318617474287748, "learning_rate": 8.8e-06, "loss": 0.0331, "step": 3020},
    {"epoch": 0.95, "grad_norm": 0.00299050472676754, "learning_rate": 8.755555555555556e-06, "loss": 0.0166, "step": 3030},
    {"epoch": 0.95, "grad_norm": 3.3594794273376465, "learning_rate": 8.711111111111111e-06, "loss": 0.1204, "step": 3040},
    {"epoch": 0.96, "grad_norm": 0.039698030799627304, "learning_rate": 8.666666666666668e-06, "loss": 0.0397, "step": 3050},
    {"epoch": 0.96, "grad_norm": 0.0027413545176386833, "learning_rate": 8.622222222222223e-06, "loss": 0.0555, "step": 3060},
    {"epoch": 0.96, "grad_norm": 1.7610267400741577, "learning_rate": 8.577777777777778e-06, "loss": 0.0595, "step": 3070},
    {"epoch": 0.96, "grad_norm": 0.001599436509422958, "learning_rate": 8.533333333333335e-06, "loss": 0.0525, "step": 3080},
    {"epoch": 0.97, "grad_norm": 3.3269574642181396, "learning_rate": 8.48888888888889e-06, "loss": 0.0149, "step": 3090},
    {"epoch": 0.97, "grad_norm": 0.0009359756950289011, "learning_rate": 8.444444444444446e-06, "loss": 0.0772, "step": 3100},
    {"epoch": 0.97, "grad_norm": 0.002731655491515994, "learning_rate": 8.400000000000001e-06, "loss": 0.1128, "step": 3110},
    {"epoch": 0.98, "grad_norm": 0.01761116273701191, "learning_rate": 8.355555555555556e-06, "loss": 0.0228, "step": 3120},
    {"epoch": 0.98, "grad_norm": 0.0019919571932405233, "learning_rate": 8.311111111111111e-06, "loss": 0.0389, "step": 3130},
    {"epoch": 0.98, "grad_norm": 0.0010057106846943498, "learning_rate": 8.266666666666667e-06, "loss": 0.1025, "step": 3140},
    {"epoch": 0.99, "grad_norm": 1.3677994012832642, "learning_rate": 8.222222222222222e-06, "loss": 0.0159, "step": 3150},
    {"epoch": 0.99, "grad_norm": 0.0024271418806165457, "learning_rate": 8.177777777777779e-06, "loss": 0.031, "step": 3160},
    {"epoch": 0.99, "grad_norm": 0.0010294296080246568, "learning_rate": 8.133333333333334e-06, "loss": 0.0168, "step": 3170},
    {"epoch": 1.0, "grad_norm": 0.0011234580306336284, "learning_rate": 8.08888888888889e-06, "loss": 0.058, "step": 3180},
    {"epoch": 1.0, "grad_norm": 0.0011441799579188228, "learning_rate": 8.044444444444444e-06, "loss": 0.0243, "step": 3190},
    {"epoch": 1.0, "grad_norm": 0.860286295413971, "learning_rate": 8.000000000000001e-06, "loss": 0.0128, "step": 3200},
    {"epoch": 1.01, "grad_norm": 0.7325726747512817, "learning_rate": 7.955555555555557e-06, "loss": 0.0156, "step": 3210},
    {"epoch": 1.01, "grad_norm": 2.548293352127075, "learning_rate": 7.911111111111112e-06, "loss": 0.0724, "step": 3220},
    {"epoch": 1.01, "grad_norm": 0.0010266635799780488, "learning_rate": 7.866666666666667e-06, "loss": 0.0194, "step": 3230},
    {"epoch": 1.02, "grad_norm": 0.34962719678878784, "learning_rate": 7.822222222222224e-06, "loss": 0.0086, "step": 3240},
    {"epoch": 1.02, "grad_norm": 0.0028716479428112507, "learning_rate": 7.77777777777778e-06, "loss": 0.0143, "step": 3250},
    {"epoch": 1.02, "grad_norm": 0.1319417804479599, "learning_rate": 7.733333333333334e-06, "loss": 0.0231, "step": 3260},
    {"epoch": 1.02, "grad_norm": 2.1257731914520264, "learning_rate": 7.68888888888889e-06, "loss": 0.0767, "step": 3270},
    {"epoch": 1.03, "grad_norm": 0.001123911701142788, "learning_rate": 7.644444444444445e-06, "loss": 0.0082, "step": 3280},
    {"epoch": 1.03, "grad_norm": 0.0013617794029414654, "learning_rate": 7.600000000000001e-06, "loss": 0.0003, "step": 3290},
    {"epoch": 1.03, "grad_norm": 0.0023631304502487183, "learning_rate": 7.555555555555556e-06, "loss": 0.0154, "step": 3300},
    {"epoch": 1.04, "grad_norm": 1.145058512687683, "learning_rate": 7.511111111111111e-06, "loss": 0.0369, "step": 3310},
    {"epoch": 1.04, "grad_norm": 0.0006993517745286226, "learning_rate": 7.4666666666666675e-06, "loss": 0.0195, "step": 3320},
    {"epoch": 1.04, "grad_norm": 0.5191490650177002, "learning_rate": 7.422222222222223e-06, "loss": 0.1223, "step": 3330},
    {"epoch": 1.05, "grad_norm": 4.084077835083008, "learning_rate": 7.377777777777778e-06, "loss": 0.0498, "step": 3340},
    {"epoch": 1.05, "grad_norm": 0.01493588462471962, "learning_rate": 7.333333333333333e-06, "loss": 0.0161, "step": 3350},
    {"epoch": 1.05, "grad_norm": 0.0015668733976781368, "learning_rate": 7.28888888888889e-06, "loss": 0.0276, "step": 3360},
    {"epoch": 1.06, "grad_norm": 0.0019036026205867529, "learning_rate": 7.244444444444445e-06, "loss": 0.0464, "step": 3370},
    {"epoch": 1.06, "grad_norm": 0.003628369653597474, "learning_rate": 7.2000000000000005e-06, "loss": 0.0636, "step": 3380},
    {"epoch": 1.06, "grad_norm": 4.771296501159668, "learning_rate": 7.155555555555556e-06, "loss": 0.0138, "step": 3390},
    {"epoch": 1.07, "grad_norm": 0.001274996087886393, "learning_rate": 7.111111111111112e-06, "loss": 0.0074, "step": 3400},
    {"epoch": 1.07, "grad_norm": 1.4918831586837769, "learning_rate": 7.066666666666667e-06, "loss": 0.0227, "step": 3410},
    {"epoch": 1.07, "grad_norm": 0.0008646890637464821, "learning_rate": 7.022222222222222e-06, "loss": 0.0054, "step": 3420},
    {"epoch": 1.07, "grad_norm": 0.0009093726403079927, "learning_rate": 6.977777777777779e-06, "loss": 0.0434, "step": 3430},
    {"epoch": 1.08, "grad_norm": 0.0018424297450110316, "learning_rate": 6.9333333333333344e-06, "loss": 0.0601, "step": 3440},
    {"epoch": 1.08, "grad_norm": 0.0018895555986091495, "learning_rate": 6.88888888888889e-06, "loss": 0.0056, "step": 3450},
    {"epoch": 1.08, "grad_norm": 0.0008156482363119721, "learning_rate": 6.844444444444445e-06, "loss": 0.0229, "step": 3460},
    {"epoch": 1.09, "grad_norm": 0.46498626470565796, "learning_rate": 6.800000000000001e-06, "loss": 0.0192, "step": 3470},
    {"epoch": 1.09, "grad_norm": 0.8943893909454346, "learning_rate": 6.755555555555556e-06, "loss": 0.0321, "step": 3480},
    {"epoch": 1.09, "grad_norm": 1.5523977279663086, "learning_rate": 6.711111111111111e-06, "loss": 0.0447, "step": 3490},
    {"epoch": 1.1, "grad_norm": 0.000732199230697006, "learning_rate": 6.666666666666667e-06, "loss": 0.032, "step": 3500},
    {"epoch": 1.1, "eval_loss": 0.02055978961288929, "eval_runtime": 62.0715, "eval_samples_per_second": 16.11, "eval_steps_per_second": 16.11, "step": 3500},
    {"epoch": 1.1, "grad_norm": 0.002656770870089531, "learning_rate": 6.6222222222222236e-06, "loss": 0.0632, "step": 3510},
    {"epoch": 1.1, "grad_norm": 0.0010300944559276104, "learning_rate": 6.577777777777779e-06, "loss": 0.0144, "step": 3520},
    {"epoch": 1.11, "grad_norm": 0.3620741367340088, "learning_rate": 6.533333333333334e-06, "loss": 0.0497, "step": 3530},
    {"epoch": 1.11, "grad_norm": 0.04097941517829895, "learning_rate": 6.488888888888889e-06, "loss": 0.025, "step": 3540},
    {"epoch": 1.11, "grad_norm": 0.0011906519066542387, "learning_rate": 6.444444444444445e-06, "loss": 0.0059, "step": 3550},
    {"epoch": 1.12, "grad_norm": 0.0008852950995787978, "learning_rate": 6.4000000000000006e-06, "loss": 0.0354, "step": 3560},
    {"epoch": 1.12, "grad_norm": 0.0006289037992246449, "learning_rate": 6.355555555555556e-06, "loss": 0.0315, "step": 3570},
    {"epoch": 1.12, "grad_norm": 0.001330237602815032, "learning_rate": 6.311111111111111e-06, "loss": 0.0161, "step": 3580},
    {"epoch": 1.12, "grad_norm": 0.0021519146393984556, "learning_rate": 6.266666666666668e-06, "loss": 0.0105, "step": 3590},
    {"epoch": 1.13, "grad_norm": 1.5696661472320557, "learning_rate": 6.222222222222223e-06, "loss": 0.0244, "step": 3600},
    {"epoch": 1.13, "grad_norm": 0.001931514940224588, "learning_rate": 6.177777777777778e-06, "loss": 0.0001, "step": 3610},
    {"epoch": 1.13, "grad_norm": 0.0009438694687560201, "learning_rate": 6.133333333333334e-06, "loss": 0.0366, "step": 3620},
    {"epoch": 1.14, "grad_norm": 0.6622722148895264, "learning_rate": 6.08888888888889e-06, "loss": 0.0113, "step": 3630},
    {"epoch": 1.14, "grad_norm": 0.6738483309745789, "learning_rate": 6.044444444444445e-06, "loss": 0.0314, "step": 3640},
    {"epoch": 1.14, "grad_norm": 2.3022451400756836, "learning_rate": 6e-06, "loss": 0.0252, "step": 3650},
    {"epoch": 1.15, "grad_norm": 2.823756694793701, "learning_rate": 5.955555555555555e-06, "loss": 0.0468, "step": 3660},
    {"epoch": 1.15, "grad_norm": 1.8479762077331543, "learning_rate": 5.911111111111112e-06, "loss": 0.0275, "step": 3670},
    {"epoch": 1.15, "grad_norm": 0.0007701431168243289, "learning_rate": 5.8666666666666675e-06, "loss": 0.032, "step": 3680},
    {"epoch": 1.16, "grad_norm": 1.1393382549285889, "learning_rate": 5.822222222222223e-06, "loss": 0.0791, "step": 3690},
    {"epoch": 1.16, "grad_norm": 1.9080690145492554, "learning_rate": 5.777777777777778e-06, "loss": 0.0259, "step": 3700},
    {"epoch": 1.16, "grad_norm": 0.001581453951075673, "learning_rate": 5.733333333333334e-06, "loss": 0.0409, "step": 3710},
    {"epoch": 1.17, "grad_norm": 0.03290742263197899, "learning_rate": 5.688888888888889e-06, "loss": 0.0929, "step": 3720},
    {"epoch": 1.17, "grad_norm": 0.0012888371711596847, "learning_rate": 5.6444444444444445e-06, "loss": 0.0168, "step": 3730},
    {"epoch": 1.17, "grad_norm": 0.0007963385432958603, "learning_rate": 5.600000000000001e-06, "loss": 0.0525, "step": 3740},
    {"epoch": 1.17, "grad_norm": 0.015161274001002312, "learning_rate": 5.555555555555557e-06, "loss": 0.0375, "step": 3750},
    {"epoch": 1.18, "grad_norm": 0.0009869037894532084, "learning_rate": 5.511111111111112e-06, "loss": 0.0246, "step": 3760},
    {"epoch": 1.18, "grad_norm": 0.002037523780018091, "learning_rate": 5.466666666666667e-06, "loss": 0.0934, "step": 3770},
    {"epoch": 1.18, "grad_norm": 0.002192340325564146, "learning_rate": 5.422222222222223e-06, "loss": 0.0138, "step": 3780},
    {"epoch": 1.19, "grad_norm": 0.10088424384593964, "learning_rate": 5.3777777777777784e-06, "loss": 0.0259, "step": 3790},
    {"epoch": 1.19, "grad_norm": 0.38258832693099976, "learning_rate": 5.333333333333334e-06, "loss": 0.0266, "step": 3800},
    {"epoch": 1.19, "grad_norm": 0.0011203467147424817, "learning_rate": 5.288888888888889e-06, "loss": 0.0069, "step": 3810},
    {"epoch": 1.2, "grad_norm": 0.0006357720121741295, "learning_rate": 5.244444444444445e-06, "loss": 0.0272, "step": 3820},
    {"epoch": 1.2, "grad_norm": 0.12165514379739761, "learning_rate": 5.2e-06, "loss": 0.0175, "step": 3830},
    {"epoch": 1.2, "grad_norm": 0.0015337629010900855, "learning_rate": 5.155555555555556e-06, "loss": 0.027, "step": 3840},
    {"epoch": 1.21, "grad_norm": 0.0012696600751951337, "learning_rate": 5.1111111111111115e-06, "loss": 0.0005, "step": 3850},
    {"epoch": 1.21, "grad_norm": 0.8595750331878662, "learning_rate": 5.0666666666666676e-06, "loss": 0.0367, "step": 3860},
    {"epoch": 1.21, "grad_norm": 0.002280933316797018, "learning_rate": 5.022222222222223e-06, "loss": 0.0374, "step": 3870},
    {"epoch": 1.22, "grad_norm": 0.0005754129961133003, "learning_rate": 4.977777777777778e-06, "loss": 0.0168, "step": 3880},
    {"epoch": 1.22, "grad_norm": 0.8617763519287109, "learning_rate": 4.933333333333334e-06, "loss": 0.0155, "step": 3890},
    {"epoch": 1.22, "grad_norm": 0.5179722905158997, "learning_rate": 4.888888888888889e-06, "loss": 0.0486, "step": 3900},
    {"epoch": 1.23, "grad_norm": 0.0009101899340748787, "learning_rate": 4.8444444444444446e-06, "loss": 0.0027, "step": 3910},
    {"epoch": 1.23, "grad_norm": 0.008863825350999832, "learning_rate": 4.800000000000001e-06, "loss": 0.055, "step": 3920},
    {"epoch": 1.23, "grad_norm": 1.593704342842102, "learning_rate": 4.755555555555556e-06, "loss": 0.0128, "step": 3930},
    {"epoch": 1.23, "grad_norm": 0.0012650558492168784, "learning_rate": 4.711111111111111e-06, "loss": 0.0, "step": 3940},
    {"epoch": 1.24, "grad_norm": 0.0025375504046678543, "learning_rate": 4.666666666666667e-06, "loss": 0.0254, "step": 3950},
    {"epoch": 1.24, "grad_norm": 0.0035014720633625984, "learning_rate": 4.622222222222222e-06, "loss": 0.0254, "step": 3960},
    {"epoch": 1.24, "grad_norm": 0.0010467343963682652, "learning_rate": 4.5777777777777785e-06, "loss": 0.0556, "step": 3970},
    {"epoch": 1.25, "grad_norm": 0.0006358217215165496, "learning_rate": 4.533333333333334e-06, "loss": 0.0266, "step": 3980},
    {"epoch": 1.25, "grad_norm": 0.3243739902973175, "learning_rate": 4.488888888888889e-06, "loss": 0.0079, "step": 3990},
    {"epoch": 1.25, "grad_norm": 0.04155297204852104, "learning_rate": 4.444444444444444e-06, "loss": 0.0163, "step": 4000},
    {"epoch": 1.25, "eval_loss": 0.02105984091758728, "eval_runtime": 62.0961, "eval_samples_per_second": 16.104, "eval_steps_per_second": 16.104, "step": 4000}
  ],
  "logging_steps": 10,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "total_flos": 6.4408503975936e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}