{
  "best_metric": 0.3018401563167572,
  "best_model_checkpoint": "../../saves/LLaMA3-70B-qlora-bnb/lora/sft/AG_16000-3/checkpoint-3500",
  "epoch": 2.9411764705882355,
  "eval_steps": 100,
  "global_step": 4500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.006535947712418301, "grad_norm": 13.670208930969238, "learning_rate": 8.714596949891069e-07, "loss": 2.4115, "step": 10 },
    { "epoch": 0.013071895424836602, "grad_norm": 31.536911010742188, "learning_rate": 2.6143790849673204e-06, "loss": 2.6282, "step": 20 },
    { "epoch": 0.0196078431372549, "grad_norm": 14.474563598632812, "learning_rate": 4.7930283224400875e-06, "loss": 2.5659, "step": 30 },
    { "epoch": 0.026143790849673203, "grad_norm": 14.182409286499023, "learning_rate": 6.971677559912855e-06, "loss": 2.3685, "step": 40 },
    { "epoch": 0.032679738562091505, "grad_norm": 34.63951873779297, "learning_rate": 8.932461873638345e-06, "loss": 2.351, "step": 50 },
    { "epoch": 0.0392156862745098, "grad_norm": 23.15449333190918, "learning_rate": 1.1111111111111112e-05, "loss": 2.1125, "step": 60 },
    { "epoch": 0.0457516339869281, "grad_norm": 32.01618576049805, "learning_rate": 1.328976034858388e-05, "loss": 1.5225, "step": 70 },
    { "epoch": 0.05228758169934641, "grad_norm": 8.76276969909668, "learning_rate": 1.5468409586056645e-05, "loss": 1.1141, "step": 80 },
    { "epoch": 0.058823529411764705, "grad_norm": 12.724370956420898, "learning_rate": 1.7647058823529414e-05, "loss": 0.654, "step": 90 },
    { "epoch": 0.06535947712418301, "grad_norm": 13.92119312286377, "learning_rate": 1.982570806100218e-05, "loss": 0.4776, "step": 100 },
    { "epoch": 0.06535947712418301, "eval_loss": 0.460627019405365, "eval_runtime": 7467.6581, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 100 },
    { "epoch": 0.0718954248366013, "grad_norm": 13.412103652954102, "learning_rate": 2.2004357298474944e-05, "loss": 0.4607, "step": 110 },
    { "epoch": 0.0784313725490196, "grad_norm": 16.269054412841797, "learning_rate": 2.4183006535947712e-05, "loss": 0.3912, "step": 120 },
    { "epoch": 0.08496732026143791, "grad_norm": 5.597168922424316, "learning_rate": 2.636165577342048e-05, "loss": 0.3446, "step": 130 },
    { "epoch": 0.0915032679738562, "grad_norm": 7.308394432067871, "learning_rate": 2.854030501089325e-05, "loss": 0.3572, "step": 140 },
    { "epoch": 0.09803921568627451, "grad_norm": 8.47480583190918, "learning_rate": 3.0718954248366014e-05, "loss": 0.3224, "step": 150 },
    { "epoch": 0.10457516339869281, "grad_norm": 6.073367595672607, "learning_rate": 3.289760348583878e-05, "loss": 0.4026, "step": 160 },
    { "epoch": 0.1111111111111111, "grad_norm": 6.276689052581787, "learning_rate": 3.507625272331155e-05, "loss": 0.367, "step": 170 },
    { "epoch": 0.11764705882352941, "grad_norm": 11.574933052062988, "learning_rate": 3.725490196078432e-05, "loss": 0.3921, "step": 180 },
    { "epoch": 0.12418300653594772, "grad_norm": 11.751296043395996, "learning_rate": 3.943355119825709e-05, "loss": 0.3898, "step": 190 },
    { "epoch": 0.13071895424836602, "grad_norm": 9.689138412475586, "learning_rate": 4.161220043572985e-05, "loss": 0.3675, "step": 200 },
    { "epoch": 0.13071895424836602, "eval_loss": 0.3592655658721924, "eval_runtime": 7465.6804, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 200 },
    { "epoch": 0.13725490196078433, "grad_norm": 5.775482177734375, "learning_rate": 4.379084967320262e-05, "loss": 0.3693, "step": 210 },
    { "epoch": 0.1437908496732026, "grad_norm": 12.238544464111328, "learning_rate": 4.5969498910675387e-05, "loss": 0.4207, "step": 220 },
    { "epoch": 0.1503267973856209, "grad_norm": 6.162591457366943, "learning_rate": 4.814814814814815e-05, "loss": 0.3702, "step": 230 },
    { "epoch": 0.1568627450980392, "grad_norm": 5.743127346038818, "learning_rate": 5.032679738562092e-05, "loss": 0.3505, "step": 240 },
    { "epoch": 0.16339869281045752, "grad_norm": 12.115300178527832, "learning_rate": 5.250544662309368e-05, "loss": 0.3654, "step": 250 },
    { "epoch": 0.16993464052287582, "grad_norm": 12.006166458129883, "learning_rate": 5.4684095860566454e-05, "loss": 0.352, "step": 260 },
    { "epoch": 0.17647058823529413, "grad_norm": 3.973567247390747, "learning_rate": 5.6862745098039215e-05, "loss": 0.36, "step": 270 },
    { "epoch": 0.1830065359477124, "grad_norm": 5.307390213012695, "learning_rate": 5.904139433551199e-05, "loss": 0.3475, "step": 280 },
    { "epoch": 0.1895424836601307, "grad_norm": 5.818578243255615, "learning_rate": 6.122004357298475e-05, "loss": 0.4053, "step": 290 },
    { "epoch": 0.19607843137254902, "grad_norm": 5.835134506225586, "learning_rate": 6.339869281045751e-05, "loss": 0.3761, "step": 300 },
    { "epoch": 0.19607843137254902, "eval_loss": 0.4034684896469116, "eval_runtime": 7466.244, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 300 },
    { "epoch": 0.20261437908496732, "grad_norm": 3.4767305850982666, "learning_rate": 6.557734204793029e-05, "loss": 0.3709, "step": 310 },
    { "epoch": 0.20915032679738563, "grad_norm": 5.347959995269775, "learning_rate": 6.775599128540305e-05, "loss": 0.35, "step": 320 },
    { "epoch": 0.21568627450980393, "grad_norm": 5.1962480545043945, "learning_rate": 6.993464052287581e-05, "loss": 0.3892, "step": 330 },
    { "epoch": 0.2222222222222222, "grad_norm": 3.1436469554901123, "learning_rate": 7.211328976034859e-05, "loss": 0.3538, "step": 340 },
    { "epoch": 0.22875816993464052, "grad_norm": 2.677011489868164, "learning_rate": 7.429193899782135e-05, "loss": 0.3533, "step": 350 },
    { "epoch": 0.23529411764705882, "grad_norm": 11.516694068908691, "learning_rate": 7.647058823529411e-05, "loss": 0.4018, "step": 360 },
    { "epoch": 0.24183006535947713, "grad_norm": 10.96320629119873, "learning_rate": 7.864923747276689e-05, "loss": 0.43, "step": 370 },
    { "epoch": 0.24836601307189543, "grad_norm": 6.594156265258789, "learning_rate": 8.082788671023965e-05, "loss": 0.3737, "step": 380 },
    { "epoch": 0.2549019607843137, "grad_norm": 4.763483047485352, "learning_rate": 8.300653594771242e-05, "loss": 0.3629, "step": 390 },
    { "epoch": 0.26143790849673204, "grad_norm": 3.6535167694091797, "learning_rate": 8.518518518518518e-05, "loss": 0.3465, "step": 400 },
    { "epoch": 0.26143790849673204, "eval_loss": 0.35558873414993286, "eval_runtime": 7465.9696, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 400 },
    { "epoch": 0.2679738562091503, "grad_norm": 10.275704383850098, "learning_rate": 8.736383442265795e-05, "loss": 0.3435, "step": 410 },
    { "epoch": 0.27450980392156865, "grad_norm": 6.834077835083008, "learning_rate": 8.954248366013072e-05, "loss": 0.3828, "step": 420 },
    { "epoch": 0.28104575163398693, "grad_norm": 13.24137020111084, "learning_rate": 9.172113289760348e-05, "loss": 0.3704, "step": 430 },
    { "epoch": 0.2875816993464052, "grad_norm": 9.123649597167969, "learning_rate": 9.389978213507626e-05, "loss": 0.3569, "step": 440 },
    { "epoch": 0.29411764705882354, "grad_norm": 6.183751106262207, "learning_rate": 9.607843137254903e-05, "loss": 0.4278, "step": 450 },
    { "epoch": 0.3006535947712418, "grad_norm": 7.0605645179748535, "learning_rate": 9.82570806100218e-05, "loss": 0.3747, "step": 460 },
    { "epoch": 0.30718954248366015, "grad_norm": 4.283732891082764, "learning_rate": 9.999994216519553e-05, "loss": 0.3289, "step": 470 },
    { "epoch": 0.3137254901960784, "grad_norm": 12.275751113891602, "learning_rate": 9.999791796108715e-05, "loss": 0.4004, "step": 480 },
    { "epoch": 0.3202614379084967, "grad_norm": 6.7879958152771, "learning_rate": 9.999300215054801e-05, "loss": 0.3768, "step": 490 },
    { "epoch": 0.32679738562091504, "grad_norm": 3.2305805683135986, "learning_rate": 9.998519501788174e-05, "loss": 0.394, "step": 500 },
    { "epoch": 0.32679738562091504, "eval_loss": 0.3434564173221588, "eval_runtime": 7464.9222, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 500 },
    { "epoch": 0.3333333333333333, "grad_norm": 3.8374526500701904, "learning_rate": 9.997449701461023e-05, "loss": 0.3625, "step": 510 },
    { "epoch": 0.33986928104575165, "grad_norm": 5.077846050262451, "learning_rate": 9.996090875944755e-05, "loss": 0.359, "step": 520 },
    { "epoch": 0.3464052287581699, "grad_norm": 3.4323623180389404, "learning_rate": 9.994443103826414e-05, "loss": 0.356, "step": 530 },
    { "epoch": 0.35294117647058826, "grad_norm": 4.085783004760742, "learning_rate": 9.992506480404138e-05, "loss": 0.3463, "step": 540 },
    { "epoch": 0.35947712418300654, "grad_norm": 7.318523406982422, "learning_rate": 9.990281117681645e-05, "loss": 0.3896, "step": 550 },
    { "epoch": 0.3660130718954248, "grad_norm": 16.51464080810547, "learning_rate": 9.987767144361759e-05, "loss": 0.343, "step": 560 },
    { "epoch": 0.37254901960784315, "grad_norm": 2.0446882247924805, "learning_rate": 9.98496470583896e-05, "loss": 0.3291, "step": 570 },
    { "epoch": 0.3790849673202614, "grad_norm": 2.331265687942505, "learning_rate": 9.981873964190987e-05, "loss": 0.3571, "step": 580 },
    { "epoch": 0.38562091503267976, "grad_norm": 6.242280006408691, "learning_rate": 9.978495098169445e-05, "loss": 0.3624, "step": 590 },
    { "epoch": 0.39215686274509803, "grad_norm": 1.5557068586349487, "learning_rate": 9.974828303189491e-05, "loss": 0.3579, "step": 600 },
    { "epoch": 0.39215686274509803, "eval_loss": 0.341349333524704, "eval_runtime": 7465.0518, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 600 },
    { "epoch": 0.39869281045751637, "grad_norm": 4.937715530395508, "learning_rate": 9.970873791318512e-05, "loss": 0.3576, "step": 610 },
    { "epoch": 0.40522875816993464, "grad_norm": 4.85018253326416, "learning_rate": 9.966631791263872e-05, "loss": 0.3567, "step": 620 },
    { "epoch": 0.4117647058823529, "grad_norm": 4.347261905670166, "learning_rate": 9.96210254835968e-05, "loss": 0.3372, "step": 630 },
    { "epoch": 0.41830065359477125, "grad_norm": 4.017812728881836, "learning_rate": 9.9572863245526e-05, "loss": 0.3363, "step": 640 },
    { "epoch": 0.42483660130718953, "grad_norm": 7.451604843139648, "learning_rate": 9.952183398386706e-05, "loss": 0.3269, "step": 650 },
    { "epoch": 0.43137254901960786, "grad_norm": 12.005084991455078, "learning_rate": 9.946794064987371e-05, "loss": 0.3242, "step": 660 },
    { "epoch": 0.43790849673202614, "grad_norm": 3.397099494934082, "learning_rate": 9.941118636044193e-05, "loss": 0.383, "step": 670 },
    { "epoch": 0.4444444444444444, "grad_norm": 2.081789493560791, "learning_rate": 9.935157439792982e-05, "loss": 0.3354, "step": 680 },
    { "epoch": 0.45098039215686275, "grad_norm": 3.097064733505249, "learning_rate": 9.928910820996756e-05, "loss": 0.3281, "step": 690 },
    { "epoch": 0.45751633986928103, "grad_norm": 5.233646869659424, "learning_rate": 9.922379140925826e-05, "loss": 0.3524, "step": 700 },
    { "epoch": 0.45751633986928103, "eval_loss": 0.3694455921649933, "eval_runtime": 7465.0105, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 700 },
    { "epoch": 0.46405228758169936, "grad_norm": 4.417613983154297, "learning_rate": 9.915562777336879e-05, "loss": 0.3924, "step": 710 },
    { "epoch": 0.47058823529411764, "grad_norm": 2.472074508666992, "learning_rate": 9.908462124451152e-05, "loss": 0.3488, "step": 720 },
    { "epoch": 0.477124183006536, "grad_norm": 2.744296073913574, "learning_rate": 9.901077592931612e-05, "loss": 0.3291, "step": 730 },
    { "epoch": 0.48366013071895425, "grad_norm": 3.440641403198242, "learning_rate": 9.893409609859222e-05, "loss": 0.3901, "step": 740 },
    { "epoch": 0.49019607843137253, "grad_norm": 2.749112844467163, "learning_rate": 9.88545861870823e-05, "loss": 0.3535, "step": 750 },
    { "epoch": 0.49673202614379086, "grad_norm": 4.953378200531006, "learning_rate": 9.877225079320526e-05, "loss": 0.3361, "step": 760 },
    { "epoch": 0.5032679738562091, "grad_norm": 2.971996307373047, "learning_rate": 9.868709467879051e-05, "loss": 0.3772, "step": 770 },
    { "epoch": 0.5098039215686274, "grad_norm": 3.6259024143218994, "learning_rate": 9.859912276880247e-05, "loss": 0.3476, "step": 780 },
    { "epoch": 0.5163398692810458, "grad_norm": 3.4774110317230225, "learning_rate": 9.850834015105583e-05, "loss": 0.3622, "step": 790 },
    { "epoch": 0.5228758169934641, "grad_norm": 5.781759262084961, "learning_rate": 9.841475207592122e-05, "loss": 0.3554, "step": 800 },
    { "epoch": 0.5228758169934641, "eval_loss": 0.351721853017807, "eval_runtime": 7466.3074, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 800 },
    { "epoch": 0.5294117647058824, "grad_norm": 2.1952409744262695, "learning_rate": 9.831836395602163e-05, "loss": 0.3345, "step": 810 },
    { "epoch": 0.5359477124183006, "grad_norm": 5.838998317718506, "learning_rate": 9.821918136591934e-05, "loss": 0.3387, "step": 820 },
    { "epoch": 0.5424836601307189, "grad_norm": 2.8472695350646973, "learning_rate": 9.811721004179352e-05, "loss": 0.3351, "step": 830 },
    { "epoch": 0.5490196078431373, "grad_norm": 4.2800068855285645, "learning_rate": 9.801245588110848e-05, "loss": 0.3373, "step": 840 },
    { "epoch": 0.5555555555555556, "grad_norm": 2.831066370010376, "learning_rate": 9.790492494227258e-05, "loss": 0.3718, "step": 850 },
    { "epoch": 0.5620915032679739, "grad_norm": 2.9960384368896484, "learning_rate": 9.779462344428789e-05, "loss": 0.3391, "step": 860 },
    { "epoch": 0.5686274509803921, "grad_norm": 2.505591869354248, "learning_rate": 9.768155776639044e-05, "loss": 0.356, "step": 870 },
    { "epoch": 0.5751633986928104, "grad_norm": 5.350845813751221, "learning_rate": 9.756573444768133e-05, "loss": 0.3377, "step": 880 },
    { "epoch": 0.5816993464052288, "grad_norm": 3.700345516204834, "learning_rate": 9.744716018674862e-05, "loss": 0.3306, "step": 890 },
    { "epoch": 0.5882352941176471, "grad_norm": 3.254387140274048, "learning_rate": 9.732584184127973e-05, "loss": 0.3378, "step": 900 },
    { "epoch": 0.5882352941176471, "eval_loss": 0.3639741837978363, "eval_runtime": 7465.3231, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 900 },
    { "epoch": 0.5947712418300654, "grad_norm": 5.645087242126465, "learning_rate": 9.7201786427665e-05, "loss": 0.3604, "step": 910 },
    { "epoch": 0.6013071895424836, "grad_norm": 6.0515456199646, "learning_rate": 9.707500112059183e-05, "loss": 0.3789, "step": 920 },
    { "epoch": 0.6078431372549019, "grad_norm": 7.369515419006348, "learning_rate": 9.694549325262974e-05, "loss": 0.3571, "step": 930 },
    { "epoch": 0.6143790849673203, "grad_norm": 7.788597106933594, "learning_rate": 9.681327031380629e-05, "loss": 0.3424, "step": 940 },
    { "epoch": 0.6209150326797386, "grad_norm": 4.71685791015625, "learning_rate": 9.667833995117391e-05, "loss": 0.3474, "step": 950 },
    { "epoch": 0.6274509803921569, "grad_norm": 4.404577255249023, "learning_rate": 9.654070996836765e-05, "loss": 0.3431, "step": 960 },
    { "epoch": 0.6339869281045751, "grad_norm": 12.022799491882324, "learning_rate": 9.640038832515381e-05, "loss": 0.3246, "step": 970 },
    { "epoch": 0.6405228758169934, "grad_norm": 3.9037022590637207, "learning_rate": 9.625738313696966e-05, "loss": 0.3394, "step": 980 },
    { "epoch": 0.6470588235294118, "grad_norm": 3.2752866744995117, "learning_rate": 9.611170267445401e-05, "loss": 0.376, "step": 990 },
    { "epoch": 0.6535947712418301, "grad_norm": 4.711609840393066, "learning_rate": 9.596335536296897e-05, "loss": 0.3245, "step": 1000 },
    { "epoch": 0.6535947712418301, "eval_loss": 0.32997554540634155, "eval_runtime": 7465.3556, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1000 },
    { "epoch": 0.6601307189542484, "grad_norm": 1.1813262701034546, "learning_rate": 9.581234978211257e-05, "loss": 0.3446, "step": 1010 },
    { "epoch": 0.6666666666666666, "grad_norm": 3.696199417114258, "learning_rate": 9.565869466522265e-05, "loss": 0.3484, "step": 1020 },
    { "epoch": 0.673202614379085, "grad_norm": 1.4563554525375366, "learning_rate": 9.550239889887179e-05, "loss": 0.3155, "step": 1030 },
    { "epoch": 0.6797385620915033, "grad_norm": 1.6707898378372192, "learning_rate": 9.534347152235317e-05, "loss": 0.3364, "step": 1040 },
    { "epoch": 0.6862745098039216, "grad_norm": 3.146296977996826, "learning_rate": 9.518192172715807e-05, "loss": 0.3286, "step": 1050 },
    { "epoch": 0.6928104575163399, "grad_norm": 8.924692153930664, "learning_rate": 9.501775885644405e-05, "loss": 0.3337, "step": 1060 },
    { "epoch": 0.6993464052287581, "grad_norm": 4.0824480056762695, "learning_rate": 9.485099240449474e-05, "loss": 0.328, "step": 1070 },
    { "epoch": 0.7058823529411765, "grad_norm": 2.718278408050537, "learning_rate": 9.468163201617062e-05, "loss": 0.3186, "step": 1080 },
    { "epoch": 0.7124183006535948, "grad_norm": 3.190894603729248, "learning_rate": 9.450968748635133e-05, "loss": 0.3397, "step": 1090 },
    { "epoch": 0.7189542483660131, "grad_norm": 3.299405813217163, "learning_rate": 9.433516875936916e-05, "loss": 0.4178, "step": 1100 },
    { "epoch": 0.7189542483660131, "eval_loss": 0.3399566113948822, "eval_runtime": 7464.7626, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1100 },
    { "epoch": 0.7254901960784313, "grad_norm": 2.694213628768921, "learning_rate": 9.415808592843383e-05, "loss": 0.3442, "step": 1110 },
    { "epoch": 0.7320261437908496, "grad_norm": 1.782759428024292, "learning_rate": 9.397844923504885e-05, "loss": 0.2938, "step": 1120 },
    { "epoch": 0.738562091503268, "grad_norm": 2.5469982624053955, "learning_rate": 9.37962690684192e-05, "loss": 0.3099, "step": 1130 },
    { "epoch": 0.7450980392156863, "grad_norm": 4.2174906730651855, "learning_rate": 9.361155596485046e-05, "loss": 0.3695, "step": 1140 },
    { "epoch": 0.7516339869281046, "grad_norm": 2.9205563068389893, "learning_rate": 9.342432060713942e-05, "loss": 0.3253, "step": 1150 },
    { "epoch": 0.7581699346405228, "grad_norm": 1.3839212656021118, "learning_rate": 9.323457382395628e-05, "loss": 0.3306, "step": 1160 },
    { "epoch": 0.7647058823529411, "grad_norm": 3.993194341659546, "learning_rate": 9.304232658921839e-05, "loss": 0.3048, "step": 1170 },
    { "epoch": 0.7712418300653595, "grad_norm": 3.2434442043304443, "learning_rate": 9.284759002145552e-05, "loss": 0.3235, "step": 1180 },
    { "epoch": 0.7777777777777778, "grad_norm": 6.1412153244018555, "learning_rate": 9.26503753831669e-05, "loss": 0.2977, "step": 1190 },
    { "epoch": 0.7843137254901961, "grad_norm": 6.04712438583374, "learning_rate": 9.245069408016977e-05, "loss": 0.2912, "step": 1200 },
    { "epoch": 0.7843137254901961, "eval_loss": 0.34279727935791016, "eval_runtime": 7466.0217, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1200 },
    { "epoch": 0.7908496732026143, "grad_norm": 1.9226710796356201, "learning_rate": 9.224855766093985e-05, "loss": 0.3681, "step": 1210 },
    { "epoch": 0.7973856209150327, "grad_norm": 2.343475341796875, "learning_rate": 9.204397781594331e-05, "loss": 0.3081, "step": 1220 },
    { "epoch": 0.803921568627451, "grad_norm": 4.996776103973389, "learning_rate": 9.183696637696077e-05, "loss": 0.3268, "step": 1230 },
    { "epoch": 0.8104575163398693, "grad_norm": 3.761845111846924, "learning_rate": 9.162753531640292e-05, "loss": 1.001, "step": 1240 },
    { "epoch": 0.8169934640522876, "grad_norm": 13.737248420715332, "learning_rate": 9.141569674661817e-05, "loss": 0.3769, "step": 1250 },
    { "epoch": 0.8235294117647058, "grad_norm": 8.417398452758789, "learning_rate": 9.120146291919204e-05, "loss": 0.402, "step": 1260 },
    { "epoch": 0.8300653594771242, "grad_norm": 3.1971797943115234, "learning_rate": 9.098484622423882e-05, "loss": 0.3705, "step": 1270 },
    { "epoch": 0.8366013071895425, "grad_norm": 8.737996101379395, "learning_rate": 9.076585918968468e-05, "loss": 0.3536, "step": 1280 },
    { "epoch": 0.8431372549019608, "grad_norm": 3.097640037536621, "learning_rate": 9.054451448054335e-05, "loss": 0.3847, "step": 1290 },
    { "epoch": 0.8496732026143791, "grad_norm": 3.883129596710205, "learning_rate": 9.03208248981836e-05, "loss": 0.3905, "step": 1300 },
    { "epoch": 0.8496732026143791, "eval_loss": 0.36324241757392883, "eval_runtime": 7459.1059, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1300 },
    { "epoch": 0.8562091503267973, "grad_norm": 3.2262656688690186, "learning_rate": 9.009480337958883e-05, "loss": 0.351, "step": 1310 },
    { "epoch": 0.8627450980392157, "grad_norm": 2.727170705795288, "learning_rate": 8.986646299660889e-05, "loss": 0.3583, "step": 1320 },
    { "epoch": 0.869281045751634, "grad_norm": 6.688016414642334, "learning_rate": 8.963581695520408e-05, "loss": 0.3513, "step": 1330 },
    { "epoch": 0.8758169934640523, "grad_norm": 1.6296730041503906, "learning_rate": 8.940287859468139e-05, "loss": 0.3563, "step": 1340 },
    { "epoch": 0.8823529411764706, "grad_norm": 1.2997907400131226, "learning_rate": 8.916766138692303e-05, "loss": 0.3741, "step": 1350 },
    { "epoch": 0.8888888888888888, "grad_norm": 7.427231788635254, "learning_rate": 8.893017893560727e-05, "loss": 0.3419, "step": 1360 },
    { "epoch": 0.8954248366013072, "grad_norm": 1.4134552478790283, "learning_rate": 8.869044497542172e-05, "loss": 0.3585, "step": 1370 },
    { "epoch": 0.9019607843137255, "grad_norm": 1.9037864208221436, "learning_rate": 8.844847337126893e-05, "loss": 0.3563, "step": 1380 },
    { "epoch": 0.9084967320261438, "grad_norm": 2.263465404510498, "learning_rate": 8.820427811746456e-05, "loss": 0.3429, "step": 1390 },
    { "epoch": 0.9150326797385621, "grad_norm": 3.114661455154419, "learning_rate": 8.795787333692807e-05, "loss": 0.3389, "step": 1400 },
    { "epoch": 0.9150326797385621, "eval_loss": 0.35517746210098267, "eval_runtime": 7467.6922, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1400 },
    { "epoch": 0.9215686274509803, "grad_norm": 2.826345682144165, "learning_rate": 8.770927328036575e-05, "loss": 0.3097, "step": 1410 },
    { "epoch": 0.9281045751633987, "grad_norm": 2.172060012817383, "learning_rate": 8.745849232544681e-05, "loss": 0.3014, "step": 1420 },
    { "epoch": 0.934640522875817, "grad_norm": 2.7672016620635986, "learning_rate": 8.720554497597159e-05, "loss": 0.3057, "step": 1430 },
    { "epoch": 0.9411764705882353, "grad_norm": 6.170320510864258, "learning_rate": 8.695044586103296e-05, "loss": 0.3334, "step": 1440 },
    { "epoch": 0.9477124183006536, "grad_norm": 3.521531581878662, "learning_rate": 8.669320973417006e-05, "loss": 0.3057, "step": 1450 },
    { "epoch": 0.954248366013072, "grad_norm": 3.1676554679870605, "learning_rate": 8.643385147251515e-05, "loss": 0.2932, "step": 1460 },
    { "epoch": 0.9607843137254902, "grad_norm": 2.741800546646118, "learning_rate": 8.617238607593319e-05, "loss": 0.352, "step": 1470 },
    { "epoch": 0.9673202614379085, "grad_norm": 3.750753164291382, "learning_rate": 8.590882866615432e-05, "loss": 0.329, "step": 1480 },
    { "epoch": 0.9738562091503268, "grad_norm": 4.675954818725586, "learning_rate": 8.564319448589926e-05, "loss": 0.2993, "step": 1490 },
    { "epoch": 0.9803921568627451, "grad_norm": 3.0706264972686768, "learning_rate": 8.537549889799781e-05, "loss": 0.3422, "step": 1500 },
    { "epoch": 0.9803921568627451, "eval_loss": 0.35487601161003113, "eval_runtime": 7465.2366, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1500 },
    { "epoch": 0.9869281045751634, "grad_norm": 4.867375373840332, "learning_rate": 8.510575738450032e-05, "loss": 0.326, "step": 1510 },
    { "epoch": 0.9934640522875817, "grad_norm": 4.509498119354248, "learning_rate": 8.483398554578232e-05, "loss": 0.2879, "step": 1520 },
    { "epoch": 1.0, "grad_norm": 5.889903545379639, "learning_rate": 8.456019909964224e-05, "loss": 0.2913, "step": 1530 },
    { "epoch": 1.0065359477124183, "grad_norm": 3.161592960357666, "learning_rate": 8.428441388039238e-05, "loss": 0.3207, "step": 1540 },
    { "epoch": 1.0130718954248366, "grad_norm": 6.485883712768555, "learning_rate": 8.400664583794319e-05, "loss": 0.3892, "step": 1550 },
    { "epoch": 1.0196078431372548, "grad_norm": 2.0741546154022217, "learning_rate": 8.372691103688079e-05, "loss": 0.2883, "step": 1560 },
    { "epoch": 1.026143790849673, "grad_norm": 1.5439857244491577, "learning_rate": 8.34452256555378e-05, "loss": 0.3029, "step": 1570 },
    { "epoch": 1.0326797385620916, "grad_norm": 4.525278568267822, "learning_rate": 8.316160598505784e-05, "loss": 0.2775, "step": 1580 },
    { "epoch": 1.0392156862745099, "grad_norm": 3.212484121322632, "learning_rate": 8.28760684284532e-05, "loss": 0.3194, "step": 1590 },
    { "epoch": 1.0457516339869282, "grad_norm": 3.7706832885742188, "learning_rate": 8.25886294996562e-05, "loss": 0.2597, "step": 1600 },
    { "epoch": 1.0457516339869282, "eval_loss": 0.35163000226020813, "eval_runtime": 7465.7169, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1600 },
    { "epoch": 1.0522875816993464, "grad_norm": 3.6558139324188232, "learning_rate": 8.22993058225642e-05, "loss": 0.3301, "step": 1610 },
    { "epoch": 1.0588235294117647, "grad_norm": 2.815463066101074, "learning_rate": 8.200811413007807e-05, "loss": 0.2903, "step": 1620 },
    { "epoch": 1.065359477124183, "grad_norm": 6.215457439422607, "learning_rate": 8.171507126313451e-05, "loss": 0.3399, "step": 1630 },
    { "epoch": 1.0718954248366013, "grad_norm": 1.9962519407272339, "learning_rate": 8.142019416973199e-05, "loss": 0.3062, "step": 1640 },
    { "epoch": 1.0784313725490196, "grad_norm": 1.8813626766204834, "learning_rate": 8.112349990395065e-05, "loss": 0.3419, "step": 1650 },
    { "epoch": 1.0849673202614378, "grad_norm": 5.976492881774902, "learning_rate": 8.082500562496596e-05, "loss": 0.3135, "step": 1660 },
    { "epoch": 1.091503267973856, "grad_norm": 2.2381093502044678, "learning_rate": 8.052472859605631e-05, "loss": 0.3222, "step": 1670 },
    { "epoch": 1.0980392156862746, "grad_norm": 3.5524046421051025, "learning_rate": 8.02226861836046e-05, "loss": 0.2798, "step": 1680 },
    { "epoch": 1.1045751633986929, "grad_norm": 5.400121688842773, "learning_rate": 7.991889585609387e-05, "loss": 0.3218, "step": 1690 },
    { "epoch": 1.1111111111111112, "grad_norm": 37.609107971191406, "learning_rate": 7.961337518309704e-05, "loss": 0.3235, "step": 1700 },
    { "epoch": 1.1111111111111112, "eval_loss": 0.325325071811676, "eval_runtime": 7465.0207, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1700 },
    { "epoch": 1.1176470588235294, "grad_norm": 2.548970937728882, "learning_rate": 7.930614183426074e-05, "loss": 0.325, "step": 1710 },
    { "epoch": 1.1241830065359477, "grad_norm": 2.6757702827453613, "learning_rate": 7.89972135782834e-05, "loss": 0.2955, "step": 1720 },
    { "epoch": 1.130718954248366, "grad_norm": 2.190845012664795, "learning_rate": 7.868660828188765e-05, "loss": 0.3062, "step": 1730 },
    { "epoch": 1.1372549019607843, "grad_norm": 2.709989547729492, "learning_rate": 7.837434390878698e-05, "loss": 0.3231, "step": 1740 },
    { "epoch": 1.1437908496732025, "grad_norm": 2.8716742992401123, "learning_rate": 7.806043851864674e-05, "loss": 0.2853, "step": 1750 },
    { "epoch": 1.1503267973856208, "grad_norm": 4.456597805023193, "learning_rate": 7.774491026603985e-05, "loss": 0.3226, "step": 1760 },
    { "epoch": 1.156862745098039, "grad_norm": 13.620016098022461, "learning_rate": 7.742777739939666e-05, "loss": 0.2897, "step": 1770 },
    { "epoch": 1.1633986928104576, "grad_norm": 2.6009891033172607, "learning_rate": 7.710905825994962e-05, "loss": 0.2482, "step": 1780 },
    { "epoch": 1.1699346405228759, "grad_norm": 7.119050025939941, "learning_rate": 7.678877128067261e-05, "loss": 0.3552, "step": 1790 },
    { "epoch": 1.1764705882352942, "grad_norm": 2.4954652786254883, "learning_rate": 7.646693498521471e-05, "loss": 0.3148, "step": 1800 },
    { "epoch": 1.1764705882352942, "eval_loss": 0.31464409828186035, "eval_runtime": 7466.0649, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1800 },
    { "epoch": 1.1830065359477124, "grad_norm": 2.542064905166626, "learning_rate": 7.614356798682904e-05, "loss": 0.2973, "step": 1810 },
    { "epoch": 1.1895424836601307, "grad_norm": 2.8179709911346436, "learning_rate": 7.581868898729618e-05, "loss": 0.3049, "step": 1820 },
    { "epoch": 1.196078431372549, "grad_norm": 1.8893694877624512, "learning_rate": 7.549231677584262e-05, "loss": 0.2867, "step": 1830 },
    { "epoch": 1.2026143790849673, "grad_norm": 3.376964807510376, "learning_rate": 7.516447022805407e-05, "loss": 0.3599, "step": 1840 },
    { "epoch": 1.2091503267973855, "grad_norm": 1.8863316774368286, "learning_rate": 7.483516830478379e-05, "loss": 0.3283, "step": 1850 },
    { "epoch": 1.215686274509804, "grad_norm": 5.017312049865723, "learning_rate": 7.450443005105601e-05, "loss": 0.3335, "step": 1860 },
    { "epoch": 1.2222222222222223, "grad_norm": 1.3233927488327026, "learning_rate": 7.417227459496445e-05, "loss": 0.3031, "step": 1870 },
    { "epoch": 1.2287581699346406, "grad_norm": 2.100111484527588, "learning_rate": 7.383872114656611e-05, "loss": 0.3251, "step": 1880 },
    { "epoch": 1.2352941176470589, "grad_norm": 3.839590311050415, "learning_rate": 7.35037889967702e-05, "loss": 0.2726, "step": 1890 },
    { "epoch": 1.2418300653594772, "grad_norm": 3.1495704650878906, "learning_rate": 7.31674975162225e-05, "loss": 0.3502, "step": 1900 },
    { "epoch": 1.2418300653594772, "eval_loss": 0.3091997504234314, "eval_runtime": 7466.4145, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 1900 },
    { "epoch": 1.2483660130718954, "grad_norm": 5.134574890136719, "learning_rate": 7.282986615418503e-05, "loss": 0.2902, "step": 1910 },
    { "epoch": 1.2549019607843137, "grad_norm": 2.6781771183013916, "learning_rate": 7.249091443741126e-05, "loss": 0.2741, "step": 1920 },
    { "epoch": 1.261437908496732, "grad_norm": 6.247425079345703, "learning_rate": 7.215066196901676e-05, "loss": 0.3026, "step": 1930 },
    { "epoch": 1.2679738562091503, "grad_norm": 3.1725962162017822, "learning_rate": 7.180912842734548e-05, "loss": 0.3574, "step": 1940 },
    { "epoch": 1.2745098039215685, "grad_norm": 5.736270427703857, "learning_rate": 7.146633356483161e-05, "loss": 0.3166, "step": 1950 },
    { "epoch": 1.2810457516339868, "grad_norm": 1.851462960243225, "learning_rate": 7.11222972068573e-05, "loss": 0.2737, "step": 1960 },
    { "epoch": 1.287581699346405, "grad_norm": 2.9439609050750732, "learning_rate": 7.077703925060594e-05, "loss": 0.3556, "step": 1970 },
    { "epoch": 1.2941176470588236, "grad_norm": 3.4385366439819336, "learning_rate": 7.043057966391157e-05, "loss": 0.3269, "step": 1980 },
    { "epoch": 1.3006535947712419, "grad_norm": 3.6838247776031494, "learning_rate": 7.008293848410396e-05, "loss": 0.3342, "step": 1990 },
    { "epoch": 1.3071895424836601, "grad_norm": 1.6612685918807983, "learning_rate": 6.973413581684972e-05, "loss": 0.3009, "step": 2000 },
    { "epoch": 1.3071895424836601, "eval_loss": 0.33713921904563904, "eval_runtime": 7466.3969, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2000 },
    { "epoch": 1.3137254901960784, "grad_norm": 6.528134822845459, "learning_rate": 6.945427096600644e-05, "loss": 0.3152, "step": 2010 },
    { "epoch": 1.3202614379084967, "grad_norm": 1.521199345588684, "learning_rate": 6.913856280078387e-05, "loss": 0.3429, "step": 2020 },
    { "epoch": 1.326797385620915, "grad_norm": 1.8718252182006836, "learning_rate": 6.878672506701743e-05, "loss": 0.3259, "step": 2030 },
    { "epoch": 1.3333333333333333, "grad_norm": 2.5089643001556396, "learning_rate": 6.846914154947518e-05, "loss": 0.3482, "step": 2040 },
    { "epoch": 1.3398692810457518, "grad_norm": 3.0759692192077637, "learning_rate": 6.811525687459402e-05, "loss": 0.3382, "step": 2050 },
    { "epoch": 1.34640522875817, "grad_norm": 4.5103044509887695, "learning_rate": 6.776032451222086e-05, "loss": 0.3476, "step": 2060 },
    { "epoch": 1.3529411764705883, "grad_norm": 7.423872470855713, "learning_rate": 6.740436498970452e-05, "loss": 0.3198, "step": 2070 },
    { "epoch": 1.3594771241830066, "grad_norm": 2.7317986488342285, "learning_rate": 6.704739889379915e-05, "loss": 0.279, "step": 2080 },
    { "epoch": 1.3660130718954249, "grad_norm": 2.618377923965454, "learning_rate": 6.668944686947354e-05, "loss": 0.328, "step": 2090 },
    { "epoch": 1.3725490196078431, "grad_norm": 19.447980880737305, "learning_rate": 6.636646418781669e-05, "loss": 0.3247, "step": 2100 },
    { "epoch": 1.3725490196078431, "eval_loss": 0.3066463768482208, "eval_runtime": 7466.6542, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2100 },
    { "epoch": 1.3790849673202614, "grad_norm": 1.8498194217681885, "learning_rate": 6.600669597963677e-05, "loss": 0.2986, "step": 2110 },
    { "epoch": 1.3856209150326797, "grad_norm": 1.9933720827102661, "learning_rate": 6.564600203160773e-05, "loss": 0.3237, "step": 2120 },
    { "epoch": 1.392156862745098, "grad_norm": 3.7571139335632324, "learning_rate": 6.5284403204297e-05, "loss": 0.2923, "step": 2130 },
    { "epoch": 1.3986928104575163, "grad_norm": 10.47822380065918, "learning_rate": 6.49219204106053e-05, "loss": 0.3003, "step": 2140 },
    { "epoch": 1.4052287581699345, "grad_norm": 2.708207607269287, "learning_rate": 6.459494743075649e-05, "loss": 0.269, "step": 2150 },
    { "epoch": 1.4117647058823528, "grad_norm": 7.0187153816223145, "learning_rate": 6.423084289810528e-05, "loss": 0.2916, "step": 2160 },
    { "epoch": 1.4183006535947713, "grad_norm": 4.117656707763672, "learning_rate": 6.38659153312455e-05, "loss": 0.3612, "step": 2170 },
    { "epoch": 1.4248366013071896, "grad_norm": 4.099621295928955, "learning_rate": 6.350018583559398e-05, "loss": 0.3231, "step": 2180 },
    { "epoch": 1.4313725490196079, "grad_norm": 3.927912712097168, "learning_rate": 6.313367556294672e-05, "loss": 0.2905, "step": 2190 },
    { "epoch": 1.4379084967320261, "grad_norm": 2.0472779273986816, "learning_rate": 6.276640571025562e-05, "loss": 0.3047, "step": 2200 },
    { "epoch": 1.4379084967320261, "eval_loss": 0.310788094997406, "eval_runtime": 7464.5268, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2200 },
    { "epoch": 1.4444444444444444, "grad_norm": 3.7081167697906494, "learning_rate": 6.23983975184025e-05, "loss": 0.2777, "step": 2210 },
    { "epoch": 1.4509803921568627, "grad_norm": 2.031965494155884, "learning_rate": 6.202967227097073e-05, "loss": 0.2724, "step": 2220 },
    { "epoch": 1.457516339869281, "grad_norm": 2.594900369644165, "learning_rate": 6.166025129301422e-05, "loss": 0.3403, "step": 2230 },
    { "epoch": 1.4640522875816995, "grad_norm": 10.884799003601074, "learning_rate": 6.129015594982416e-05, "loss": 0.2926, "step": 2240 },
    { "epoch": 1.4705882352941178, "grad_norm": 3.915910243988037, "learning_rate": 6.091940764569331e-05, "loss": 0.3651, "step": 2250 },
    { "epoch": 1.477124183006536, "grad_norm": 16.43703269958496, "learning_rate": 6.054802782267815e-05, "loss": 0.3281, "step": 2260 },
    { "epoch": 1.4836601307189543, "grad_norm": 2.157759666442871, "learning_rate": 6.0176037959358775e-05, "loss": 0.3234, "step": 2270 },
    { "epoch": 1.4901960784313726, "grad_norm": 8.77623176574707, "learning_rate": 5.980345956959663e-05, "loss": 0.2683, "step": 2280 },
    { "epoch": 1.4967320261437909, "grad_norm": 10.245627403259277, "learning_rate": 5.943031420129036e-05, "loss": 0.3416, "step": 2290 },
    { "epoch": 1.5032679738562091, "grad_norm": 2.9165868759155273, "learning_rate": 5.905662343512952e-05, "loss": 0.3082, "step": 2300 },
    { "epoch": 1.5032679738562091, "eval_loss": 0.30869877338409424, "eval_runtime": 7463.6998, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2300 },
    { "epoch": 1.5098039215686274, "grad_norm": 2.973310947418213, "learning_rate": 5.868240888334653e-05, "loss": 0.3351, "step": 2310 },
    { "epoch": 1.5163398692810457, "grad_norm": 1.646573543548584, "learning_rate": 5.8307692188466656e-05, "loss": 0.303, "step": 2320 },
    { "epoch": 1.522875816993464, "grad_norm": 3.7888035774230957, "learning_rate": 5.7932495022056454e-05, "loss": 0.3052, "step": 2330 },
    { "epoch": 1.5294117647058822, "grad_norm": 4.961613655090332, "learning_rate": 5.755683908347026e-05, "loss": 0.3076, "step": 2340 },
    { "epoch": 1.5359477124183005, "grad_norm": 2.823336124420166, "learning_rate": 5.7180746098595325e-05, "loss": 0.2848, "step": 2350 },
    { "epoch": 1.5424836601307188, "grad_norm": 3.311382293701172, "learning_rate": 5.680423781859524e-05, "loss": 0.3028, "step": 2360 },
    { "epoch": 1.5490196078431373, "grad_norm": 1.4395604133605957, "learning_rate": 5.6427336018652024e-05, "loss": 0.2899, "step": 2370 },
    { "epoch": 1.5555555555555556, "grad_norm": 6.176699638366699, "learning_rate": 5.6050062496706744e-05, "loss": 0.3159, "step": 2380 },
    { "epoch": 1.5620915032679739, "grad_norm": 3.6819286346435547, "learning_rate": 5.5672439072198804e-05, "loss": 0.3084, "step": 2390 },
    { "epoch": 1.5686274509803921, "grad_norm": 1.9665669202804565, "learning_rate": 5.529448758480408e-05, "loss": 0.3224, "step": 2400 },
    { "epoch": 1.5686274509803921, "eval_loss": 0.30697837471961975, "eval_runtime": 7464.764, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2400 },
    { "epoch": 1.5751633986928104, "grad_norm": 1.5518563985824585, "learning_rate": 5.491622989317181e-05, "loss": 0.3173, "step": 2410 },
    { "epoch": 1.581699346405229, "grad_norm": 2.218691110610962, "learning_rate": 5.4537687873660427e-05, "loss": 0.3152, "step": 2420 },
    { "epoch": 1.5882352941176472, "grad_norm": 7.945935249328613, "learning_rate": 5.415888341907232e-05, "loss": 0.2788, "step": 2430 },
    { "epoch": 1.5947712418300655, "grad_norm": 2.2534475326538086, "learning_rate": 5.3779838437387696e-05, "loss": 0.326, "step": 2440 },
    { "epoch": 1.6013071895424837, "grad_norm": 2.623176097869873, "learning_rate": 5.340057485049751e-05, "loss": 0.2889, "step": 2450 },
    { "epoch": 1.607843137254902, "grad_norm": 1.6333633661270142, "learning_rate": 5.302111459293569e-05, "loss": 0.3171, "step": 2460 },
    { "epoch": 1.6143790849673203, "grad_norm": 7.508179664611816, "learning_rate": 5.2641479610610503e-05, "loss": 0.3068, "step": 2470 },
    { "epoch": 1.6209150326797386, "grad_norm": 4.800639629364014, "learning_rate": 5.226169185953532e-05, "loss": 0.2964, "step": 2480 },
    { "epoch": 1.6274509803921569, "grad_norm": 3.6136810779571533, "learning_rate": 5.1881773304558856e-05, "loss": 0.3202, "step": 2490 },
    { "epoch": 1.6339869281045751, "grad_norm": 1.746603012084961, "learning_rate": 5.153975292780853e-05, "loss": 0.2849, "step": 2500 },
    { "epoch": 1.6339869281045751, "eval_loss": 0.316631942987442, "eval_runtime": 7464.5174, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2500 },
    { "epoch": 1.6405228758169934, "grad_norm": 4.562976837158203, "learning_rate": 5.1159646384647754e-05, "loss": 0.2793, "step": 2510 },
    { "epoch": 1.6470588235294117, "grad_norm": 4.200897216796875, "learning_rate": 5.0779472773875356e-05, "loss": 0.3339, "step": 2520 },
    { "epoch": 1.65359477124183, "grad_norm": 2.1885018348693848, "learning_rate": 5.0399254082656075e-05, "loss": 0.3523, "step": 2530 },
    { "epoch": 1.6601307189542482, "grad_norm": 3.327165365219116, "learning_rate": 5.0019012300761826e-05, "loss": 0.3005, "step": 2540 },
    { "epoch": 1.6666666666666665, "grad_norm": 2.69273042678833, "learning_rate": 4.963876941929997e-05, "loss": 0.3143, "step": 2550 },
    { "epoch": 1.673202614379085, "grad_norm": 2.155317783355713, "learning_rate": 4.9258547429441454e-05, "loss": 0.3309, "step": 2560 },
    { "epoch": 1.6797385620915033, "grad_norm": 3.1310834884643555, "learning_rate": 4.887836832114898e-05, "loss": 0.3638, "step": 2570 },
    { "epoch": 1.6862745098039216, "grad_norm": 5.145089626312256, "learning_rate": 4.8498254081905217e-05, "loss": 0.3092, "step": 2580 },
    { "epoch": 1.6928104575163399, "grad_norm": 1.412678599357605, "learning_rate": 4.811822669544115e-05, "loss": 0.3013, "step": 2590 },
    { "epoch": 1.6993464052287581, "grad_norm": 50.4953727722168, "learning_rate": 4.7738308140464685e-05, "loss": 0.319, "step": 2600 },
    { "epoch": 1.6993464052287581, "eval_loss": 0.30734336376190186, "eval_runtime": 7463.6236, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2600 },
    { "epoch": 1.7058823529411766, "grad_norm": 2.8010504245758057, "learning_rate": 4.7358520389389515e-05, "loss": 0.2828, "step": 2610 },
    { "epoch": 1.712418300653595, "grad_norm": 3.2057955265045166, "learning_rate": 4.697888540706431e-05, "loss": 0.2932, "step": 2620 },
    { "epoch": 1.7189542483660132, "grad_norm": 5.941930294036865, "learning_rate": 4.65994251495025e-05, "loss": 0.3581, "step": 2630 },
    { "epoch": 1.7254901960784315, "grad_norm": 6.0155158042907715, "learning_rate": 4.622016156261232e-05, "loss": 0.287, "step": 2640 },
    { "epoch": 1.7320261437908497, "grad_norm": 2.266997814178467, "learning_rate": 4.5841116580927695e-05, "loss": 0.3033, "step": 2650 },
    { "epoch": 1.738562091503268, "grad_norm": 2.44047212600708, "learning_rate": 4.546231212633959e-05, "loss": 0.3032, "step": 2660 },
    { "epoch": 1.7450980392156863, "grad_norm": 10.935256004333496, "learning_rate": 4.5083770106828203e-05, "loss": 0.3318, "step": 2670 },
    { "epoch": 1.7516339869281046, "grad_norm": 8.668164253234863, "learning_rate": 4.470551241519594e-05, "loss": 0.2884, "step": 2680 },
    { "epoch": 1.7581699346405228, "grad_norm": 5.217986106872559, "learning_rate": 4.432756092780122e-05, "loss": 0.3411, "step": 2690 },
    { "epoch": 1.7647058823529411, "grad_norm": 2.592409610748291, "learning_rate": 4.394993750329328e-05, "loss": 0.319, "step": 2700 },
    { "epoch": 1.7647058823529411, "eval_loss": 0.31023845076560974, "eval_runtime": 7463.1227, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2700 },
    { "epoch": 1.7712418300653594, "grad_norm": 6.279813766479492, "learning_rate": 4.3572663981347974e-05, "loss": 0.3047, "step": 2710 },
    { "epoch": 1.7777777777777777, "grad_norm": 5.978330612182617, "learning_rate": 4.3195762181404764e-05, "loss": 0.3, "step": 2720 },
    { "epoch": 1.784313725490196, "grad_norm": 3.543389320373535, "learning_rate": 4.281925390140469e-05, "loss": 0.3143, "step": 2730 },
    { "epoch": 1.7908496732026142, "grad_norm": 2.8691015243530273, "learning_rate": 4.2443160916529735e-05, "loss": 0.3302, "step": 2740 },
    { "epoch": 1.7973856209150327, "grad_norm": 3.5990967750549316, "learning_rate": 4.206750497794355e-05, "loss": 0.3109, "step": 2750 },
    { "epoch": 1.803921568627451, "grad_norm": 2.575509786605835, "learning_rate": 4.169230781153334e-05, "loss": 0.2668, "step": 2760 },
    { "epoch": 1.8104575163398693, "grad_norm": 5.235141754150391, "learning_rate": 4.131759111665349e-05, "loss": 0.3043, "step": 2770 },
    { "epoch": 1.8169934640522876, "grad_norm": 9.062365531921387, "learning_rate": 4.09433765648705e-05, "loss": 0.3003, "step": 2780 },
    { "epoch": 1.8235294117647058, "grad_norm": 2.82456636428833, "learning_rate": 4.056968579870965e-05, "loss": 0.2778, "step": 2790 },
    { "epoch": 1.8300653594771243, "grad_norm": 6.167715549468994, "learning_rate": 4.0196540430403377e-05, "loss": 0.3419, "step": 2800 },
    { "epoch": 1.8300653594771243, "eval_loss": 0.3168606758117676, "eval_runtime": 7462.498, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2800 },
    { "epoch": 1.8366013071895426, "grad_norm": 4.378960609436035, "learning_rate": 3.982396204064124e-05, "loss": 0.2729, "step": 2810 },
    { "epoch": 1.843137254901961, "grad_norm": 2.968966245651245, "learning_rate": 3.945197217732186e-05, "loss": 0.3031, "step": 2820 },
    { "epoch": 1.8496732026143792, "grad_norm": 1.4191527366638184, "learning_rate": 3.908059235430671e-05, "loss": 0.3135, "step": 2830 },
    { "epoch": 1.8562091503267975, "grad_norm": 3.671318769454956, "learning_rate": 3.870984405017586e-05, "loss": 0.2538, "step": 2840 },
    { "epoch": 1.8627450980392157, "grad_norm": 7.276266098022461, "learning_rate": 3.8339748706985786e-05, "loss": 0.2591, "step": 2850 },
    { "epoch": 1.869281045751634, "grad_norm": 4.304380416870117, "learning_rate": 3.797032772902929e-05, "loss": 0.3706, "step": 2860 },
    { "epoch": 1.8758169934640523, "grad_norm": 6.998644828796387, "learning_rate": 3.760160248159752e-05, "loss": 0.325, "step": 2870 },
    { "epoch": 1.8823529411764706, "grad_norm": 8.353042602539062, "learning_rate": 3.723359428974439e-05, "loss": 0.3581, "step": 2880 },
    { "epoch": 1.8888888888888888, "grad_norm": 5.707284450531006, "learning_rate": 3.686632443705328e-05, "loss": 0.3016, "step": 2890 },
    { "epoch": 1.8954248366013071, "grad_norm": 2.3612542152404785, "learning_rate": 3.649981416440603e-05, "loss": 0.323, "step": 2900 },
    { "epoch": 1.8954248366013071, "eval_loss": 0.30723679065704346, "eval_runtime": 7462.5684, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 2900 },
    { "epoch": 1.9019607843137254, "grad_norm": 9.104491233825684, "learning_rate": 3.613408466875452e-05, "loss": 0.2923, "step": 2910 },
    { "epoch": 1.9084967320261437, "grad_norm": 2.203709602355957, "learning_rate": 3.576915710189475e-05, "loss": 0.3133, "step": 2920 },
    { "epoch": 1.915032679738562, "grad_norm": 2.8168318271636963, "learning_rate": 3.540505256924354e-05, "loss": 0.3283, "step": 2930 },
    { "epoch": 1.9215686274509802, "grad_norm": 2.40824294090271, "learning_rate": 3.5041792128617927e-05, "loss": 0.296, "step": 2940 },
    { "epoch": 1.9281045751633987, "grad_norm": 1.6007869243621826, "learning_rate": 3.467939678901726e-05, "loss": 0.2885, "step": 2950 },
    { "epoch": 1.934640522875817, "grad_norm": 2.6064846515655518, "learning_rate": 3.43178875094082e-05, "loss": 0.2826, "step": 2960 },
    { "epoch": 1.9411764705882353, "grad_norm": 3.1505980491638184, "learning_rate": 3.395728519751249e-05, "loss": 0.3147, "step": 2970 },
    { "epoch": 1.9477124183006536, "grad_norm": 2.0151422023773193, "learning_rate": 3.3597610708597845e-05, "loss": 0.2811, "step": 2980 },
    { "epoch": 1.954248366013072, "grad_norm": 5.63386869430542, "learning_rate": 3.3238884844271776e-05, "loss": 0.3105, "step": 2990 },
    { "epoch": 1.9607843137254903, "grad_norm": 7.039790153503418, "learning_rate": 3.288112835127849e-05, "loss": 0.2829, "step": 3000 },
    { "epoch": 1.9607843137254903, "eval_loss": 0.31224942207336426, "eval_runtime": 7463.0695, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 3000 },
    { "epoch": 1.9673202614379086, "grad_norm": 1.942665457725525, "learning_rate": 3.2524361920299105e-05, "loss": 0.3209, "step": 3010 },
    { "epoch": 1.973856209150327, "grad_norm": 2.1431195735931396, "learning_rate": 3.216860618475493e-05, "loss": 0.2936, "step": 3020 },
    { "epoch": 1.9803921568627452, "grad_norm": 12.91400146484375, "learning_rate": 3.181388171961415e-05, "loss": 0.3471, "step": 3030 },
    { "epoch": 1.9869281045751634, "grad_norm": 6.381521224975586, "learning_rate": 3.1460209040201965e-05, "loss": 0.3261, "step": 3040 },
    { "epoch": 1.9934640522875817, "grad_norm": 4.691803932189941, "learning_rate": 3.110760860101397e-05, "loss": 0.2674, "step": 3050 },
    { "epoch": 2.0, "grad_norm": 3.9562430381774902, "learning_rate": 3.0756100794533305e-05, "loss": 0.2879, "step": 3060 },
    { "epoch": 2.0065359477124183, "grad_norm": 4.3201141357421875, "learning_rate": 3.0405705950051144e-05, "loss": 0.2914, "step": 3070 },
    { "epoch": 2.0130718954248366, "grad_norm": 3.7568459510803223, "learning_rate": 3.0056444332491063e-05, "loss": 0.3116, "step": 3080 },
    { "epoch": 2.019607843137255, "grad_norm": 2.2305781841278076, "learning_rate": 2.970833614123696e-05, "loss": 0.2957, "step": 3090 },
    { "epoch": 2.026143790849673, "grad_norm": 2.8348355293273926, "learning_rate": 2.936140150896485e-05, "loss": 0.3093, "step": 3100 },
    { "epoch": 2.026143790849673, "eval_loss": 0.30744266510009766, "eval_runtime": 7463.4558, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 3100 },
    { "epoch": 2.0326797385620914, "grad_norm": 2.012289524078369, "learning_rate": 2.901566050047855e-05, "loss": 0.2565, "step": 3110 },
    { "epoch": 2.0392156862745097, "grad_norm": 3.6523494720458984, "learning_rate": 2.8671133111549155e-05, "loss": 0.2881, "step": 3120 },
    { "epoch": 2.045751633986928, "grad_norm": 2.896991491317749, "learning_rate": 2.8327839267758648e-05, "loss": 0.3225, "step": 3130 },
    { "epoch": 2.052287581699346, "grad_norm": 3.325443744659424, "learning_rate": 2.7985798823347507e-05, "loss": 0.2898, "step": 3140 },
    { "epoch": 2.0588235294117645, "grad_norm": 2.0076019763946533, "learning_rate": 2.7645031560066437e-05, "loss": 0.2988, "step": 3150 },
    { "epoch": 2.065359477124183, "grad_norm": 1.9376704692840576, "learning_rate": 2.730555718603228e-05, "loss": 0.2642, "step": 3160 },
    { "epoch": 2.0718954248366015, "grad_norm": 4.068740367889404, "learning_rate": 2.6967395334588234e-05, "loss": 0.2956, "step": 3170 },
    { "epoch": 2.0784313725490198, "grad_norm": 2.9397168159484863, "learning_rate": 2.6630565563168418e-05, "loss": 0.317, "step": 3180 },
    { "epoch": 2.084967320261438, "grad_norm": 2.2718117237091064, "learning_rate": 2.6295087352166643e-05, "loss": 0.2847, "step": 3190 },
    { "epoch": 2.0915032679738563, "grad_norm": 3.948641777038574, "learning_rate": 2.599432858399628e-05, "loss": 0.2799, "step": 3200 },
    { "epoch": 2.0915032679738563, "eval_loss": 0.306661993265152, "eval_runtime": 7463.0196, "eval_samples_per_second": 0.182, "eval_steps_per_second": 0.182, "step": 3200 },
    { "epoch": 2.0980392156862746, "grad_norm": 2.652907133102417, "learning_rate": 2.5661471725414854e-05, "loss": 0.2944, "step": 3210 },
    { "epoch": 2.104575163398693, "grad_norm": 2.062485694885254, "learning_rate": 2.533002247434479e-05, "loss": 0.2397, "step": 3220 },
    { "epoch": 2.111111111111111, "grad_norm": 1.6863266229629517, "learning_rate": 2.500000000000001e-05, "loss": 0.2616, "step": 3230 },
    { "epoch": 2.1176470588235294, "grad_norm": 4.490372180938721, "learning_rate": 2.4671423389077498e-05, "loss": 0.3018, "step": 3240 },
    { "epoch": 2.1241830065359477, "grad_norm": 2.2945055961608887, "learning_rate": 2.434431164465336e-05, "loss": 0.2904, "step": 3250 },
    { "epoch": 2.130718954248366, "grad_norm": 1.8708246946334839, "learning_rate": 2.401868368508387e-05, "loss": 0.2651, "step": 3260 },
    { "epoch": 2.1372549019607843, "grad_norm": 5.456343173980713, "learning_rate": 2.369455834291125e-05, "loss": 0.2885, "step": 3270 },
    { "epoch": 2.1437908496732025, "grad_norm": 2.5962464809417725, "learning_rate": 2.3371954363774552e-05, "loss": 0.2936, "step": 3280 },
    { "epoch": 2.150326797385621, "grad_norm": 3.175043821334839, "learning_rate": 2.3050890405325532e-05
|
"loss": 0.3827, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.156862745098039, |
|
"grad_norm": 1.3506354093551636, |
|
"learning_rate": 2.2731385036149516e-05, |
|
"loss": 0.2489, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.156862745098039, |
|
"eval_loss": 0.30653849244117737, |
|
"eval_runtime": 7462.3372, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.1633986928104574, |
|
"grad_norm": 3.176468849182129, |
|
"learning_rate": 2.2413456734691595e-05, |
|
"loss": 0.3081, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.1699346405228757, |
|
"grad_norm": 3.331033229827881, |
|
"learning_rate": 2.2097123888187827e-05, |
|
"loss": 0.3057, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.176470588235294, |
|
"grad_norm": 3.275542736053467, |
|
"learning_rate": 2.1782404791601903e-05, |
|
"loss": 0.2979, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.183006535947712, |
|
"grad_norm": 1.958678960800171, |
|
"learning_rate": 2.1469317646567013e-05, |
|
"loss": 0.2879, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.189542483660131, |
|
"grad_norm": 2.5814414024353027, |
|
"learning_rate": 2.1157880560333197e-05, |
|
"loss": 0.2733, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.196078431372549, |
|
"grad_norm": 3.4207053184509277, |
|
"learning_rate": 2.0848111544720117e-05, |
|
"loss": 0.3109, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.2026143790849675, |
|
"grad_norm": 2.685720205307007, |
|
"learning_rate": 2.0540028515075328e-05, |
|
"loss": 0.3195, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.2091503267973858, |
|
"grad_norm": 3.908967971801758, |
|
"learning_rate": 2.023364928923817e-05, |
|
"loss": 0.2651, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.215686274509804, |
|
"grad_norm": 2.786430597305298, |
|
"learning_rate": 1.9928991586509276e-05, |
|
"loss": 0.2687, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"grad_norm": 3.6411643028259277, |
|
"learning_rate": 1.9626073026625818e-05, |
|
"loss": 0.2846, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"eval_loss": 0.3210596442222595, |
|
"eval_runtime": 7462.9136, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.2287581699346406, |
|
"grad_norm": 2.521268129348755, |
|
"learning_rate": 1.9324911128742407e-05, |
|
"loss": 0.3536, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 2.235294117647059, |
|
"grad_norm": 2.3333940505981445, |
|
"learning_rate": 1.905538176438527e-05, |
|
"loss": 0.2823, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 2.241830065359477, |
|
"grad_norm": 3.3351223468780518, |
|
"learning_rate": 1.8757605424895984e-05, |
|
"loss": 0.2645, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 2.2483660130718954, |
|
"grad_norm": 2.780357599258423, |
|
"learning_rate": 1.8461635974828296e-05, |
|
"loss": 0.275, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 2.2549019607843137, |
|
"grad_norm": 3.99027419090271, |
|
"learning_rate": 1.8167490531438284e-05, |
|
"loss": 0.352, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.261437908496732, |
|
"grad_norm": 3.214839458465576, |
|
"learning_rate": 1.7875186106491448e-05, |
|
"loss": 0.2764, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 2.2679738562091503, |
|
"grad_norm": 3.3453619480133057, |
|
"learning_rate": 1.7584739605278832e-05, |
|
"loss": 0.3093, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 2.2745098039215685, |
|
"grad_norm": 1.7691558599472046, |
|
"learning_rate": 1.7296167825639326e-05, |
|
"loss": 0.2948, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 2.281045751633987, |
|
"grad_norm": 2.7585806846618652, |
|
"learning_rate": 1.7009487456988164e-05, |
|
"loss": 0.288, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.287581699346405, |
|
"grad_norm": 3.013615846633911, |
|
"learning_rate": 1.6724715079351687e-05, |
|
"loss": 0.2841, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.287581699346405, |
|
"eval_loss": 0.3018401563167572, |
|
"eval_runtime": 7463.5622, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.2941176470588234, |
|
"grad_norm": 5.529947280883789, |
|
"learning_rate": 1.6441867162408514e-05, |
|
"loss": 0.3024, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.3006535947712417, |
|
"grad_norm": 2.649116277694702, |
|
"learning_rate": 1.6160960064536908e-05, |
|
"loss": 0.319, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.30718954248366, |
|
"grad_norm": 5.389304161071777, |
|
"learning_rate": 1.5882010031868777e-05, |
|
"loss": 0.296, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.313725490196078, |
|
"grad_norm": 2.242767095565796, |
|
"learning_rate": 1.5605033197350095e-05, |
|
"loss": 0.2631, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.3202614379084965, |
|
"grad_norm": 6.1497111320495605, |
|
"learning_rate": 1.5330045579807802e-05, |
|
"loss": 0.2901, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.326797385620915, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5084270651401933e-05, |
|
"loss": 0.3096, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.3333333333333335, |
|
"grad_norm": 2.6111481189727783, |
|
"learning_rate": 1.4813106265206484e-05, |
|
"loss": 0.315, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.3398692810457518, |
|
"grad_norm": 1.8103219270706177, |
|
"learning_rate": 1.4543976896714795e-05, |
|
"loss": 0.3009, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.34640522875817, |
|
"grad_norm": 4.367152690887451, |
|
"learning_rate": 1.4276898110899262e-05, |
|
"loss": 0.2815, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.3529411764705883, |
|
"grad_norm": 5.697513580322266, |
|
"learning_rate": 1.4011885354137754e-05, |
|
"loss": 0.3119, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.3529411764705883, |
|
"eval_loss": 0.30415767431259155, |
|
"eval_runtime": 7463.8583, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.3594771241830066, |
|
"grad_norm": 2.6390066146850586, |
|
"learning_rate": 1.3748953953320371e-05, |
|
"loss": 0.3065, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.366013071895425, |
|
"grad_norm": 1.9461013078689575, |
|
"learning_rate": 1.3488119114962884e-05, |
|
"loss": 0.2627, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.372549019607843, |
|
"grad_norm": 2.6425626277923584, |
|
"learning_rate": 1.322939592432738e-05, |
|
"loss": 0.2935, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.3790849673202614, |
|
"grad_norm": 3.9877243041992188, |
|
"learning_rate": 1.297279934454978e-05, |
|
"loss": 0.2721, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.3856209150326797, |
|
"grad_norm": 2.479541778564453, |
|
"learning_rate": 1.271834421577443e-05, |
|
"loss": 0.2705, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.392156862745098, |
|
"grad_norm": 3.324650526046753, |
|
"learning_rate": 1.2466045254295871e-05, |
|
"loss": 0.2933, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.3986928104575163, |
|
"grad_norm": 3.598247766494751, |
|
"learning_rate": 1.2215917051707676e-05, |
|
"loss": 0.2712, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.4052287581699345, |
|
"grad_norm": 1.403212070465088, |
|
"learning_rate": 1.1967974074058647e-05, |
|
"loss": 0.3371, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.411764705882353, |
|
"grad_norm": 20.876041412353516, |
|
"learning_rate": 1.1722230661016043e-05, |
|
"loss": 0.2962, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.418300653594771, |
|
"grad_norm": 2.889564037322998, |
|
"learning_rate": 1.147870102503636e-05, |
|
"loss": 0.2978, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.418300653594771, |
|
"eval_loss": 0.30770230293273926, |
|
"eval_runtime": 7462.6718, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.4248366013071894, |
|
"grad_norm": 2.3619654178619385, |
|
"learning_rate": 1.1237399250543307e-05, |
|
"loss": 0.2689, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.431372549019608, |
|
"grad_norm": 3.7691731452941895, |
|
"learning_rate": 1.0998339293113252e-05, |
|
"loss": 0.274, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.4379084967320264, |
|
"grad_norm": 2.3495209217071533, |
|
"learning_rate": 1.0761534978668152e-05, |
|
"loss": 0.2776, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.4444444444444446, |
|
"grad_norm": 1.8520164489746094, |
|
"learning_rate": 1.0527000002675853e-05, |
|
"loss": 0.2815, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.450980392156863, |
|
"grad_norm": 2.9609193801879883, |
|
"learning_rate": 1.0294747929358073e-05, |
|
"loss": 0.291, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.457516339869281, |
|
"grad_norm": 3.36863112449646, |
|
"learning_rate": 1.0064792190905914e-05, |
|
"loss": 0.2957, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.4640522875816995, |
|
"grad_norm": 2.343014717102051, |
|
"learning_rate": 9.837146086703014e-06, |
|
"loss": 0.2504, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.4705882352941178, |
|
"grad_norm": 3.688849687576294, |
|
"learning_rate": 9.61182278255639e-06, |
|
"loss": 0.2479, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.477124183006536, |
|
"grad_norm": 2.4627552032470703, |
|
"learning_rate": 9.388835309934984e-06, |
|
"loss": 0.3038, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.4836601307189543, |
|
"grad_norm": 2.27083683013916, |
|
"learning_rate": 9.168196565216008e-06, |
|
"loss": 0.3068, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.4836601307189543, |
|
"eval_loss": 0.3110058605670929, |
|
"eval_runtime": 7463.183, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.4901960784313726, |
|
"grad_norm": 2.4768147468566895, |
|
"learning_rate": 8.949919308939082e-06, |
|
"loss": 0.2489, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.496732026143791, |
|
"grad_norm": 14.81464958190918, |
|
"learning_rate": 8.734016165068226e-06, |
|
"loss": 0.2852, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.503267973856209, |
|
"grad_norm": 5.01676607131958, |
|
"learning_rate": 8.520499620261802e-06, |
|
"loss": 0.2511, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.5098039215686274, |
|
"grad_norm": 3.20218825340271, |
|
"learning_rate": 8.309382023150286e-06, |
|
"loss": 0.245, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.5163398692810457, |
|
"grad_norm": 3.2415478229522705, |
|
"learning_rate": 8.100675583622137e-06, |
|
"loss": 0.2736, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.522875816993464, |
|
"grad_norm": 4.451015472412109, |
|
"learning_rate": 7.894392372117627e-06, |
|
"loss": 0.3537, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.5294117647058822, |
|
"grad_norm": 3.3448715209960938, |
|
"learning_rate": 7.690544318930765e-06, |
|
"loss": 0.2854, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.5359477124183005, |
|
"grad_norm": 2.145322561264038, |
|
"learning_rate": 7.489143213519301e-06, |
|
"loss": 0.2923, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.542483660130719, |
|
"grad_norm": 2.5505619049072266, |
|
"learning_rate": 7.290200703822897e-06, |
|
"loss": 0.2938, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.549019607843137, |
|
"grad_norm": 2.9079599380493164, |
|
"learning_rate": 7.093728295589475e-06, |
|
"loss": 0.3237, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.549019607843137, |
|
"eval_loss": 0.30904993414878845, |
|
"eval_runtime": 7464.5061, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.5555555555555554, |
|
"grad_norm": 1.6857258081436157, |
|
"learning_rate": 6.899737351709757e-06, |
|
"loss": 0.2478, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.5620915032679736, |
|
"grad_norm": 4.590330600738525, |
|
"learning_rate": 6.70823909156017e-06, |
|
"loss": 0.2728, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.568627450980392, |
|
"grad_norm": 1.7133220434188843, |
|
"learning_rate": 6.51924459035389e-06, |
|
"loss": 0.2144, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.57516339869281, |
|
"grad_norm": 5.041657447814941, |
|
"learning_rate": 6.332764778500361e-06, |
|
"loss": 0.3144, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.581699346405229, |
|
"grad_norm": 3.0704329013824463, |
|
"learning_rate": 6.148810440973168e-06, |
|
"loss": 0.2795, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.588235294117647, |
|
"grad_norm": 3.937464714050293, |
|
"learning_rate": 5.967392216686207e-06, |
|
"loss": 0.2751, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.5947712418300655, |
|
"grad_norm": 2.519258975982666, |
|
"learning_rate": 5.788520597878477e-06, |
|
"loss": 0.3139, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.6013071895424837, |
|
"grad_norm": 1.8165565729141235, |
|
"learning_rate": 5.6122059295072085e-06, |
|
"loss": 0.2162, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.607843137254902, |
|
"grad_norm": 4.116787433624268, |
|
"learning_rate": 5.438458408649588e-06, |
|
"loss": 0.3408, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.6143790849673203, |
|
"grad_norm": 2.385406494140625, |
|
"learning_rate": 5.26728808391303e-06, |
|
"loss": 0.2811, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.6143790849673203, |
|
"eval_loss": 0.31030258536338806, |
|
"eval_runtime": 7463.2353, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.6209150326797386, |
|
"grad_norm": 3.6135222911834717, |
|
"learning_rate": 5.098704854853986e-06, |
|
"loss": 0.2767, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 2.627450980392157, |
|
"grad_norm": 3.542980670928955, |
|
"learning_rate": 4.932718471405445e-06, |
|
"loss": 0.2658, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 2.633986928104575, |
|
"grad_norm": 3.6396703720092773, |
|
"learning_rate": 4.769338533313011e-06, |
|
"loss": 0.3349, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 2.6405228758169934, |
|
"grad_norm": 2.3764588832855225, |
|
"learning_rate": 4.608574489579759e-06, |
|
"loss": 0.3037, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 2.6470588235294117, |
|
"grad_norm": 2.5128707885742188, |
|
"learning_rate": 4.4504356379196925e-06, |
|
"loss": 0.2574, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.65359477124183, |
|
"grad_norm": 1.1791373491287231, |
|
"learning_rate": 4.294931124220069e-06, |
|
"loss": 0.2541, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 2.6601307189542482, |
|
"grad_norm": 3.077044725418091, |
|
"learning_rate": 4.142069942012422e-06, |
|
"loss": 0.3129, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 1.5024341344833374, |
|
"learning_rate": 3.991860931952435e-06, |
|
"loss": 0.2796, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 2.6732026143790852, |
|
"grad_norm": 3.4516193866729736, |
|
"learning_rate": 3.844312781308629e-06, |
|
"loss": 0.3117, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 2.6797385620915035, |
|
"grad_norm": 7.4698381423950195, |
|
"learning_rate": 3.69943402345998e-06, |
|
"loss": 0.2585, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.6797385620915035, |
|
"eval_loss": 0.3094639480113983, |
|
"eval_runtime": 7463.506, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.686274509803922, |
|
"grad_norm": 4.485745429992676, |
|
"learning_rate": 3.5572330374023465e-06, |
|
"loss": 0.2691, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 2.69281045751634, |
|
"grad_norm": 2.9982385635375977, |
|
"learning_rate": 3.4177180472639014e-06, |
|
"loss": 0.3215, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.6993464052287583, |
|
"grad_norm": 3.644282341003418, |
|
"learning_rate": 3.280897121829485e-06, |
|
"loss": 0.2615, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.7058823529411766, |
|
"grad_norm": 2.4241082668304443, |
|
"learning_rate": 3.146778174073972e-06, |
|
"loss": 0.2638, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.712418300653595, |
|
"grad_norm": 3.6460297107696533, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 0.2446, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.718954248366013, |
|
"grad_norm": 3.8150696754455566, |
|
"learning_rate": 2.886677081712319e-06, |
|
"loss": 0.3122, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.7254901960784315, |
|
"grad_norm": 2.6598055362701416, |
|
"learning_rate": 2.7607099799323965e-06, |
|
"loss": 0.3118, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.7320261437908497, |
|
"grad_norm": 4.361299514770508, |
|
"learning_rate": 2.637474940613821e-06, |
|
"loss": 0.2776, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.738562091503268, |
|
"grad_norm": 11.310445785522461, |
|
"learning_rate": 2.5169790909980174e-06, |
|
"loss": 0.3527, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.7450980392156863, |
|
"grad_norm": 3.9927046298980713, |
|
"learning_rate": 2.3992293999066385e-06, |
|
"loss": 0.2911, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.7450980392156863, |
|
"eval_loss": 0.31082937121391296, |
|
"eval_runtime": 7463.9458, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.7516339869281046, |
|
"grad_norm": 2.656670570373535, |
|
"learning_rate": 2.2842326773385438e-06, |
|
"loss": 0.2853, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.758169934640523, |
|
"grad_norm": 9.812518119812012, |
|
"learning_rate": 2.17199557407593e-06, |
|
"loss": 0.2622, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.764705882352941, |
|
"grad_norm": 4.411550521850586, |
|
"learning_rate": 2.0625245812996797e-06, |
|
"loss": 0.2961, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.7712418300653594, |
|
"grad_norm": 4.210736274719238, |
|
"learning_rate": 1.955826030213964e-06, |
|
"loss": 0.3131, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.7777777777777777, |
|
"grad_norm": 10.433571815490723, |
|
"learning_rate": 1.8519060916800668e-06, |
|
"loss": 0.2771, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.784313725490196, |
|
"grad_norm": 2.862966775894165, |
|
"learning_rate": 1.7507707758595172e-06, |
|
"loss": 0.2763, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.7908496732026142, |
|
"grad_norm": 6.950551509857178, |
|
"learning_rate": 1.6524259318664692e-06, |
|
"loss": 0.3239, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.7973856209150325, |
|
"grad_norm": 4.454069137573242, |
|
"learning_rate": 1.5568772474294224e-06, |
|
"loss": 0.3074, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.803921568627451, |
|
"grad_norm": 3.450611114501953, |
|
"learning_rate": 1.4641302485622953e-06, |
|
"loss": 0.3235, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.810457516339869, |
|
"grad_norm": 3.067033290863037, |
|
"learning_rate": 1.3741902992448152e-06, |
|
"loss": 0.3288, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.810457516339869, |
|
"eval_loss": 0.3071349561214447, |
|
"eval_runtime": 7463.7911, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.8169934640522873, |
|
"grad_norm": 4.035493850708008, |
|
"learning_rate": 1.2870626011123044e-06, |
|
"loss": 0.2968, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.8235294117647056, |
|
"grad_norm": 1.9869184494018555, |
|
"learning_rate": 1.2027521931548214e-06, |
|
"loss": 0.3015, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.8300653594771243, |
|
"grad_norm": 3.0594305992126465, |
|
"learning_rate": 1.1212639514257829e-06, |
|
"loss": 0.3119, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.8366013071895426, |
|
"grad_norm": 3.2473435401916504, |
|
"learning_rate": 1.042602588759889e-06, |
|
"loss": 0.3069, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.843137254901961, |
|
"grad_norm": 4.640011310577393, |
|
"learning_rate": 9.667726545006417e-07, |
|
"loss": 0.3186, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.849673202614379, |
|
"grad_norm": 1.6823493242263794, |
|
"learning_rate": 8.937785342371662e-07, |
|
"loss": 0.2628, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.8562091503267975, |
|
"grad_norm": 2.6158905029296875, |
|
"learning_rate": 8.236244495505974e-07, |
|
"loss": 0.296, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.8627450980392157, |
|
"grad_norm": 1.7890446186065674, |
|
"learning_rate": 7.563144577699355e-07, |
|
"loss": 0.3062, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.869281045751634, |
|
"grad_norm": 11.531109809875488, |
|
"learning_rate": 6.918524517373903e-07, |
|
"loss": 0.3105, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.8758169934640523, |
|
"grad_norm": 2.951564073562622, |
|
"learning_rate": 6.302421595832164e-07, |
|
"loss": 0.2993, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.8758169934640523, |
|
"eval_loss": 0.3057985007762909, |
|
"eval_runtime": 7464.7786, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.8823529411764706, |
|
"grad_norm": 5.375591278076172, |
|
"learning_rate": 5.714871445101356e-07, |
|
"loss": 0.2902, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.888888888888889, |
|
"grad_norm": 2.2659952640533447, |
|
"learning_rate": 5.155908045872349e-07, |
|
"loss": 0.2578, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.895424836601307, |
|
"grad_norm": 3.643691062927246, |
|
"learning_rate": 4.625563725534465e-07, |
|
"loss": 0.26, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.9019607843137254, |
|
"grad_norm": 3.761317253112793, |
|
"learning_rate": 4.1238691563058594e-07, |
|
"loss": 0.3239, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.9084967320261437, |
|
"grad_norm": 2.697275161743164, |
|
"learning_rate": 3.6508533534596046e-07, |
|
"loss": 0.2802, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.915032679738562, |
|
"grad_norm": 4.7579450607299805, |
|
"learning_rate": 3.20654367364559e-07, |
|
"loss": 0.2347, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.9215686274509802, |
|
"grad_norm": 14.727649688720703, |
|
"learning_rate": 2.7909658133083994e-07, |
|
"loss": 0.3004, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.928104575163399, |
|
"grad_norm": 10.497050285339355, |
|
"learning_rate": 2.404143807201109e-07, |
|
"loss": 0.3049, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.9346405228758172, |
|
"grad_norm": 3.075261116027832, |
|
"learning_rate": 2.0461000269953456e-07, |
|
"loss": 0.3362, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.9411764705882355, |
|
"grad_norm": 2.50616192817688, |
|
"learning_rate": 1.7168551799872645e-07, |
|
"loss": 0.322, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.9411764705882355, |
|
"eval_loss": 0.3057360351085663, |
|
"eval_runtime": 7463.9401, |
|
"eval_samples_per_second": 0.182, |
|
"eval_steps_per_second": 0.182, |
|
"step": 4500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4590, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 1.4730845079567021e+20, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |