{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.25,
"eval_steps": 500,
"global_step": 21420,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0010504201680672268,
"grad_norm": 22.625,
"learning_rate": 6.000000000000001e-08,
"loss": 4.9287,
"step": 10
},
{
"epoch": 0.0021008403361344537,
"grad_norm": 93.0,
"learning_rate": 1.2666666666666666e-07,
"loss": 4.58,
"step": 20
},
{
"epoch": 0.0031512605042016808,
"grad_norm": 9.6875,
"learning_rate": 1.9333333333333337e-07,
"loss": 4.8248,
"step": 30
},
{
"epoch": 0.004201680672268907,
"grad_norm": 8.1875,
"learning_rate": 2.6e-07,
"loss": 5.5906,
"step": 40
},
{
"epoch": 0.005252100840336135,
"grad_norm": 9.0625,
"learning_rate": 3.266666666666667e-07,
"loss": 5.144,
"step": 50
},
{
"epoch": 0.0063025210084033615,
"grad_norm": 8.5,
"learning_rate": 3.9333333333333336e-07,
"loss": 5.3149,
"step": 60
},
{
"epoch": 0.007352941176470588,
"grad_norm": 368.0,
"learning_rate": 4.6000000000000004e-07,
"loss": 5.0195,
"step": 70
},
{
"epoch": 0.008403361344537815,
"grad_norm": 292.0,
"learning_rate": 5.266666666666667e-07,
"loss": 4.943,
"step": 80
},
{
"epoch": 0.009453781512605041,
"grad_norm": 8.625,
"learning_rate": 5.933333333333334e-07,
"loss": 5.0446,
"step": 90
},
{
"epoch": 0.01050420168067227,
"grad_norm": 1456.0,
"learning_rate": 6.6e-07,
"loss": 5.1314,
"step": 100
},
{
"epoch": 0.011554621848739496,
"grad_norm": 13.0625,
"learning_rate": 7.266666666666668e-07,
"loss": 4.6218,
"step": 110
},
{
"epoch": 0.012605042016806723,
"grad_norm": 249.0,
"learning_rate": 7.933333333333335e-07,
"loss": 5.4531,
"step": 120
},
{
"epoch": 0.01365546218487395,
"grad_norm": 10.25,
"learning_rate": 8.6e-07,
"loss": 5.4136,
"step": 130
},
{
"epoch": 0.014705882352941176,
"grad_norm": 13.1875,
"learning_rate": 9.266666666666667e-07,
"loss": 5.5019,
"step": 140
},
{
"epoch": 0.015756302521008403,
"grad_norm": 26.25,
"learning_rate": 9.933333333333333e-07,
"loss": 4.987,
"step": 150
},
{
"epoch": 0.01680672268907563,
"grad_norm": 11.0625,
"learning_rate": 1.06e-06,
"loss": 5.0357,
"step": 160
},
{
"epoch": 0.017857142857142856,
"grad_norm": 8.3125,
"learning_rate": 1.1266666666666667e-06,
"loss": 5.3842,
"step": 170
},
{
"epoch": 0.018907563025210083,
"grad_norm": 8.125,
"learning_rate": 1.1933333333333335e-06,
"loss": 5.0255,
"step": 180
},
{
"epoch": 0.01995798319327731,
"grad_norm": 9.6875,
"learning_rate": 1.26e-06,
"loss": 4.9036,
"step": 190
},
{
"epoch": 0.02100840336134454,
"grad_norm": 11.1875,
"learning_rate": 1.3266666666666667e-06,
"loss": 5.2209,
"step": 200
},
{
"epoch": 0.022058823529411766,
"grad_norm": 9.5,
"learning_rate": 1.3933333333333335e-06,
"loss": 4.6235,
"step": 210
},
{
"epoch": 0.023109243697478993,
"grad_norm": 17.25,
"learning_rate": 1.46e-06,
"loss": 5.4189,
"step": 220
},
{
"epoch": 0.02415966386554622,
"grad_norm": 12.6875,
"learning_rate": 1.526666666666667e-06,
"loss": 5.3731,
"step": 230
},
{
"epoch": 0.025210084033613446,
"grad_norm": 17.25,
"learning_rate": 1.5933333333333335e-06,
"loss": 5.1251,
"step": 240
},
{
"epoch": 0.026260504201680673,
"grad_norm": 9.8125,
"learning_rate": 1.6600000000000002e-06,
"loss": 4.694,
"step": 250
},
{
"epoch": 0.0273109243697479,
"grad_norm": 16.5,
"learning_rate": 1.7266666666666667e-06,
"loss": 5.1736,
"step": 260
},
{
"epoch": 0.028361344537815126,
"grad_norm": 16.875,
"learning_rate": 1.7933333333333337e-06,
"loss": 5.0188,
"step": 270
},
{
"epoch": 0.029411764705882353,
"grad_norm": 20.375,
"learning_rate": 1.8600000000000002e-06,
"loss": 4.5767,
"step": 280
},
{
"epoch": 0.03046218487394958,
"grad_norm": 35.0,
"learning_rate": 1.926666666666667e-06,
"loss": 4.9608,
"step": 290
},
{
"epoch": 0.031512605042016806,
"grad_norm": 17.125,
"learning_rate": 1.9933333333333334e-06,
"loss": 5.0574,
"step": 300
},
{
"epoch": 0.032563025210084036,
"grad_norm": 13.375,
"learning_rate": 2.06e-06,
"loss": 5.245,
"step": 310
},
{
"epoch": 0.03361344537815126,
"grad_norm": 19.25,
"learning_rate": 2.126666666666667e-06,
"loss": 5.2969,
"step": 320
},
{
"epoch": 0.03466386554621849,
"grad_norm": 10.3125,
"learning_rate": 2.1933333333333332e-06,
"loss": 4.9717,
"step": 330
},
{
"epoch": 0.03571428571428571,
"grad_norm": 18.375,
"learning_rate": 2.2600000000000004e-06,
"loss": 4.6562,
"step": 340
},
{
"epoch": 0.03676470588235294,
"grad_norm": 11.5625,
"learning_rate": 2.3266666666666667e-06,
"loss": 4.7494,
"step": 350
},
{
"epoch": 0.037815126050420166,
"grad_norm": 17.0,
"learning_rate": 2.3933333333333334e-06,
"loss": 5.1655,
"step": 360
},
{
"epoch": 0.038865546218487396,
"grad_norm": 14.4375,
"learning_rate": 2.46e-06,
"loss": 5.3803,
"step": 370
},
{
"epoch": 0.03991596638655462,
"grad_norm": 20.625,
"learning_rate": 2.526666666666667e-06,
"loss": 4.8406,
"step": 380
},
{
"epoch": 0.04096638655462185,
"grad_norm": 27.25,
"learning_rate": 2.5933333333333336e-06,
"loss": 4.5881,
"step": 390
},
{
"epoch": 0.04201680672268908,
"grad_norm": 408.0,
"learning_rate": 2.6600000000000004e-06,
"loss": 5.4696,
"step": 400
},
{
"epoch": 0.0430672268907563,
"grad_norm": 15.125,
"learning_rate": 2.726666666666667e-06,
"loss": 5.1682,
"step": 410
},
{
"epoch": 0.04411764705882353,
"grad_norm": 14.1875,
"learning_rate": 2.7933333333333334e-06,
"loss": 4.4683,
"step": 420
},
{
"epoch": 0.045168067226890755,
"grad_norm": 32.5,
"learning_rate": 2.86e-06,
"loss": 5.496,
"step": 430
},
{
"epoch": 0.046218487394957986,
"grad_norm": 23.375,
"learning_rate": 2.9266666666666673e-06,
"loss": 4.9379,
"step": 440
},
{
"epoch": 0.04726890756302521,
"grad_norm": 14.0625,
"learning_rate": 2.9933333333333336e-06,
"loss": 5.0653,
"step": 450
},
{
"epoch": 0.04831932773109244,
"grad_norm": 26.625,
"learning_rate": 3.0600000000000003e-06,
"loss": 4.5886,
"step": 460
},
{
"epoch": 0.04936974789915966,
"grad_norm": 24.125,
"learning_rate": 3.1266666666666667e-06,
"loss": 5.2361,
"step": 470
},
{
"epoch": 0.05042016806722689,
"grad_norm": 31.5,
"learning_rate": 3.193333333333334e-06,
"loss": 4.8335,
"step": 480
},
{
"epoch": 0.051470588235294115,
"grad_norm": 13.5625,
"learning_rate": 3.2600000000000006e-06,
"loss": 4.671,
"step": 490
},
{
"epoch": 0.052521008403361345,
"grad_norm": 22.25,
"learning_rate": 3.326666666666667e-06,
"loss": 4.9443,
"step": 500
},
{
"epoch": 0.05357142857142857,
"grad_norm": 19.375,
"learning_rate": 3.3933333333333336e-06,
"loss": 4.8105,
"step": 510
},
{
"epoch": 0.0546218487394958,
"grad_norm": 15.5,
"learning_rate": 3.46e-06,
"loss": 5.0683,
"step": 520
},
{
"epoch": 0.05567226890756303,
"grad_norm": 28.125,
"learning_rate": 3.526666666666667e-06,
"loss": 4.3196,
"step": 530
},
{
"epoch": 0.05672268907563025,
"grad_norm": 18.25,
"learning_rate": 3.593333333333334e-06,
"loss": 4.8723,
"step": 540
},
{
"epoch": 0.05777310924369748,
"grad_norm": 29.25,
"learning_rate": 3.66e-06,
"loss": 4.8467,
"step": 550
},
{
"epoch": 0.058823529411764705,
"grad_norm": 19.25,
"learning_rate": 3.726666666666667e-06,
"loss": 4.4188,
"step": 560
},
{
"epoch": 0.059873949579831935,
"grad_norm": 20.75,
"learning_rate": 3.793333333333334e-06,
"loss": 4.999,
"step": 570
},
{
"epoch": 0.06092436974789916,
"grad_norm": 19.25,
"learning_rate": 3.86e-06,
"loss": 5.0784,
"step": 580
},
{
"epoch": 0.06197478991596639,
"grad_norm": 25.0,
"learning_rate": 3.926666666666667e-06,
"loss": 4.2984,
"step": 590
},
{
"epoch": 0.06302521008403361,
"grad_norm": 21.875,
"learning_rate": 3.993333333333334e-06,
"loss": 4.6611,
"step": 600
},
{
"epoch": 0.06407563025210083,
"grad_norm": 21.75,
"learning_rate": 4.060000000000001e-06,
"loss": 4.5503,
"step": 610
},
{
"epoch": 0.06512605042016807,
"grad_norm": 31.75,
"learning_rate": 4.126666666666667e-06,
"loss": 4.2121,
"step": 620
},
{
"epoch": 0.0661764705882353,
"grad_norm": 23.375,
"learning_rate": 4.1933333333333336e-06,
"loss": 4.3586,
"step": 630
},
{
"epoch": 0.06722689075630252,
"grad_norm": 27.0,
"learning_rate": 4.26e-06,
"loss": 4.3319,
"step": 640
},
{
"epoch": 0.06827731092436974,
"grad_norm": 108.5,
"learning_rate": 4.326666666666667e-06,
"loss": 4.4466,
"step": 650
},
{
"epoch": 0.06932773109243698,
"grad_norm": 39.75,
"learning_rate": 4.393333333333334e-06,
"loss": 4.1232,
"step": 660
},
{
"epoch": 0.0703781512605042,
"grad_norm": 30.75,
"learning_rate": 4.4600000000000005e-06,
"loss": 4.2004,
"step": 670
},
{
"epoch": 0.07142857142857142,
"grad_norm": 31.875,
"learning_rate": 4.526666666666667e-06,
"loss": 3.6652,
"step": 680
},
{
"epoch": 0.07247899159663866,
"grad_norm": 30.625,
"learning_rate": 4.593333333333333e-06,
"loss": 4.6072,
"step": 690
},
{
"epoch": 0.07352941176470588,
"grad_norm": 28.25,
"learning_rate": 4.66e-06,
"loss": 3.615,
"step": 700
},
{
"epoch": 0.07457983193277311,
"grad_norm": 44.5,
"learning_rate": 4.7266666666666674e-06,
"loss": 3.7423,
"step": 710
},
{
"epoch": 0.07563025210084033,
"grad_norm": 51.0,
"learning_rate": 4.793333333333334e-06,
"loss": 3.2943,
"step": 720
},
{
"epoch": 0.07668067226890757,
"grad_norm": 37.25,
"learning_rate": 4.86e-06,
"loss": 2.8386,
"step": 730
},
{
"epoch": 0.07773109243697479,
"grad_norm": 34.75,
"learning_rate": 4.926666666666667e-06,
"loss": 2.6554,
"step": 740
},
{
"epoch": 0.07878151260504201,
"grad_norm": 25.75,
"learning_rate": 4.9933333333333335e-06,
"loss": 3.8282,
"step": 750
},
{
"epoch": 0.07983193277310924,
"grad_norm": 94.0,
"learning_rate": 5.060000000000001e-06,
"loss": 3.9544,
"step": 760
},
{
"epoch": 0.08088235294117647,
"grad_norm": 36.75,
"learning_rate": 5.126666666666668e-06,
"loss": 3.3941,
"step": 770
},
{
"epoch": 0.0819327731092437,
"grad_norm": 20.5,
"learning_rate": 5.193333333333333e-06,
"loss": 2.5309,
"step": 780
},
{
"epoch": 0.08298319327731092,
"grad_norm": 11.125,
"learning_rate": 5.2600000000000005e-06,
"loss": 2.4891,
"step": 790
},
{
"epoch": 0.08403361344537816,
"grad_norm": 65.0,
"learning_rate": 5.326666666666667e-06,
"loss": 2.4388,
"step": 800
},
{
"epoch": 0.08508403361344538,
"grad_norm": 21.0,
"learning_rate": 5.393333333333334e-06,
"loss": 2.3751,
"step": 810
},
{
"epoch": 0.0861344537815126,
"grad_norm": 132.0,
"learning_rate": 5.460000000000001e-06,
"loss": 3.6548,
"step": 820
},
{
"epoch": 0.08718487394957983,
"grad_norm": 38.0,
"learning_rate": 5.5266666666666666e-06,
"loss": 2.9053,
"step": 830
},
{
"epoch": 0.08823529411764706,
"grad_norm": 27.375,
"learning_rate": 5.593333333333334e-06,
"loss": 2.8639,
"step": 840
},
{
"epoch": 0.08928571428571429,
"grad_norm": 98.5,
"learning_rate": 5.66e-06,
"loss": 2.9273,
"step": 850
},
{
"epoch": 0.09033613445378151,
"grad_norm": 27.875,
"learning_rate": 5.726666666666667e-06,
"loss": 2.5684,
"step": 860
},
{
"epoch": 0.09138655462184873,
"grad_norm": 16.875,
"learning_rate": 5.793333333333334e-06,
"loss": 2.7857,
"step": 870
},
{
"epoch": 0.09243697478991597,
"grad_norm": 13.1875,
"learning_rate": 5.86e-06,
"loss": 2.4058,
"step": 880
},
{
"epoch": 0.0934873949579832,
"grad_norm": 17.0,
"learning_rate": 5.926666666666667e-06,
"loss": 2.7086,
"step": 890
},
{
"epoch": 0.09453781512605042,
"grad_norm": 15.8125,
"learning_rate": 5.993333333333334e-06,
"loss": 1.9958,
"step": 900
},
{
"epoch": 0.09558823529411764,
"grad_norm": 25.25,
"learning_rate": 6.0600000000000004e-06,
"loss": 2.5835,
"step": 910
},
{
"epoch": 0.09663865546218488,
"grad_norm": 22.25,
"learning_rate": 6.126666666666668e-06,
"loss": 2.6418,
"step": 920
},
{
"epoch": 0.0976890756302521,
"grad_norm": 41.75,
"learning_rate": 6.193333333333333e-06,
"loss": 2.225,
"step": 930
},
{
"epoch": 0.09873949579831932,
"grad_norm": 38.75,
"learning_rate": 6.26e-06,
"loss": 2.4876,
"step": 940
},
{
"epoch": 0.09978991596638656,
"grad_norm": 18.375,
"learning_rate": 6.326666666666667e-06,
"loss": 2.6567,
"step": 950
},
{
"epoch": 0.10084033613445378,
"grad_norm": 35.25,
"learning_rate": 6.393333333333334e-06,
"loss": 2.9652,
"step": 960
},
{
"epoch": 0.10189075630252101,
"grad_norm": 38.25,
"learning_rate": 6.460000000000001e-06,
"loss": 2.9891,
"step": 970
},
{
"epoch": 0.10294117647058823,
"grad_norm": 12.1875,
"learning_rate": 6.526666666666666e-06,
"loss": 1.9093,
"step": 980
},
{
"epoch": 0.10399159663865547,
"grad_norm": 37.0,
"learning_rate": 6.5933333333333335e-06,
"loss": 2.344,
"step": 990
},
{
"epoch": 0.10504201680672269,
"grad_norm": 36.75,
"learning_rate": 6.660000000000001e-06,
"loss": 2.3952,
"step": 1000
},
{
"epoch": 0.10609243697478991,
"grad_norm": 416.0,
"learning_rate": 6.726666666666667e-06,
"loss": 1.9161,
"step": 1010
},
{
"epoch": 0.10714285714285714,
"grad_norm": 39.25,
"learning_rate": 6.793333333333334e-06,
"loss": 2.9488,
"step": 1020
},
{
"epoch": 0.10819327731092437,
"grad_norm": 7.53125,
"learning_rate": 6.860000000000001e-06,
"loss": 1.9871,
"step": 1030
},
{
"epoch": 0.1092436974789916,
"grad_norm": 18.25,
"learning_rate": 6.926666666666667e-06,
"loss": 2.176,
"step": 1040
},
{
"epoch": 0.11029411764705882,
"grad_norm": 24.0,
"learning_rate": 6.993333333333334e-06,
"loss": 2.589,
"step": 1050
},
{
"epoch": 0.11134453781512606,
"grad_norm": 21.75,
"learning_rate": 7.06e-06,
"loss": 2.5755,
"step": 1060
},
{
"epoch": 0.11239495798319328,
"grad_norm": 17.0,
"learning_rate": 7.126666666666667e-06,
"loss": 1.8312,
"step": 1070
},
{
"epoch": 0.1134453781512605,
"grad_norm": 25.875,
"learning_rate": 7.1933333333333345e-06,
"loss": 1.9363,
"step": 1080
},
{
"epoch": 0.11449579831932773,
"grad_norm": 13.8125,
"learning_rate": 7.260000000000001e-06,
"loss": 2.6568,
"step": 1090
},
{
"epoch": 0.11554621848739496,
"grad_norm": 94.0,
"learning_rate": 7.326666666666667e-06,
"loss": 2.3152,
"step": 1100
},
{
"epoch": 0.11659663865546219,
"grad_norm": 8.875,
"learning_rate": 7.393333333333333e-06,
"loss": 1.8939,
"step": 1110
},
{
"epoch": 0.11764705882352941,
"grad_norm": 29.625,
"learning_rate": 7.4600000000000006e-06,
"loss": 1.865,
"step": 1120
},
{
"epoch": 0.11869747899159663,
"grad_norm": 16.625,
"learning_rate": 7.526666666666668e-06,
"loss": 2.1626,
"step": 1130
},
{
"epoch": 0.11974789915966387,
"grad_norm": 286.0,
"learning_rate": 7.593333333333334e-06,
"loss": 2.6556,
"step": 1140
},
{
"epoch": 0.1207983193277311,
"grad_norm": 25.625,
"learning_rate": 7.660000000000001e-06,
"loss": 1.9427,
"step": 1150
},
{
"epoch": 0.12184873949579832,
"grad_norm": 30.875,
"learning_rate": 7.726666666666667e-06,
"loss": 1.8481,
"step": 1160
},
{
"epoch": 0.12289915966386554,
"grad_norm": 19.0,
"learning_rate": 7.793333333333334e-06,
"loss": 1.8739,
"step": 1170
},
{
"epoch": 0.12394957983193278,
"grad_norm": 22.375,
"learning_rate": 7.860000000000001e-06,
"loss": 1.7951,
"step": 1180
},
{
"epoch": 0.125,
"grad_norm": 215.0,
"learning_rate": 7.926666666666666e-06,
"loss": 1.5977,
"step": 1190
},
{
"epoch": 0.12605042016806722,
"grad_norm": 22.375,
"learning_rate": 7.993333333333334e-06,
"loss": 1.2865,
"step": 1200
},
{
"epoch": 0.12710084033613445,
"grad_norm": 18.25,
"learning_rate": 8.06e-06,
"loss": 1.9623,
"step": 1210
},
{
"epoch": 0.12815126050420167,
"grad_norm": 18.25,
"learning_rate": 8.126666666666668e-06,
"loss": 2.3094,
"step": 1220
},
{
"epoch": 0.12920168067226892,
"grad_norm": 118.5,
"learning_rate": 8.193333333333335e-06,
"loss": 2.0879,
"step": 1230
},
{
"epoch": 0.13025210084033614,
"grad_norm": 7.8125,
"learning_rate": 8.26e-06,
"loss": 1.5947,
"step": 1240
},
{
"epoch": 0.13130252100840337,
"grad_norm": 22.5,
"learning_rate": 8.326666666666668e-06,
"loss": 2.245,
"step": 1250
},
{
"epoch": 0.1323529411764706,
"grad_norm": 14.5,
"learning_rate": 8.393333333333335e-06,
"loss": 1.8151,
"step": 1260
},
{
"epoch": 0.1334033613445378,
"grad_norm": 18.5,
"learning_rate": 8.46e-06,
"loss": 1.4881,
"step": 1270
},
{
"epoch": 0.13445378151260504,
"grad_norm": 33.25,
"learning_rate": 8.526666666666667e-06,
"loss": 1.9902,
"step": 1280
},
{
"epoch": 0.13550420168067226,
"grad_norm": 26.25,
"learning_rate": 8.593333333333333e-06,
"loss": 2.0831,
"step": 1290
},
{
"epoch": 0.13655462184873948,
"grad_norm": 21.25,
"learning_rate": 8.66e-06,
"loss": 1.6901,
"step": 1300
},
{
"epoch": 0.13760504201680673,
"grad_norm": 16.75,
"learning_rate": 8.726666666666667e-06,
"loss": 1.9426,
"step": 1310
},
{
"epoch": 0.13865546218487396,
"grad_norm": 18.0,
"learning_rate": 8.793333333333334e-06,
"loss": 1.9525,
"step": 1320
},
{
"epoch": 0.13970588235294118,
"grad_norm": 8.5,
"learning_rate": 8.860000000000002e-06,
"loss": 1.9608,
"step": 1330
},
{
"epoch": 0.1407563025210084,
"grad_norm": 17.25,
"learning_rate": 8.926666666666669e-06,
"loss": 1.9182,
"step": 1340
},
{
"epoch": 0.14180672268907563,
"grad_norm": 15.625,
"learning_rate": 8.993333333333334e-06,
"loss": 1.974,
"step": 1350
},
{
"epoch": 0.14285714285714285,
"grad_norm": 7.96875,
"learning_rate": 9.060000000000001e-06,
"loss": 1.5636,
"step": 1360
},
{
"epoch": 0.14390756302521007,
"grad_norm": 14.3125,
"learning_rate": 9.126666666666667e-06,
"loss": 1.9022,
"step": 1370
},
{
"epoch": 0.14495798319327732,
"grad_norm": 22.0,
"learning_rate": 9.193333333333334e-06,
"loss": 2.12,
"step": 1380
},
{
"epoch": 0.14600840336134455,
"grad_norm": 8.5625,
"learning_rate": 9.260000000000001e-06,
"loss": 1.6334,
"step": 1390
},
{
"epoch": 0.14705882352941177,
"grad_norm": 24.625,
"learning_rate": 9.326666666666667e-06,
"loss": 1.3727,
"step": 1400
},
{
"epoch": 0.148109243697479,
"grad_norm": 14.75,
"learning_rate": 9.393333333333334e-06,
"loss": 1.8891,
"step": 1410
},
{
"epoch": 0.14915966386554622,
"grad_norm": 10.875,
"learning_rate": 9.460000000000001e-06,
"loss": 1.5872,
"step": 1420
},
{
"epoch": 0.15021008403361344,
"grad_norm": 29.625,
"learning_rate": 9.526666666666668e-06,
"loss": 1.3031,
"step": 1430
},
{
"epoch": 0.15126050420168066,
"grad_norm": 9.8125,
"learning_rate": 9.593333333333335e-06,
"loss": 1.407,
"step": 1440
},
{
"epoch": 0.15231092436974789,
"grad_norm": 24.875,
"learning_rate": 9.66e-06,
"loss": 2.4515,
"step": 1450
},
{
"epoch": 0.15336134453781514,
"grad_norm": 4.59375,
"learning_rate": 9.726666666666668e-06,
"loss": 1.3885,
"step": 1460
},
{
"epoch": 0.15441176470588236,
"grad_norm": 13.625,
"learning_rate": 9.793333333333333e-06,
"loss": 1.9857,
"step": 1470
},
{
"epoch": 0.15546218487394958,
"grad_norm": 16.375,
"learning_rate": 9.86e-06,
"loss": 2.2004,
"step": 1480
},
{
"epoch": 0.1565126050420168,
"grad_norm": 13.4375,
"learning_rate": 9.926666666666668e-06,
"loss": 1.7066,
"step": 1490
},
{
"epoch": 0.15756302521008403,
"grad_norm": 27.625,
"learning_rate": 9.993333333333333e-06,
"loss": 1.9221,
"step": 1500
},
{
"epoch": 0.15861344537815125,
"grad_norm": 16.0,
"learning_rate": 1.006e-05,
"loss": 1.8841,
"step": 1510
},
{
"epoch": 0.15966386554621848,
"grad_norm": 15.1875,
"learning_rate": 1.0126666666666667e-05,
"loss": 1.9627,
"step": 1520
},
{
"epoch": 0.16071428571428573,
"grad_norm": 25.625,
"learning_rate": 1.0193333333333335e-05,
"loss": 2.4313,
"step": 1530
},
{
"epoch": 0.16176470588235295,
"grad_norm": 31.125,
"learning_rate": 1.0260000000000002e-05,
"loss": 1.8476,
"step": 1540
},
{
"epoch": 0.16281512605042017,
"grad_norm": 19.375,
"learning_rate": 1.0326666666666667e-05,
"loss": 1.4604,
"step": 1550
},
{
"epoch": 0.1638655462184874,
"grad_norm": 18.25,
"learning_rate": 1.0393333333333336e-05,
"loss": 2.0726,
"step": 1560
},
{
"epoch": 0.16491596638655462,
"grad_norm": 5.40625,
"learning_rate": 1.0460000000000001e-05,
"loss": 1.6203,
"step": 1570
},
{
"epoch": 0.16596638655462184,
"grad_norm": 8.6875,
"learning_rate": 1.0526666666666667e-05,
"loss": 1.5707,
"step": 1580
},
{
"epoch": 0.16701680672268907,
"grad_norm": 17.125,
"learning_rate": 1.0593333333333332e-05,
"loss": 1.8378,
"step": 1590
},
{
"epoch": 0.16806722689075632,
"grad_norm": 10.75,
"learning_rate": 1.0660000000000001e-05,
"loss": 1.675,
"step": 1600
},
{
"epoch": 0.16911764705882354,
"grad_norm": 20.0,
"learning_rate": 1.0726666666666667e-05,
"loss": 1.98,
"step": 1610
},
{
"epoch": 0.17016806722689076,
"grad_norm": 12.75,
"learning_rate": 1.0793333333333334e-05,
"loss": 1.4521,
"step": 1620
},
{
"epoch": 0.17121848739495799,
"grad_norm": 22.125,
"learning_rate": 1.0860000000000001e-05,
"loss": 2.1697,
"step": 1630
},
{
"epoch": 0.1722689075630252,
"grad_norm": 21.75,
"learning_rate": 1.0926666666666668e-05,
"loss": 2.4472,
"step": 1640
},
{
"epoch": 0.17331932773109243,
"grad_norm": 700.0,
"learning_rate": 1.0993333333333334e-05,
"loss": 1.9322,
"step": 1650
},
{
"epoch": 0.17436974789915966,
"grad_norm": 16.375,
"learning_rate": 1.1060000000000003e-05,
"loss": 2.1861,
"step": 1660
},
{
"epoch": 0.17542016806722688,
"grad_norm": 25.625,
"learning_rate": 1.1126666666666668e-05,
"loss": 1.8695,
"step": 1670
},
{
"epoch": 0.17647058823529413,
"grad_norm": 7.3125,
"learning_rate": 1.1193333333333333e-05,
"loss": 1.7726,
"step": 1680
},
{
"epoch": 0.17752100840336135,
"grad_norm": 25.25,
"learning_rate": 1.126e-05,
"loss": 2.1717,
"step": 1690
},
{
"epoch": 0.17857142857142858,
"grad_norm": 15.6875,
"learning_rate": 1.1326666666666668e-05,
"loss": 1.6906,
"step": 1700
},
{
"epoch": 0.1796218487394958,
"grad_norm": 24.5,
"learning_rate": 1.1393333333333333e-05,
"loss": 1.5808,
"step": 1710
},
{
"epoch": 0.18067226890756302,
"grad_norm": 6.6875,
"learning_rate": 1.146e-05,
"loss": 1.2771,
"step": 1720
},
{
"epoch": 0.18172268907563024,
"grad_norm": 18.0,
"learning_rate": 1.1526666666666668e-05,
"loss": 2.0885,
"step": 1730
},
{
"epoch": 0.18277310924369747,
"grad_norm": 14.4375,
"learning_rate": 1.1593333333333335e-05,
"loss": 1.1955,
"step": 1740
},
{
"epoch": 0.18382352941176472,
"grad_norm": 16.625,
"learning_rate": 1.166e-05,
"loss": 2.1007,
"step": 1750
},
{
"epoch": 0.18487394957983194,
"grad_norm": 24.375,
"learning_rate": 1.1726666666666669e-05,
"loss": 2.0714,
"step": 1760
},
{
"epoch": 0.18592436974789917,
"grad_norm": 19.625,
"learning_rate": 1.1793333333333334e-05,
"loss": 1.6826,
"step": 1770
},
{
"epoch": 0.1869747899159664,
"grad_norm": 7.875,
"learning_rate": 1.186e-05,
"loss": 1.3679,
"step": 1780
},
{
"epoch": 0.1880252100840336,
"grad_norm": 13.625,
"learning_rate": 1.1926666666666669e-05,
"loss": 1.4829,
"step": 1790
},
{
"epoch": 0.18907563025210083,
"grad_norm": 25.25,
"learning_rate": 1.1993333333333334e-05,
"loss": 1.6078,
"step": 1800
},
{
"epoch": 0.19012605042016806,
"grad_norm": 292.0,
"learning_rate": 1.2060000000000001e-05,
"loss": 1.6539,
"step": 1810
},
{
"epoch": 0.19117647058823528,
"grad_norm": 23.5,
"learning_rate": 1.2126666666666667e-05,
"loss": 1.6501,
"step": 1820
},
{
"epoch": 0.19222689075630253,
"grad_norm": 15.375,
"learning_rate": 1.2193333333333334e-05,
"loss": 1.3654,
"step": 1830
},
{
"epoch": 0.19327731092436976,
"grad_norm": 18.25,
"learning_rate": 1.2260000000000001e-05,
"loss": 1.5726,
"step": 1840
},
{
"epoch": 0.19432773109243698,
"grad_norm": 19.625,
"learning_rate": 1.2326666666666667e-05,
"loss": 1.7464,
"step": 1850
},
{
"epoch": 0.1953781512605042,
"grad_norm": 8.5625,
"learning_rate": 1.2393333333333336e-05,
"loss": 1.4811,
"step": 1860
},
{
"epoch": 0.19642857142857142,
"grad_norm": 42.0,
"learning_rate": 1.2460000000000001e-05,
"loss": 1.2926,
"step": 1870
},
{
"epoch": 0.19747899159663865,
"grad_norm": 19.25,
"learning_rate": 1.2526666666666666e-05,
"loss": 1.2072,
"step": 1880
},
{
"epoch": 0.19852941176470587,
"grad_norm": 13.0625,
"learning_rate": 1.2593333333333335e-05,
"loss": 1.6926,
"step": 1890
},
{
"epoch": 0.19957983193277312,
"grad_norm": 193.0,
"learning_rate": 1.266e-05,
"loss": 1.8289,
"step": 1900
},
{
"epoch": 0.20063025210084034,
"grad_norm": 31.875,
"learning_rate": 1.2726666666666668e-05,
"loss": 1.2674,
"step": 1910
},
{
"epoch": 0.20168067226890757,
"grad_norm": 20.125,
"learning_rate": 1.2793333333333335e-05,
"loss": 1.8923,
"step": 1920
},
{
"epoch": 0.2027310924369748,
"grad_norm": 5.8125,
"learning_rate": 1.286e-05,
"loss": 1.7089,
"step": 1930
},
{
"epoch": 0.20378151260504201,
"grad_norm": 21.75,
"learning_rate": 1.2926666666666668e-05,
"loss": 1.486,
"step": 1940
},
{
"epoch": 0.20483193277310924,
"grad_norm": 13.6875,
"learning_rate": 1.2993333333333333e-05,
"loss": 2.2097,
"step": 1950
},
{
"epoch": 0.20588235294117646,
"grad_norm": 28.25,
"learning_rate": 1.3060000000000002e-05,
"loss": 1.7722,
"step": 1960
},
{
"epoch": 0.20693277310924368,
"grad_norm": 13.375,
"learning_rate": 1.3126666666666667e-05,
"loss": 1.7385,
"step": 1970
},
{
"epoch": 0.20798319327731093,
"grad_norm": 6.40625,
"learning_rate": 1.3193333333333333e-05,
"loss": 1.3068,
"step": 1980
},
{
"epoch": 0.20903361344537816,
"grad_norm": 43.25,
"learning_rate": 1.3260000000000002e-05,
"loss": 1.9285,
"step": 1990
},
{
"epoch": 0.21008403361344538,
"grad_norm": 15.0,
"learning_rate": 1.3326666666666667e-05,
"loss": 1.4772,
"step": 2000
},
{
"epoch": 0.2111344537815126,
"grad_norm": 5.90625,
"learning_rate": 1.3393333333333334e-05,
"loss": 1.6393,
"step": 2010
},
{
"epoch": 0.21218487394957983,
"grad_norm": 19.125,
"learning_rate": 1.3460000000000002e-05,
"loss": 1.3677,
"step": 2020
},
{
"epoch": 0.21323529411764705,
"grad_norm": 14.6875,
"learning_rate": 1.3526666666666669e-05,
"loss": 1.8989,
"step": 2030
},
{
"epoch": 0.21428571428571427,
"grad_norm": 14.25,
"learning_rate": 1.3593333333333334e-05,
"loss": 1.5803,
"step": 2040
},
{
"epoch": 0.21533613445378152,
"grad_norm": 15.5625,
"learning_rate": 1.3660000000000001e-05,
"loss": 1.5648,
"step": 2050
},
{
"epoch": 0.21638655462184875,
"grad_norm": 6.46875,
"learning_rate": 1.3726666666666669e-05,
"loss": 1.5311,
"step": 2060
},
{
"epoch": 0.21743697478991597,
"grad_norm": 18.0,
"learning_rate": 1.3793333333333334e-05,
"loss": 1.5656,
"step": 2070
},
{
"epoch": 0.2184873949579832,
"grad_norm": 17.875,
"learning_rate": 1.386e-05,
"loss": 1.6681,
"step": 2080
},
{
"epoch": 0.21953781512605042,
"grad_norm": 13.0,
"learning_rate": 1.3926666666666668e-05,
"loss": 1.629,
"step": 2090
},
{
"epoch": 0.22058823529411764,
"grad_norm": 13.8125,
"learning_rate": 1.3993333333333334e-05,
"loss": 1.6206,
"step": 2100
},
{
"epoch": 0.22163865546218486,
"grad_norm": 11.375,
"learning_rate": 1.4060000000000001e-05,
"loss": 1.5289,
"step": 2110
},
{
"epoch": 0.22268907563025211,
"grad_norm": 19.875,
"learning_rate": 1.4126666666666668e-05,
"loss": 1.991,
"step": 2120
},
{
"epoch": 0.22373949579831934,
"grad_norm": 16.25,
"learning_rate": 1.4193333333333335e-05,
"loss": 1.9697,
"step": 2130
},
{
"epoch": 0.22478991596638656,
"grad_norm": 7.90625,
"learning_rate": 1.426e-05,
"loss": 1.8469,
"step": 2140
},
{
"epoch": 0.22584033613445378,
"grad_norm": 6.28125,
"learning_rate": 1.432666666666667e-05,
"loss": 1.3915,
"step": 2150
},
{
"epoch": 0.226890756302521,
"grad_norm": 6.25,
"learning_rate": 1.4393333333333335e-05,
"loss": 1.2475,
"step": 2160
},
{
"epoch": 0.22794117647058823,
"grad_norm": 7.75,
"learning_rate": 1.446e-05,
"loss": 1.8652,
"step": 2170
},
{
"epoch": 0.22899159663865545,
"grad_norm": 18.125,
"learning_rate": 1.452666666666667e-05,
"loss": 1.898,
"step": 2180
},
{
"epoch": 0.23004201680672268,
"grad_norm": 14.8125,
"learning_rate": 1.4593333333333335e-05,
"loss": 1.4146,
"step": 2190
},
{
"epoch": 0.23109243697478993,
"grad_norm": 13.8125,
"learning_rate": 1.466e-05,
"loss": 1.761,
"step": 2200
},
{
"epoch": 0.23214285714285715,
"grad_norm": 15.0,
"learning_rate": 1.4726666666666667e-05,
"loss": 1.5954,
"step": 2210
},
{
"epoch": 0.23319327731092437,
"grad_norm": 12.8125,
"learning_rate": 1.4793333333333335e-05,
"loss": 1.7692,
"step": 2220
},
{
"epoch": 0.2342436974789916,
"grad_norm": 11.6875,
"learning_rate": 1.4860000000000002e-05,
"loss": 1.6226,
"step": 2230
},
{
"epoch": 0.23529411764705882,
"grad_norm": 15.5,
"learning_rate": 1.4926666666666667e-05,
"loss": 1.5973,
"step": 2240
},
{
"epoch": 0.23634453781512604,
"grad_norm": 14.875,
"learning_rate": 1.4993333333333336e-05,
"loss": 1.7924,
"step": 2250
},
{
"epoch": 0.23739495798319327,
"grad_norm": 12.875,
"learning_rate": 1.5060000000000001e-05,
"loss": 1.9794,
"step": 2260
},
{
"epoch": 0.23844537815126052,
"grad_norm": 18.625,
"learning_rate": 1.5126666666666667e-05,
"loss": 1.0344,
"step": 2270
},
{
"epoch": 0.23949579831932774,
"grad_norm": 25.0,
"learning_rate": 1.5193333333333336e-05,
"loss": 1.8067,
"step": 2280
},
{
"epoch": 0.24054621848739496,
"grad_norm": 13.375,
"learning_rate": 1.5260000000000003e-05,
"loss": 1.9954,
"step": 2290
},
{
"epoch": 0.2415966386554622,
"grad_norm": 15.5,
"learning_rate": 1.5326666666666667e-05,
"loss": 1.7709,
"step": 2300
},
{
"epoch": 0.2426470588235294,
"grad_norm": 7.75,
"learning_rate": 1.5393333333333337e-05,
"loss": 1.6276,
"step": 2310
},
{
"epoch": 0.24369747899159663,
"grad_norm": 15.4375,
"learning_rate": 1.546e-05,
"loss": 1.7505,
"step": 2320
},
{
"epoch": 0.24474789915966386,
"grad_norm": 14.0625,
"learning_rate": 1.5526666666666668e-05,
"loss": 1.3429,
"step": 2330
},
{
"epoch": 0.24579831932773108,
"grad_norm": 16.0,
"learning_rate": 1.5593333333333332e-05,
"loss": 1.2232,
"step": 2340
},
{
"epoch": 0.24684873949579833,
"grad_norm": 13.375,
"learning_rate": 1.5660000000000003e-05,
"loss": 1.6442,
"step": 2350
},
{
"epoch": 0.24789915966386555,
"grad_norm": 10.9375,
"learning_rate": 1.5726666666666666e-05,
"loss": 1.6963,
"step": 2360
},
{
"epoch": 0.24894957983193278,
"grad_norm": 13.5,
"learning_rate": 1.5793333333333333e-05,
"loss": 1.6566,
"step": 2370
},
{
"epoch": 0.25,
"grad_norm": 18.0,
"learning_rate": 1.586e-05,
"loss": 1.7326,
"step": 2380
},
{
"epoch": 0.2510504201680672,
"grad_norm": 12.3125,
"learning_rate": 1.5926666666666668e-05,
"loss": 1.672,
"step": 2390
},
{
"epoch": 0.25210084033613445,
"grad_norm": 17.875,
"learning_rate": 1.5993333333333335e-05,
"loss": 0.9257,
"step": 2400
},
{
"epoch": 0.25315126050420167,
"grad_norm": 17.625,
"learning_rate": 1.6060000000000002e-05,
"loss": 1.9682,
"step": 2410
},
{
"epoch": 0.2542016806722689,
"grad_norm": 9.375,
"learning_rate": 1.612666666666667e-05,
"loss": 1.6902,
"step": 2420
},
{
"epoch": 0.2552521008403361,
"grad_norm": 12.9375,
"learning_rate": 1.6193333333333333e-05,
"loss": 1.423,
"step": 2430
},
{
"epoch": 0.25630252100840334,
"grad_norm": 11.4375,
"learning_rate": 1.626e-05,
"loss": 1.2585,
"step": 2440
},
{
"epoch": 0.25735294117647056,
"grad_norm": 6.65625,
"learning_rate": 1.6326666666666667e-05,
"loss": 1.127,
"step": 2450
},
{
"epoch": 0.25840336134453784,
"grad_norm": 16.25,
"learning_rate": 1.6393333333333334e-05,
"loss": 1.9252,
"step": 2460
},
{
"epoch": 0.25945378151260506,
"grad_norm": 24.5,
"learning_rate": 1.646e-05,
"loss": 1.3567,
"step": 2470
},
{
"epoch": 0.2605042016806723,
"grad_norm": 6.5,
"learning_rate": 1.652666666666667e-05,
"loss": 1.4298,
"step": 2480
},
{
"epoch": 0.2615546218487395,
"grad_norm": 31.5,
"learning_rate": 1.6593333333333336e-05,
"loss": 1.4108,
"step": 2490
},
{
"epoch": 0.26260504201680673,
"grad_norm": 20.125,
"learning_rate": 1.666e-05,
"loss": 2.1598,
"step": 2500
},
{
"epoch": 0.26365546218487396,
"grad_norm": 25.25,
"learning_rate": 1.672666666666667e-05,
"loss": 1.3126,
"step": 2510
},
{
"epoch": 0.2647058823529412,
"grad_norm": 13.375,
"learning_rate": 1.6793333333333334e-05,
"loss": 1.4508,
"step": 2520
},
{
"epoch": 0.2657563025210084,
"grad_norm": 14.75,
"learning_rate": 1.686e-05,
"loss": 1.2838,
"step": 2530
},
{
"epoch": 0.2668067226890756,
"grad_norm": 26.875,
"learning_rate": 1.692666666666667e-05,
"loss": 1.3981,
"step": 2540
},
{
"epoch": 0.26785714285714285,
"grad_norm": 14.75,
"learning_rate": 1.6993333333333336e-05,
"loss": 1.3673,
"step": 2550
},
{
"epoch": 0.2689075630252101,
"grad_norm": 20.5,
"learning_rate": 1.7060000000000003e-05,
"loss": 1.6032,
"step": 2560
},
{
"epoch": 0.2699579831932773,
"grad_norm": 16.875,
"learning_rate": 1.7126666666666666e-05,
"loss": 1.7339,
"step": 2570
},
{
"epoch": 0.2710084033613445,
"grad_norm": 16.25,
"learning_rate": 1.7193333333333334e-05,
"loss": 1.9788,
"step": 2580
},
{
"epoch": 0.27205882352941174,
"grad_norm": 19.5,
"learning_rate": 1.726e-05,
"loss": 1.6745,
"step": 2590
},
{
"epoch": 0.27310924369747897,
"grad_norm": 13.5,
"learning_rate": 1.7326666666666668e-05,
"loss": 1.1851,
"step": 2600
},
{
"epoch": 0.27415966386554624,
"grad_norm": 20.125,
"learning_rate": 1.7393333333333335e-05,
"loss": 1.515,
"step": 2610
},
{
"epoch": 0.27521008403361347,
"grad_norm": 12.9375,
"learning_rate": 1.7460000000000002e-05,
"loss": 1.4682,
"step": 2620
},
{
"epoch": 0.2762605042016807,
"grad_norm": 21.75,
"learning_rate": 1.7526666666666666e-05,
"loss": 1.7853,
"step": 2630
},
{
"epoch": 0.2773109243697479,
"grad_norm": 12.9375,
"learning_rate": 1.7593333333333337e-05,
"loss": 1.7173,
"step": 2640
},
{
"epoch": 0.27836134453781514,
"grad_norm": 11.75,
"learning_rate": 1.766e-05,
"loss": 1.3274,
"step": 2650
},
{
"epoch": 0.27941176470588236,
"grad_norm": 27.125,
"learning_rate": 1.7726666666666667e-05,
"loss": 2.1297,
"step": 2660
},
{
"epoch": 0.2804621848739496,
"grad_norm": 7.09375,
"learning_rate": 1.7793333333333335e-05,
"loss": 1.3744,
"step": 2670
},
{
"epoch": 0.2815126050420168,
"grad_norm": 11.0625,
"learning_rate": 1.7860000000000002e-05,
"loss": 1.5248,
"step": 2680
},
{
"epoch": 0.28256302521008403,
"grad_norm": 18.5,
"learning_rate": 1.792666666666667e-05,
"loss": 1.6294,
"step": 2690
},
{
"epoch": 0.28361344537815125,
"grad_norm": 24.125,
"learning_rate": 1.7993333333333333e-05,
"loss": 1.737,
"step": 2700
},
{
"epoch": 0.2846638655462185,
"grad_norm": 16.25,
"learning_rate": 1.8060000000000003e-05,
"loss": 1.3969,
"step": 2710
},
{
"epoch": 0.2857142857142857,
"grad_norm": 5.8125,
"learning_rate": 1.8126666666666667e-05,
"loss": 1.911,
"step": 2720
},
{
"epoch": 0.2867647058823529,
"grad_norm": 15.5,
"learning_rate": 1.8193333333333334e-05,
"loss": 1.5679,
"step": 2730
},
{
"epoch": 0.28781512605042014,
"grad_norm": 14.0625,
"learning_rate": 1.826e-05,
"loss": 1.7564,
"step": 2740
},
{
"epoch": 0.28886554621848737,
"grad_norm": 12.375,
"learning_rate": 1.832666666666667e-05,
"loss": 1.4142,
"step": 2750
},
{
"epoch": 0.28991596638655465,
"grad_norm": 19.125,
"learning_rate": 1.8393333333333336e-05,
"loss": 2.1592,
"step": 2760
},
{
"epoch": 0.29096638655462187,
"grad_norm": 24.875,
"learning_rate": 1.8460000000000003e-05,
"loss": 1.7029,
"step": 2770
},
{
"epoch": 0.2920168067226891,
"grad_norm": 24.0,
"learning_rate": 1.852666666666667e-05,
"loss": 1.3118,
"step": 2780
},
{
"epoch": 0.2930672268907563,
"grad_norm": 29.25,
"learning_rate": 1.8593333333333334e-05,
"loss": 1.8215,
"step": 2790
},
{
"epoch": 0.29411764705882354,
"grad_norm": 6.4375,
"learning_rate": 1.866e-05,
"loss": 1.5881,
"step": 2800
},
{
"epoch": 0.29516806722689076,
"grad_norm": 12.9375,
"learning_rate": 1.8726666666666668e-05,
"loss": 1.5423,
"step": 2810
},
{
"epoch": 0.296218487394958,
"grad_norm": 8.25,
"learning_rate": 1.8793333333333335e-05,
"loss": 1.5408,
"step": 2820
},
{
"epoch": 0.2972689075630252,
"grad_norm": 18.125,
"learning_rate": 1.886e-05,
"loss": 1.3339,
"step": 2830
},
{
"epoch": 0.29831932773109243,
"grad_norm": 12.25,
"learning_rate": 1.892666666666667e-05,
"loss": 1.3236,
"step": 2840
},
{
"epoch": 0.29936974789915966,
"grad_norm": 32.5,
"learning_rate": 1.8993333333333333e-05,
"loss": 2.0409,
"step": 2850
},
{
"epoch": 0.3004201680672269,
"grad_norm": 20.125,
"learning_rate": 1.906e-05,
"loss": 1.5573,
"step": 2860
},
{
"epoch": 0.3014705882352941,
"grad_norm": 12.125,
"learning_rate": 1.9126666666666668e-05,
"loss": 1.3649,
"step": 2870
},
{
"epoch": 0.3025210084033613,
"grad_norm": 13.125,
"learning_rate": 1.9193333333333335e-05,
"loss": 1.6068,
"step": 2880
},
{
"epoch": 0.30357142857142855,
"grad_norm": 12.5625,
"learning_rate": 1.9260000000000002e-05,
"loss": 1.1925,
"step": 2890
},
{
"epoch": 0.30462184873949577,
"grad_norm": 19.875,
"learning_rate": 1.932666666666667e-05,
"loss": 1.8134,
"step": 2900
},
{
"epoch": 0.30567226890756305,
"grad_norm": 18.375,
"learning_rate": 1.9393333333333336e-05,
"loss": 1.5501,
"step": 2910
},
{
"epoch": 0.3067226890756303,
"grad_norm": 9.75,
"learning_rate": 1.946e-05,
"loss": 1.1981,
"step": 2920
},
{
"epoch": 0.3077731092436975,
"grad_norm": 23.0,
"learning_rate": 1.952666666666667e-05,
"loss": 1.3437,
"step": 2930
},
{
"epoch": 0.3088235294117647,
"grad_norm": 23.75,
"learning_rate": 1.9593333333333334e-05,
"loss": 1.3747,
"step": 2940
},
{
"epoch": 0.30987394957983194,
"grad_norm": 18.875,
"learning_rate": 1.966e-05,
"loss": 1.4291,
"step": 2950
},
{
"epoch": 0.31092436974789917,
"grad_norm": 10.5,
"learning_rate": 1.972666666666667e-05,
"loss": 1.3904,
"step": 2960
},
{
"epoch": 0.3119747899159664,
"grad_norm": 8.75,
"learning_rate": 1.9793333333333336e-05,
"loss": 1.3657,
"step": 2970
},
{
"epoch": 0.3130252100840336,
"grad_norm": 16.875,
"learning_rate": 1.9860000000000003e-05,
"loss": 1.5698,
"step": 2980
},
{
"epoch": 0.31407563025210083,
"grad_norm": 20.125,
"learning_rate": 1.9926666666666667e-05,
"loss": 1.4325,
"step": 2990
},
{
"epoch": 0.31512605042016806,
"grad_norm": 13.75,
"learning_rate": 1.9993333333333337e-05,
"loss": 1.4939,
"step": 3000
},
{
"epoch": 0.3161764705882353,
"grad_norm": 5.15625,
"learning_rate": 1.9992957746478873e-05,
"loss": 1.5617,
"step": 3010
},
{
"epoch": 0.3172268907563025,
"grad_norm": 12.25,
"learning_rate": 1.998513302034429e-05,
"loss": 1.813,
"step": 3020
},
{
"epoch": 0.3182773109243697,
"grad_norm": 12.4375,
"learning_rate": 1.9977308294209706e-05,
"loss": 1.3044,
"step": 3030
},
{
"epoch": 0.31932773109243695,
"grad_norm": 15.5,
"learning_rate": 1.9969483568075117e-05,
"loss": 1.8378,
"step": 3040
},
{
"epoch": 0.32037815126050423,
"grad_norm": 16.375,
"learning_rate": 1.9961658841940535e-05,
"loss": 1.6258,
"step": 3050
},
{
"epoch": 0.32142857142857145,
"grad_norm": 11.375,
"learning_rate": 1.9953834115805946e-05,
"loss": 1.2288,
"step": 3060
},
{
"epoch": 0.3224789915966387,
"grad_norm": 14.0,
"learning_rate": 1.9946009389671364e-05,
"loss": 1.6408,
"step": 3070
},
{
"epoch": 0.3235294117647059,
"grad_norm": 18.0,
"learning_rate": 1.9938184663536776e-05,
"loss": 1.5973,
"step": 3080
},
{
"epoch": 0.3245798319327731,
"grad_norm": 12.625,
"learning_rate": 1.9930359937402194e-05,
"loss": 1.5506,
"step": 3090
},
{
"epoch": 0.32563025210084034,
"grad_norm": 14.75,
"learning_rate": 1.9922535211267605e-05,
"loss": 1.5711,
"step": 3100
},
{
"epoch": 0.32668067226890757,
"grad_norm": 5.71875,
"learning_rate": 1.9914710485133023e-05,
"loss": 1.2216,
"step": 3110
},
{
"epoch": 0.3277310924369748,
"grad_norm": 12.9375,
"learning_rate": 1.9906885758998438e-05,
"loss": 2.0402,
"step": 3120
},
{
"epoch": 0.328781512605042,
"grad_norm": 14.75,
"learning_rate": 1.9899061032863852e-05,
"loss": 1.7379,
"step": 3130
},
{
"epoch": 0.32983193277310924,
"grad_norm": 16.125,
"learning_rate": 1.9891236306729267e-05,
"loss": 1.4854,
"step": 3140
},
{
"epoch": 0.33088235294117646,
"grad_norm": 20.75,
"learning_rate": 1.988341158059468e-05,
"loss": 1.5693,
"step": 3150
},
{
"epoch": 0.3319327731092437,
"grad_norm": 4.21875,
"learning_rate": 1.9875586854460096e-05,
"loss": 1.3189,
"step": 3160
},
{
"epoch": 0.3329831932773109,
"grad_norm": 21.25,
"learning_rate": 1.986776212832551e-05,
"loss": 1.4291,
"step": 3170
},
{
"epoch": 0.33403361344537813,
"grad_norm": 9.5,
"learning_rate": 1.9859937402190926e-05,
"loss": 1.9245,
"step": 3180
},
{
"epoch": 0.33508403361344535,
"grad_norm": 16.375,
"learning_rate": 1.985211267605634e-05,
"loss": 1.8689,
"step": 3190
},
{
"epoch": 0.33613445378151263,
"grad_norm": 27.125,
"learning_rate": 1.9844287949921755e-05,
"loss": 1.8231,
"step": 3200
},
{
"epoch": 0.33718487394957986,
"grad_norm": 20.125,
"learning_rate": 1.983646322378717e-05,
"loss": 1.8372,
"step": 3210
},
{
"epoch": 0.3382352941176471,
"grad_norm": 15.25,
"learning_rate": 1.9828638497652584e-05,
"loss": 1.4144,
"step": 3220
},
{
"epoch": 0.3392857142857143,
"grad_norm": 12.4375,
"learning_rate": 1.9820813771518e-05,
"loss": 1.3104,
"step": 3230
},
{
"epoch": 0.3403361344537815,
"grad_norm": 18.375,
"learning_rate": 1.9812989045383414e-05,
"loss": 1.7318,
"step": 3240
},
{
"epoch": 0.34138655462184875,
"grad_norm": 12.0625,
"learning_rate": 1.9805164319248828e-05,
"loss": 0.8846,
"step": 3250
},
{
"epoch": 0.34243697478991597,
"grad_norm": 14.4375,
"learning_rate": 1.9797339593114243e-05,
"loss": 1.5413,
"step": 3260
},
{
"epoch": 0.3434873949579832,
"grad_norm": 18.0,
"learning_rate": 1.9789514866979657e-05,
"loss": 1.5614,
"step": 3270
},
{
"epoch": 0.3445378151260504,
"grad_norm": 29.125,
"learning_rate": 1.9781690140845072e-05,
"loss": 1.4372,
"step": 3280
},
{
"epoch": 0.34558823529411764,
"grad_norm": 17.0,
"learning_rate": 1.9773865414710487e-05,
"loss": 1.4374,
"step": 3290
},
{
"epoch": 0.34663865546218486,
"grad_norm": 12.25,
"learning_rate": 1.97660406885759e-05,
"loss": 1.9225,
"step": 3300
},
{
"epoch": 0.3476890756302521,
"grad_norm": 6.375,
"learning_rate": 1.9758215962441316e-05,
"loss": 1.3581,
"step": 3310
},
{
"epoch": 0.3487394957983193,
"grad_norm": 13.6875,
"learning_rate": 1.975039123630673e-05,
"loss": 1.4966,
"step": 3320
},
{
"epoch": 0.34978991596638653,
"grad_norm": 12.625,
"learning_rate": 1.9742566510172145e-05,
"loss": 1.8483,
"step": 3330
},
{
"epoch": 0.35084033613445376,
"grad_norm": 19.375,
"learning_rate": 1.973474178403756e-05,
"loss": 1.5848,
"step": 3340
},
{
"epoch": 0.35189075630252103,
"grad_norm": 12.875,
"learning_rate": 1.9726917057902975e-05,
"loss": 1.0251,
"step": 3350
},
{
"epoch": 0.35294117647058826,
"grad_norm": 11.8125,
"learning_rate": 1.971909233176839e-05,
"loss": 1.5157,
"step": 3360
},
{
"epoch": 0.3539915966386555,
"grad_norm": 18.125,
"learning_rate": 1.9711267605633804e-05,
"loss": 1.6588,
"step": 3370
},
{
"epoch": 0.3550420168067227,
"grad_norm": 20.0,
"learning_rate": 1.970344287949922e-05,
"loss": 1.3472,
"step": 3380
},
{
"epoch": 0.3560924369747899,
"grad_norm": 4.5,
"learning_rate": 1.9695618153364633e-05,
"loss": 1.5791,
"step": 3390
},
{
"epoch": 0.35714285714285715,
"grad_norm": 15.0625,
"learning_rate": 1.9687793427230048e-05,
"loss": 1.769,
"step": 3400
},
{
"epoch": 0.3581932773109244,
"grad_norm": 5.09375,
"learning_rate": 1.9679968701095463e-05,
"loss": 1.1975,
"step": 3410
},
{
"epoch": 0.3592436974789916,
"grad_norm": 11.9375,
"learning_rate": 1.9672143974960877e-05,
"loss": 1.3957,
"step": 3420
},
{
"epoch": 0.3602941176470588,
"grad_norm": 15.0,
"learning_rate": 1.9664319248826292e-05,
"loss": 1.4442,
"step": 3430
},
{
"epoch": 0.36134453781512604,
"grad_norm": 13.0,
"learning_rate": 1.9656494522691707e-05,
"loss": 1.2509,
"step": 3440
},
{
"epoch": 0.36239495798319327,
"grad_norm": 20.125,
"learning_rate": 1.964866979655712e-05,
"loss": 1.4355,
"step": 3450
},
{
"epoch": 0.3634453781512605,
"grad_norm": 22.0,
"learning_rate": 1.9640845070422536e-05,
"loss": 1.2378,
"step": 3460
},
{
"epoch": 0.3644957983193277,
"grad_norm": 27.0,
"learning_rate": 1.963302034428795e-05,
"loss": 1.9886,
"step": 3470
},
{
"epoch": 0.36554621848739494,
"grad_norm": 11.4375,
"learning_rate": 1.9625195618153365e-05,
"loss": 1.3546,
"step": 3480
},
{
"epoch": 0.36659663865546216,
"grad_norm": 24.625,
"learning_rate": 1.961737089201878e-05,
"loss": 1.4929,
"step": 3490
},
{
"epoch": 0.36764705882352944,
"grad_norm": 16.875,
"learning_rate": 1.9609546165884195e-05,
"loss": 1.4343,
"step": 3500
},
{
"epoch": 0.36869747899159666,
"grad_norm": 18.0,
"learning_rate": 1.960172143974961e-05,
"loss": 1.6022,
"step": 3510
},
{
"epoch": 0.3697478991596639,
"grad_norm": 12.75,
"learning_rate": 1.9593896713615027e-05,
"loss": 1.7385,
"step": 3520
},
{
"epoch": 0.3707983193277311,
"grad_norm": 14.8125,
"learning_rate": 1.958607198748044e-05,
"loss": 1.4431,
"step": 3530
},
{
"epoch": 0.37184873949579833,
"grad_norm": 17.75,
"learning_rate": 1.9578247261345857e-05,
"loss": 1.7195,
"step": 3540
},
{
"epoch": 0.37289915966386555,
"grad_norm": 27.25,
"learning_rate": 1.9570422535211268e-05,
"loss": 0.941,
"step": 3550
},
{
"epoch": 0.3739495798319328,
"grad_norm": 25.875,
"learning_rate": 1.9562597809076686e-05,
"loss": 1.0686,
"step": 3560
},
{
"epoch": 0.375,
"grad_norm": 22.375,
"learning_rate": 1.9554773082942097e-05,
"loss": 1.5486,
"step": 3570
},
{
"epoch": 0.3760504201680672,
"grad_norm": 19.0,
"learning_rate": 1.9546948356807515e-05,
"loss": 1.5345,
"step": 3580
},
{
"epoch": 0.37710084033613445,
"grad_norm": 12.625,
"learning_rate": 1.9539123630672926e-05,
"loss": 1.3936,
"step": 3590
},
{
"epoch": 0.37815126050420167,
"grad_norm": 20.25,
"learning_rate": 1.9531298904538344e-05,
"loss": 1.3183,
"step": 3600
},
{
"epoch": 0.3792016806722689,
"grad_norm": 15.8125,
"learning_rate": 1.952347417840376e-05,
"loss": 1.793,
"step": 3610
},
{
"epoch": 0.3802521008403361,
"grad_norm": 18.375,
"learning_rate": 1.951564945226917e-05,
"loss": 1.3554,
"step": 3620
},
{
"epoch": 0.38130252100840334,
"grad_norm": 6.375,
"learning_rate": 1.950782472613459e-05,
"loss": 1.4536,
"step": 3630
},
{
"epoch": 0.38235294117647056,
"grad_norm": 15.3125,
"learning_rate": 1.95e-05,
"loss": 1.2886,
"step": 3640
},
{
"epoch": 0.38340336134453784,
"grad_norm": 5.59375,
"learning_rate": 1.9492175273865418e-05,
"loss": 1.3608,
"step": 3650
},
{
"epoch": 0.38445378151260506,
"grad_norm": 16.125,
"learning_rate": 1.948435054773083e-05,
"loss": 1.4178,
"step": 3660
},
{
"epoch": 0.3855042016806723,
"grad_norm": 17.125,
"learning_rate": 1.9476525821596247e-05,
"loss": 1.1901,
"step": 3670
},
{
"epoch": 0.3865546218487395,
"grad_norm": 12.0625,
"learning_rate": 1.9468701095461658e-05,
"loss": 1.7493,
"step": 3680
},
{
"epoch": 0.38760504201680673,
"grad_norm": 19.375,
"learning_rate": 1.9460876369327076e-05,
"loss": 1.5029,
"step": 3690
},
{
"epoch": 0.38865546218487396,
"grad_norm": 11.6875,
"learning_rate": 1.945305164319249e-05,
"loss": 1.0809,
"step": 3700
},
{
"epoch": 0.3897058823529412,
"grad_norm": 20.125,
"learning_rate": 1.9445226917057906e-05,
"loss": 1.4481,
"step": 3710
},
{
"epoch": 0.3907563025210084,
"grad_norm": 19.875,
"learning_rate": 1.943740219092332e-05,
"loss": 1.2546,
"step": 3720
},
{
"epoch": 0.3918067226890756,
"grad_norm": 19.625,
"learning_rate": 1.9429577464788735e-05,
"loss": 1.382,
"step": 3730
},
{
"epoch": 0.39285714285714285,
"grad_norm": 9.125,
"learning_rate": 1.942175273865415e-05,
"loss": 1.262,
"step": 3740
},
{
"epoch": 0.3939075630252101,
"grad_norm": 15.75,
"learning_rate": 1.9413928012519564e-05,
"loss": 1.6274,
"step": 3750
},
{
"epoch": 0.3949579831932773,
"grad_norm": 13.6875,
"learning_rate": 1.940610328638498e-05,
"loss": 1.6542,
"step": 3760
},
{
"epoch": 0.3960084033613445,
"grad_norm": 22.75,
"learning_rate": 1.9398278560250394e-05,
"loss": 1.6889,
"step": 3770
},
{
"epoch": 0.39705882352941174,
"grad_norm": 26.125,
"learning_rate": 1.9390453834115808e-05,
"loss": 1.3694,
"step": 3780
},
{
"epoch": 0.39810924369747897,
"grad_norm": 15.6875,
"learning_rate": 1.9382629107981223e-05,
"loss": 1.5103,
"step": 3790
},
{
"epoch": 0.39915966386554624,
"grad_norm": 5.96875,
"learning_rate": 1.9374804381846637e-05,
"loss": 1.1785,
"step": 3800
},
{
"epoch": 0.40021008403361347,
"grad_norm": 19.625,
"learning_rate": 1.9366979655712052e-05,
"loss": 1.7284,
"step": 3810
},
{
"epoch": 0.4012605042016807,
"grad_norm": 13.5,
"learning_rate": 1.9359154929577467e-05,
"loss": 1.4694,
"step": 3820
},
{
"epoch": 0.4023109243697479,
"grad_norm": 6.875,
"learning_rate": 1.935133020344288e-05,
"loss": 1.051,
"step": 3830
},
{
"epoch": 0.40336134453781514,
"grad_norm": 7.375,
"learning_rate": 1.9343505477308296e-05,
"loss": 1.3056,
"step": 3840
},
{
"epoch": 0.40441176470588236,
"grad_norm": 18.5,
"learning_rate": 1.933568075117371e-05,
"loss": 1.433,
"step": 3850
},
{
"epoch": 0.4054621848739496,
"grad_norm": 17.375,
"learning_rate": 1.9327856025039125e-05,
"loss": 1.4449,
"step": 3860
},
{
"epoch": 0.4065126050420168,
"grad_norm": 17.25,
"learning_rate": 1.932003129890454e-05,
"loss": 1.4828,
"step": 3870
},
{
"epoch": 0.40756302521008403,
"grad_norm": 12.125,
"learning_rate": 1.9312206572769955e-05,
"loss": 1.561,
"step": 3880
},
{
"epoch": 0.40861344537815125,
"grad_norm": 13.4375,
"learning_rate": 1.930438184663537e-05,
"loss": 1.2401,
"step": 3890
},
{
"epoch": 0.4096638655462185,
"grad_norm": 22.375,
"learning_rate": 1.9296557120500784e-05,
"loss": 1.4516,
"step": 3900
},
{
"epoch": 0.4107142857142857,
"grad_norm": 6.625,
"learning_rate": 1.92887323943662e-05,
"loss": 1.2575,
"step": 3910
},
{
"epoch": 0.4117647058823529,
"grad_norm": 13.5625,
"learning_rate": 1.9280907668231613e-05,
"loss": 1.4083,
"step": 3920
},
{
"epoch": 0.41281512605042014,
"grad_norm": 52.0,
"learning_rate": 1.9273082942097028e-05,
"loss": 1.8745,
"step": 3930
},
{
"epoch": 0.41386554621848737,
"grad_norm": 13.375,
"learning_rate": 1.9265258215962443e-05,
"loss": 1.4336,
"step": 3940
},
{
"epoch": 0.41491596638655465,
"grad_norm": 13.375,
"learning_rate": 1.9257433489827857e-05,
"loss": 1.3154,
"step": 3950
},
{
"epoch": 0.41596638655462187,
"grad_norm": 16.125,
"learning_rate": 1.9249608763693272e-05,
"loss": 1.3826,
"step": 3960
},
{
"epoch": 0.4170168067226891,
"grad_norm": 5.65625,
"learning_rate": 1.9241784037558687e-05,
"loss": 0.9214,
"step": 3970
},
{
"epoch": 0.4180672268907563,
"grad_norm": 18.75,
"learning_rate": 1.92339593114241e-05,
"loss": 1.4686,
"step": 3980
},
{
"epoch": 0.41911764705882354,
"grad_norm": 12.8125,
"learning_rate": 1.9226134585289516e-05,
"loss": 1.1541,
"step": 3990
},
{
"epoch": 0.42016806722689076,
"grad_norm": 9.625,
"learning_rate": 1.921830985915493e-05,
"loss": 1.3521,
"step": 4000
},
{
"epoch": 0.421218487394958,
"grad_norm": 14.9375,
"learning_rate": 1.9210485133020345e-05,
"loss": 1.5706,
"step": 4010
},
{
"epoch": 0.4222689075630252,
"grad_norm": 15.75,
"learning_rate": 1.920266040688576e-05,
"loss": 1.816,
"step": 4020
},
{
"epoch": 0.42331932773109243,
"grad_norm": 6.40625,
"learning_rate": 1.9194835680751174e-05,
"loss": 1.3072,
"step": 4030
},
{
"epoch": 0.42436974789915966,
"grad_norm": 15.0,
"learning_rate": 1.918701095461659e-05,
"loss": 1.0601,
"step": 4040
},
{
"epoch": 0.4254201680672269,
"grad_norm": 12.75,
"learning_rate": 1.9179186228482004e-05,
"loss": 1.655,
"step": 4050
},
{
"epoch": 0.4264705882352941,
"grad_norm": 16.75,
"learning_rate": 1.917136150234742e-05,
"loss": 1.0496,
"step": 4060
},
{
"epoch": 0.4275210084033613,
"grad_norm": 9.375,
"learning_rate": 1.9163536776212833e-05,
"loss": 1.2718,
"step": 4070
},
{
"epoch": 0.42857142857142855,
"grad_norm": 13.5,
"learning_rate": 1.9155712050078248e-05,
"loss": 1.6943,
"step": 4080
},
{
"epoch": 0.42962184873949577,
"grad_norm": 19.625,
"learning_rate": 1.9147887323943662e-05,
"loss": 1.5612,
"step": 4090
},
{
"epoch": 0.43067226890756305,
"grad_norm": 8.5,
"learning_rate": 1.914006259780908e-05,
"loss": 1.3209,
"step": 4100
},
{
"epoch": 0.4317226890756303,
"grad_norm": 16.5,
"learning_rate": 1.9132237871674492e-05,
"loss": 1.032,
"step": 4110
},
{
"epoch": 0.4327731092436975,
"grad_norm": 6.28125,
"learning_rate": 1.912441314553991e-05,
"loss": 0.8253,
"step": 4120
},
{
"epoch": 0.4338235294117647,
"grad_norm": 6.0625,
"learning_rate": 1.911658841940532e-05,
"loss": 1.4287,
"step": 4130
},
{
"epoch": 0.43487394957983194,
"grad_norm": 28.625,
"learning_rate": 1.910876369327074e-05,
"loss": 1.7322,
"step": 4140
},
{
"epoch": 0.43592436974789917,
"grad_norm": 19.375,
"learning_rate": 1.910093896713615e-05,
"loss": 1.7938,
"step": 4150
},
{
"epoch": 0.4369747899159664,
"grad_norm": 18.5,
"learning_rate": 1.909311424100157e-05,
"loss": 1.452,
"step": 4160
},
{
"epoch": 0.4380252100840336,
"grad_norm": 18.375,
"learning_rate": 1.908528951486698e-05,
"loss": 1.5159,
"step": 4170
},
{
"epoch": 0.43907563025210083,
"grad_norm": 14.5,
"learning_rate": 1.9077464788732398e-05,
"loss": 1.1355,
"step": 4180
},
{
"epoch": 0.44012605042016806,
"grad_norm": 28.375,
"learning_rate": 1.906964006259781e-05,
"loss": 1.6948,
"step": 4190
},
{
"epoch": 0.4411764705882353,
"grad_norm": 11.5,
"learning_rate": 1.9061815336463224e-05,
"loss": 1.709,
"step": 4200
},
{
"epoch": 0.4422268907563025,
"grad_norm": 4.28125,
"learning_rate": 1.905399061032864e-05,
"loss": 1.4682,
"step": 4210
},
{
"epoch": 0.4432773109243697,
"grad_norm": 11.6875,
"learning_rate": 1.9046165884194053e-05,
"loss": 0.7947,
"step": 4220
},
{
"epoch": 0.44432773109243695,
"grad_norm": 22.875,
"learning_rate": 1.903834115805947e-05,
"loss": 1.4962,
"step": 4230
},
{
"epoch": 0.44537815126050423,
"grad_norm": 11.625,
"learning_rate": 1.9030516431924882e-05,
"loss": 1.0637,
"step": 4240
},
{
"epoch": 0.44642857142857145,
"grad_norm": 13.25,
"learning_rate": 1.90226917057903e-05,
"loss": 1.3702,
"step": 4250
},
{
"epoch": 0.4474789915966387,
"grad_norm": 16.75,
"learning_rate": 1.901486697965571e-05,
"loss": 1.5466,
"step": 4260
},
{
"epoch": 0.4485294117647059,
"grad_norm": 4.03125,
"learning_rate": 1.900704225352113e-05,
"loss": 0.9244,
"step": 4270
},
{
"epoch": 0.4495798319327731,
"grad_norm": 13.3125,
"learning_rate": 1.899921752738654e-05,
"loss": 1.0728,
"step": 4280
},
{
"epoch": 0.45063025210084034,
"grad_norm": 52.0,
"learning_rate": 1.899139280125196e-05,
"loss": 1.7117,
"step": 4290
},
{
"epoch": 0.45168067226890757,
"grad_norm": 20.375,
"learning_rate": 1.8983568075117374e-05,
"loss": 1.5765,
"step": 4300
},
{
"epoch": 0.4527310924369748,
"grad_norm": 16.0,
"learning_rate": 1.8975743348982788e-05,
"loss": 1.5938,
"step": 4310
},
{
"epoch": 0.453781512605042,
"grad_norm": 16.25,
"learning_rate": 1.8967918622848203e-05,
"loss": 1.9375,
"step": 4320
},
{
"epoch": 0.45483193277310924,
"grad_norm": 3.8125,
"learning_rate": 1.8960093896713617e-05,
"loss": 1.0625,
"step": 4330
},
{
"epoch": 0.45588235294117646,
"grad_norm": 12.625,
"learning_rate": 1.8952269170579032e-05,
"loss": 1.3167,
"step": 4340
},
{
"epoch": 0.4569327731092437,
"grad_norm": 12.8125,
"learning_rate": 1.8944444444444447e-05,
"loss": 1.2067,
"step": 4350
},
{
"epoch": 0.4579831932773109,
"grad_norm": 4.46875,
"learning_rate": 1.893661971830986e-05,
"loss": 1.3717,
"step": 4360
},
{
"epoch": 0.45903361344537813,
"grad_norm": 22.625,
"learning_rate": 1.8928794992175276e-05,
"loss": 1.4667,
"step": 4370
},
{
"epoch": 0.46008403361344535,
"grad_norm": 13.75,
"learning_rate": 1.892097026604069e-05,
"loss": 1.6269,
"step": 4380
},
{
"epoch": 0.46113445378151263,
"grad_norm": 21.25,
"learning_rate": 1.8913145539906105e-05,
"loss": 1.2358,
"step": 4390
},
{
"epoch": 0.46218487394957986,
"grad_norm": 12.0,
"learning_rate": 1.890532081377152e-05,
"loss": 1.0913,
"step": 4400
},
{
"epoch": 0.4632352941176471,
"grad_norm": 17.5,
"learning_rate": 1.8897496087636935e-05,
"loss": 1.2854,
"step": 4410
},
{
"epoch": 0.4642857142857143,
"grad_norm": 11.8125,
"learning_rate": 1.888967136150235e-05,
"loss": 1.2785,
"step": 4420
},
{
"epoch": 0.4653361344537815,
"grad_norm": 11.625,
"learning_rate": 1.8881846635367764e-05,
"loss": 1.3992,
"step": 4430
},
{
"epoch": 0.46638655462184875,
"grad_norm": 12.0625,
"learning_rate": 1.887402190923318e-05,
"loss": 1.5738,
"step": 4440
},
{
"epoch": 0.46743697478991597,
"grad_norm": 13.0625,
"learning_rate": 1.8866197183098593e-05,
"loss": 1.1751,
"step": 4450
},
{
"epoch": 0.4684873949579832,
"grad_norm": 12.8125,
"learning_rate": 1.8858372456964008e-05,
"loss": 1.0964,
"step": 4460
},
{
"epoch": 0.4695378151260504,
"grad_norm": 17.75,
"learning_rate": 1.8850547730829423e-05,
"loss": 1.6314,
"step": 4470
},
{
"epoch": 0.47058823529411764,
"grad_norm": 13.5,
"learning_rate": 1.8842723004694837e-05,
"loss": 1.5794,
"step": 4480
},
{
"epoch": 0.47163865546218486,
"grad_norm": 13.9375,
"learning_rate": 1.8834898278560252e-05,
"loss": 1.7204,
"step": 4490
},
{
"epoch": 0.4726890756302521,
"grad_norm": 16.625,
"learning_rate": 1.8827073552425667e-05,
"loss": 1.4432,
"step": 4500
},
{
"epoch": 0.4737394957983193,
"grad_norm": 12.75,
"learning_rate": 1.881924882629108e-05,
"loss": 1.3706,
"step": 4510
},
{
"epoch": 0.47478991596638653,
"grad_norm": 13.125,
"learning_rate": 1.8811424100156496e-05,
"loss": 1.2891,
"step": 4520
},
{
"epoch": 0.47584033613445376,
"grad_norm": 13.8125,
"learning_rate": 1.880359937402191e-05,
"loss": 1.4449,
"step": 4530
},
{
"epoch": 0.47689075630252103,
"grad_norm": 11.8125,
"learning_rate": 1.8795774647887325e-05,
"loss": 1.6297,
"step": 4540
},
{
"epoch": 0.47794117647058826,
"grad_norm": 14.4375,
"learning_rate": 1.878794992175274e-05,
"loss": 1.6155,
"step": 4550
},
{
"epoch": 0.4789915966386555,
"grad_norm": 12.25,
"learning_rate": 1.8780125195618154e-05,
"loss": 1.5584,
"step": 4560
},
{
"epoch": 0.4800420168067227,
"grad_norm": 16.0,
"learning_rate": 1.877230046948357e-05,
"loss": 1.3156,
"step": 4570
},
{
"epoch": 0.4810924369747899,
"grad_norm": 12.6875,
"learning_rate": 1.8764475743348984e-05,
"loss": 1.5137,
"step": 4580
},
{
"epoch": 0.48214285714285715,
"grad_norm": 17.75,
"learning_rate": 1.87566510172144e-05,
"loss": 1.4913,
"step": 4590
},
{
"epoch": 0.4831932773109244,
"grad_norm": 11.5,
"learning_rate": 1.8748826291079813e-05,
"loss": 0.9244,
"step": 4600
},
{
"epoch": 0.4842436974789916,
"grad_norm": 14.125,
"learning_rate": 1.8741001564945228e-05,
"loss": 2.0681,
"step": 4610
},
{
"epoch": 0.4852941176470588,
"grad_norm": 17.0,
"learning_rate": 1.8733176838810642e-05,
"loss": 1.6281,
"step": 4620
},
{
"epoch": 0.48634453781512604,
"grad_norm": 21.0,
"learning_rate": 1.8725352112676057e-05,
"loss": 1.6307,
"step": 4630
},
{
"epoch": 0.48739495798319327,
"grad_norm": 12.75,
"learning_rate": 1.8717527386541472e-05,
"loss": 1.4405,
"step": 4640
},
{
"epoch": 0.4884453781512605,
"grad_norm": 12.75,
"learning_rate": 1.8709702660406886e-05,
"loss": 1.3397,
"step": 4650
},
{
"epoch": 0.4894957983193277,
"grad_norm": 14.5,
"learning_rate": 1.87018779342723e-05,
"loss": 1.521,
"step": 4660
},
{
"epoch": 0.49054621848739494,
"grad_norm": 15.375,
"learning_rate": 1.8694053208137716e-05,
"loss": 1.4432,
"step": 4670
},
{
"epoch": 0.49159663865546216,
"grad_norm": 15.8125,
"learning_rate": 1.868622848200313e-05,
"loss": 1.7055,
"step": 4680
},
{
"epoch": 0.49264705882352944,
"grad_norm": 14.0,
"learning_rate": 1.8678403755868545e-05,
"loss": 1.5279,
"step": 4690
},
{
"epoch": 0.49369747899159666,
"grad_norm": 11.625,
"learning_rate": 1.8670579029733963e-05,
"loss": 1.4292,
"step": 4700
},
{
"epoch": 0.4947478991596639,
"grad_norm": 9.8125,
"learning_rate": 1.8662754303599374e-05,
"loss": 1.2928,
"step": 4710
},
{
"epoch": 0.4957983193277311,
"grad_norm": 9.375,
"learning_rate": 1.8654929577464792e-05,
"loss": 1.3798,
"step": 4720
},
{
"epoch": 0.49684873949579833,
"grad_norm": 14.375,
"learning_rate": 1.8647104851330204e-05,
"loss": 1.4781,
"step": 4730
},
{
"epoch": 0.49789915966386555,
"grad_norm": 18.0,
"learning_rate": 1.863928012519562e-05,
"loss": 1.6945,
"step": 4740
},
{
"epoch": 0.4989495798319328,
"grad_norm": 12.375,
"learning_rate": 1.8631455399061033e-05,
"loss": 1.632,
"step": 4750
},
{
"epoch": 0.5,
"grad_norm": 12.0625,
"learning_rate": 1.862363067292645e-05,
"loss": 1.2287,
"step": 4760
},
{
"epoch": 0.5010504201680672,
"grad_norm": 10.625,
"learning_rate": 1.8615805946791862e-05,
"loss": 1.4539,
"step": 4770
},
{
"epoch": 0.5021008403361344,
"grad_norm": 16.25,
"learning_rate": 1.8607981220657277e-05,
"loss": 1.6725,
"step": 4780
},
{
"epoch": 0.5031512605042017,
"grad_norm": 12.3125,
"learning_rate": 1.8600156494522695e-05,
"loss": 1.7122,
"step": 4790
},
{
"epoch": 0.5042016806722689,
"grad_norm": 13.6875,
"learning_rate": 1.8592331768388106e-05,
"loss": 1.8102,
"step": 4800
},
{
"epoch": 0.5052521008403361,
"grad_norm": 12.625,
"learning_rate": 1.8584507042253524e-05,
"loss": 1.2716,
"step": 4810
},
{
"epoch": 0.5063025210084033,
"grad_norm": 18.75,
"learning_rate": 1.8576682316118935e-05,
"loss": 1.52,
"step": 4820
},
{
"epoch": 0.5073529411764706,
"grad_norm": 12.6875,
"learning_rate": 1.8568857589984354e-05,
"loss": 1.4568,
"step": 4830
},
{
"epoch": 0.5084033613445378,
"grad_norm": 11.75,
"learning_rate": 1.8561032863849765e-05,
"loss": 1.6998,
"step": 4840
},
{
"epoch": 0.509453781512605,
"grad_norm": 13.625,
"learning_rate": 1.8553208137715183e-05,
"loss": 1.4233,
"step": 4850
},
{
"epoch": 0.5105042016806722,
"grad_norm": 20.625,
"learning_rate": 1.8545383411580594e-05,
"loss": 1.4319,
"step": 4860
},
{
"epoch": 0.5115546218487395,
"grad_norm": 22.875,
"learning_rate": 1.8537558685446012e-05,
"loss": 1.4499,
"step": 4870
},
{
"epoch": 0.5126050420168067,
"grad_norm": 7.0625,
"learning_rate": 1.8529733959311427e-05,
"loss": 1.3792,
"step": 4880
},
{
"epoch": 0.5136554621848739,
"grad_norm": 6.9375,
"learning_rate": 1.852190923317684e-05,
"loss": 1.0939,
"step": 4890
},
{
"epoch": 0.5147058823529411,
"grad_norm": 6.125,
"learning_rate": 1.8514084507042256e-05,
"loss": 1.5026,
"step": 4900
},
{
"epoch": 0.5157563025210085,
"grad_norm": 14.75,
"learning_rate": 1.850625978090767e-05,
"loss": 1.472,
"step": 4910
},
{
"epoch": 0.5168067226890757,
"grad_norm": 14.375,
"learning_rate": 1.8498435054773085e-05,
"loss": 1.2507,
"step": 4920
},
{
"epoch": 0.5178571428571429,
"grad_norm": 3.921875,
"learning_rate": 1.84906103286385e-05,
"loss": 1.6288,
"step": 4930
},
{
"epoch": 0.5189075630252101,
"grad_norm": 18.5,
"learning_rate": 1.8482785602503915e-05,
"loss": 1.7701,
"step": 4940
},
{
"epoch": 0.5199579831932774,
"grad_norm": 15.5625,
"learning_rate": 1.847496087636933e-05,
"loss": 1.5399,
"step": 4950
},
{
"epoch": 0.5210084033613446,
"grad_norm": 17.5,
"learning_rate": 1.8467136150234744e-05,
"loss": 1.5214,
"step": 4960
},
{
"epoch": 0.5220588235294118,
"grad_norm": 12.6875,
"learning_rate": 1.845931142410016e-05,
"loss": 1.6711,
"step": 4970
},
{
"epoch": 0.523109243697479,
"grad_norm": 15.5,
"learning_rate": 1.8451486697965573e-05,
"loss": 1.5255,
"step": 4980
},
{
"epoch": 0.5241596638655462,
"grad_norm": 23.75,
"learning_rate": 1.8443661971830988e-05,
"loss": 1.1936,
"step": 4990
},
{
"epoch": 0.5252100840336135,
"grad_norm": 11.25,
"learning_rate": 1.8435837245696403e-05,
"loss": 1.6067,
"step": 5000
},
{
"epoch": 0.5262605042016807,
"grad_norm": 9.75,
"learning_rate": 1.8428012519561817e-05,
"loss": 1.6151,
"step": 5010
},
{
"epoch": 0.5273109243697479,
"grad_norm": 23.625,
"learning_rate": 1.8420187793427232e-05,
"loss": 1.7412,
"step": 5020
},
{
"epoch": 0.5283613445378151,
"grad_norm": 14.625,
"learning_rate": 1.8412363067292647e-05,
"loss": 1.2956,
"step": 5030
},
{
"epoch": 0.5294117647058824,
"grad_norm": 14.0625,
"learning_rate": 1.840453834115806e-05,
"loss": 1.5333,
"step": 5040
},
{
"epoch": 0.5304621848739496,
"grad_norm": 12.375,
"learning_rate": 1.8396713615023476e-05,
"loss": 1.3507,
"step": 5050
},
{
"epoch": 0.5315126050420168,
"grad_norm": 17.375,
"learning_rate": 1.838888888888889e-05,
"loss": 1.2024,
"step": 5060
},
{
"epoch": 0.532563025210084,
"grad_norm": 4.6875,
"learning_rate": 1.8381064162754305e-05,
"loss": 1.542,
"step": 5070
},
{
"epoch": 0.5336134453781513,
"grad_norm": 24.875,
"learning_rate": 1.837323943661972e-05,
"loss": 1.3459,
"step": 5080
},
{
"epoch": 0.5346638655462185,
"grad_norm": 20.25,
"learning_rate": 1.8365414710485134e-05,
"loss": 1.4547,
"step": 5090
},
{
"epoch": 0.5357142857142857,
"grad_norm": 8.75,
"learning_rate": 1.835758998435055e-05,
"loss": 1.3747,
"step": 5100
},
{
"epoch": 0.5367647058823529,
"grad_norm": 4.90625,
"learning_rate": 1.8349765258215964e-05,
"loss": 1.449,
"step": 5110
},
{
"epoch": 0.5378151260504201,
"grad_norm": 5.34375,
"learning_rate": 1.834194053208138e-05,
"loss": 1.4577,
"step": 5120
},
{
"epoch": 0.5388655462184874,
"grad_norm": 14.375,
"learning_rate": 1.8334115805946793e-05,
"loss": 1.2082,
"step": 5130
},
{
"epoch": 0.5399159663865546,
"grad_norm": 16.375,
"learning_rate": 1.8326291079812208e-05,
"loss": 1.3699,
"step": 5140
},
{
"epoch": 0.5409663865546218,
"grad_norm": 60.75,
"learning_rate": 1.8318466353677622e-05,
"loss": 1.5371,
"step": 5150
},
{
"epoch": 0.542016806722689,
"grad_norm": 13.1875,
"learning_rate": 1.8310641627543037e-05,
"loss": 1.1132,
"step": 5160
},
{
"epoch": 0.5430672268907563,
"grad_norm": 16.625,
"learning_rate": 1.8302816901408452e-05,
"loss": 1.3545,
"step": 5170
},
{
"epoch": 0.5441176470588235,
"grad_norm": 5.0625,
"learning_rate": 1.8294992175273866e-05,
"loss": 1.3821,
"step": 5180
},
{
"epoch": 0.5451680672268907,
"grad_norm": 14.875,
"learning_rate": 1.828716744913928e-05,
"loss": 1.4026,
"step": 5190
},
{
"epoch": 0.5462184873949579,
"grad_norm": 21.0,
"learning_rate": 1.8279342723004696e-05,
"loss": 1.3391,
"step": 5200
},
{
"epoch": 0.5472689075630253,
"grad_norm": 16.375,
"learning_rate": 1.827151799687011e-05,
"loss": 1.603,
"step": 5210
},
{
"epoch": 0.5483193277310925,
"grad_norm": 7.75,
"learning_rate": 1.8263693270735525e-05,
"loss": 1.4944,
"step": 5220
},
{
"epoch": 0.5493697478991597,
"grad_norm": 12.1875,
"learning_rate": 1.825586854460094e-05,
"loss": 1.4024,
"step": 5230
},
{
"epoch": 0.5504201680672269,
"grad_norm": 15.3125,
"learning_rate": 1.8248043818466354e-05,
"loss": 1.0601,
"step": 5240
},
{
"epoch": 0.5514705882352942,
"grad_norm": 9.8125,
"learning_rate": 1.824021909233177e-05,
"loss": 1.6795,
"step": 5250
},
{
"epoch": 0.5525210084033614,
"grad_norm": 12.4375,
"learning_rate": 1.8232394366197184e-05,
"loss": 1.3527,
"step": 5260
},
{
"epoch": 0.5535714285714286,
"grad_norm": 4.3125,
"learning_rate": 1.8224569640062598e-05,
"loss": 1.422,
"step": 5270
},
{
"epoch": 0.5546218487394958,
"grad_norm": 9.6875,
"learning_rate": 1.8216744913928016e-05,
"loss": 1.3717,
"step": 5280
},
{
"epoch": 0.555672268907563,
"grad_norm": 12.875,
"learning_rate": 1.8208920187793428e-05,
"loss": 1.3349,
"step": 5290
},
{
"epoch": 0.5567226890756303,
"grad_norm": 12.9375,
"learning_rate": 1.8201095461658846e-05,
"loss": 1.3049,
"step": 5300
},
{
"epoch": 0.5577731092436975,
"grad_norm": 15.3125,
"learning_rate": 1.8193270735524257e-05,
"loss": 1.6232,
"step": 5310
},
{
"epoch": 0.5588235294117647,
"grad_norm": 16.125,
"learning_rate": 1.8185446009389675e-05,
"loss": 1.2363,
"step": 5320
},
{
"epoch": 0.5598739495798319,
"grad_norm": 12.875,
"learning_rate": 1.8177621283255086e-05,
"loss": 1.3058,
"step": 5330
},
{
"epoch": 0.5609243697478992,
"grad_norm": 13.25,
"learning_rate": 1.8169796557120504e-05,
"loss": 1.4614,
"step": 5340
},
{
"epoch": 0.5619747899159664,
"grad_norm": 13.5625,
"learning_rate": 1.8161971830985915e-05,
"loss": 1.5089,
"step": 5350
},
{
"epoch": 0.5630252100840336,
"grad_norm": 10.375,
"learning_rate": 1.8154147104851333e-05,
"loss": 1.0185,
"step": 5360
},
{
"epoch": 0.5640756302521008,
"grad_norm": 22.5,
"learning_rate": 1.8146322378716745e-05,
"loss": 1.0896,
"step": 5370
},
{
"epoch": 0.5651260504201681,
"grad_norm": 26.375,
"learning_rate": 1.813849765258216e-05,
"loss": 1.449,
"step": 5380
},
{
"epoch": 0.5661764705882353,
"grad_norm": 15.0625,
"learning_rate": 1.8130672926447577e-05,
"loss": 0.989,
"step": 5390
},
{
"epoch": 0.5672268907563025,
"grad_norm": 5.34375,
"learning_rate": 1.812284820031299e-05,
"loss": 1.3221,
"step": 5400
},
{
"epoch": 0.5682773109243697,
"grad_norm": 19.625,
"learning_rate": 1.8115023474178407e-05,
"loss": 1.1908,
"step": 5410
},
{
"epoch": 0.569327731092437,
"grad_norm": 17.875,
"learning_rate": 1.8107198748043818e-05,
"loss": 1.6788,
"step": 5420
},
{
"epoch": 0.5703781512605042,
"grad_norm": 12.875,
"learning_rate": 1.8099374021909236e-05,
"loss": 1.5798,
"step": 5430
},
{
"epoch": 0.5714285714285714,
"grad_norm": 47.25,
"learning_rate": 1.8091549295774647e-05,
"loss": 1.3783,
"step": 5440
},
{
"epoch": 0.5724789915966386,
"grad_norm": 5.5,
"learning_rate": 1.8083724569640065e-05,
"loss": 1.4343,
"step": 5450
},
{
"epoch": 0.5735294117647058,
"grad_norm": 15.25,
"learning_rate": 1.8075899843505477e-05,
"loss": 1.3189,
"step": 5460
},
{
"epoch": 0.5745798319327731,
"grad_norm": 17.5,
"learning_rate": 1.8068075117370895e-05,
"loss": 1.821,
"step": 5470
},
{
"epoch": 0.5756302521008403,
"grad_norm": 9.8125,
"learning_rate": 1.806025039123631e-05,
"loss": 1.3433,
"step": 5480
},
{
"epoch": 0.5766806722689075,
"grad_norm": 9.1875,
"learning_rate": 1.8052425665101724e-05,
"loss": 1.647,
"step": 5490
},
{
"epoch": 0.5777310924369747,
"grad_norm": 6.53125,
"learning_rate": 1.804460093896714e-05,
"loss": 1.1225,
"step": 5500
},
{
"epoch": 0.5787815126050421,
"grad_norm": 21.625,
"learning_rate": 1.8036776212832553e-05,
"loss": 1.2338,
"step": 5510
},
{
"epoch": 0.5798319327731093,
"grad_norm": 15.9375,
"learning_rate": 1.8028951486697968e-05,
"loss": 1.333,
"step": 5520
},
{
"epoch": 0.5808823529411765,
"grad_norm": 5.0,
"learning_rate": 1.8021126760563383e-05,
"loss": 1.1024,
"step": 5530
},
{
"epoch": 0.5819327731092437,
"grad_norm": 12.1875,
"learning_rate": 1.8013302034428797e-05,
"loss": 1.2991,
"step": 5540
},
{
"epoch": 0.582983193277311,
"grad_norm": 13.25,
"learning_rate": 1.8005477308294212e-05,
"loss": 1.7033,
"step": 5550
},
{
"epoch": 0.5840336134453782,
"grad_norm": 12.6875,
"learning_rate": 1.7997652582159627e-05,
"loss": 1.3945,
"step": 5560
},
{
"epoch": 0.5850840336134454,
"grad_norm": 5.21875,
"learning_rate": 1.798982785602504e-05,
"loss": 1.3108,
"step": 5570
},
{
"epoch": 0.5861344537815126,
"grad_norm": 7.71875,
"learning_rate": 1.7982003129890456e-05,
"loss": 1.3495,
"step": 5580
},
{
"epoch": 0.5871848739495799,
"grad_norm": 17.875,
"learning_rate": 1.797417840375587e-05,
"loss": 1.2221,
"step": 5590
},
{
"epoch": 0.5882352941176471,
"grad_norm": 7.96875,
"learning_rate": 1.7966353677621285e-05,
"loss": 1.3192,
"step": 5600
},
{
"epoch": 0.5892857142857143,
"grad_norm": 16.875,
"learning_rate": 1.79585289514867e-05,
"loss": 1.7956,
"step": 5610
},
{
"epoch": 0.5903361344537815,
"grad_norm": 15.0,
"learning_rate": 1.7950704225352114e-05,
"loss": 1.57,
"step": 5620
},
{
"epoch": 0.5913865546218487,
"grad_norm": 15.625,
"learning_rate": 1.794287949921753e-05,
"loss": 1.216,
"step": 5630
},
{
"epoch": 0.592436974789916,
"grad_norm": 17.375,
"learning_rate": 1.7935054773082944e-05,
"loss": 1.39,
"step": 5640
},
{
"epoch": 0.5934873949579832,
"grad_norm": 4.25,
"learning_rate": 1.792723004694836e-05,
"loss": 1.1288,
"step": 5650
},
{
"epoch": 0.5945378151260504,
"grad_norm": 17.375,
"learning_rate": 1.7919405320813773e-05,
"loss": 1.3017,
"step": 5660
},
{
"epoch": 0.5955882352941176,
"grad_norm": 10.125,
"learning_rate": 1.7911580594679188e-05,
"loss": 1.2518,
"step": 5670
},
{
"epoch": 0.5966386554621849,
"grad_norm": 13.0625,
"learning_rate": 1.7903755868544602e-05,
"loss": 1.3653,
"step": 5680
},
{
"epoch": 0.5976890756302521,
"grad_norm": 13.0,
"learning_rate": 1.7895931142410017e-05,
"loss": 1.2755,
"step": 5690
},
{
"epoch": 0.5987394957983193,
"grad_norm": 8.8125,
"learning_rate": 1.788810641627543e-05,
"loss": 1.4182,
"step": 5700
},
{
"epoch": 0.5997899159663865,
"grad_norm": 19.375,
"learning_rate": 1.7880281690140846e-05,
"loss": 1.4829,
"step": 5710
},
{
"epoch": 0.6008403361344538,
"grad_norm": 5.4375,
"learning_rate": 1.787245696400626e-05,
"loss": 1.4376,
"step": 5720
},
{
"epoch": 0.601890756302521,
"grad_norm": 14.3125,
"learning_rate": 1.7864632237871676e-05,
"loss": 1.6832,
"step": 5730
},
{
"epoch": 0.6029411764705882,
"grad_norm": 16.5,
"learning_rate": 1.785680751173709e-05,
"loss": 1.8146,
"step": 5740
},
{
"epoch": 0.6039915966386554,
"grad_norm": 12.9375,
"learning_rate": 1.7848982785602505e-05,
"loss": 1.3456,
"step": 5750
},
{
"epoch": 0.6050420168067226,
"grad_norm": 18.875,
"learning_rate": 1.784115805946792e-05,
"loss": 1.2098,
"step": 5760
},
{
"epoch": 0.6060924369747899,
"grad_norm": 13.0,
"learning_rate": 1.7833333333333334e-05,
"loss": 1.2479,
"step": 5770
},
{
"epoch": 0.6071428571428571,
"grad_norm": 11.125,
"learning_rate": 1.782550860719875e-05,
"loss": 1.1653,
"step": 5780
},
{
"epoch": 0.6081932773109243,
"grad_norm": 13.25,
"learning_rate": 1.7817683881064164e-05,
"loss": 1.8153,
"step": 5790
},
{
"epoch": 0.6092436974789915,
"grad_norm": 14.375,
"learning_rate": 1.7809859154929578e-05,
"loss": 1.6105,
"step": 5800
},
{
"epoch": 0.6102941176470589,
"grad_norm": 27.5,
"learning_rate": 1.7802034428794993e-05,
"loss": 1.4028,
"step": 5810
},
{
"epoch": 0.6113445378151261,
"grad_norm": 4.125,
"learning_rate": 1.7794209702660408e-05,
"loss": 1.2498,
"step": 5820
},
{
"epoch": 0.6123949579831933,
"grad_norm": 11.625,
"learning_rate": 1.7786384976525822e-05,
"loss": 0.9192,
"step": 5830
},
{
"epoch": 0.6134453781512605,
"grad_norm": 18.75,
"learning_rate": 1.7778560250391237e-05,
"loss": 1.1645,
"step": 5840
},
{
"epoch": 0.6144957983193278,
"grad_norm": 15.3125,
"learning_rate": 1.777073552425665e-05,
"loss": 1.9024,
"step": 5850
},
{
"epoch": 0.615546218487395,
"grad_norm": 15.25,
"learning_rate": 1.7762910798122066e-05,
"loss": 1.5987,
"step": 5860
},
{
"epoch": 0.6165966386554622,
"grad_norm": 13.5625,
"learning_rate": 1.775508607198748e-05,
"loss": 1.3094,
"step": 5870
},
{
"epoch": 0.6176470588235294,
"grad_norm": 3.625,
"learning_rate": 1.77472613458529e-05,
"loss": 1.7913,
"step": 5880
},
{
"epoch": 0.6186974789915967,
"grad_norm": 17.0,
"learning_rate": 1.773943661971831e-05,
"loss": 1.3136,
"step": 5890
},
{
"epoch": 0.6197478991596639,
"grad_norm": 20.625,
"learning_rate": 1.7731611893583728e-05,
"loss": 1.5556,
"step": 5900
},
{
"epoch": 0.6207983193277311,
"grad_norm": 12.4375,
"learning_rate": 1.772378716744914e-05,
"loss": 0.7191,
"step": 5910
},
{
"epoch": 0.6218487394957983,
"grad_norm": 13.5,
"learning_rate": 1.7715962441314557e-05,
"loss": 1.0429,
"step": 5920
},
{
"epoch": 0.6228991596638656,
"grad_norm": 13.5625,
"learning_rate": 1.770813771517997e-05,
"loss": 1.7025,
"step": 5930
},
{
"epoch": 0.6239495798319328,
"grad_norm": 19.75,
"learning_rate": 1.7700312989045387e-05,
"loss": 1.5732,
"step": 5940
},
{
"epoch": 0.625,
"grad_norm": 15.8125,
"learning_rate": 1.7692488262910798e-05,
"loss": 1.5835,
"step": 5950
},
{
"epoch": 0.6260504201680672,
"grad_norm": 15.4375,
"learning_rate": 1.7684663536776213e-05,
"loss": 1.7755,
"step": 5960
},
{
"epoch": 0.6271008403361344,
"grad_norm": 17.75,
"learning_rate": 1.767683881064163e-05,
"loss": 1.5965,
"step": 5970
},
{
"epoch": 0.6281512605042017,
"grad_norm": 4.5625,
"learning_rate": 1.7669014084507042e-05,
"loss": 1.0207,
"step": 5980
},
{
"epoch": 0.6292016806722689,
"grad_norm": 12.9375,
"learning_rate": 1.766118935837246e-05,
"loss": 1.4202,
"step": 5990
},
{
"epoch": 0.6302521008403361,
"grad_norm": 14.5,
"learning_rate": 1.765336463223787e-05,
"loss": 1.0423,
"step": 6000
},
{
"epoch": 0.6313025210084033,
"grad_norm": 15.3125,
"learning_rate": 1.764553990610329e-05,
"loss": 2.0845,
"step": 6010
},
{
"epoch": 0.6323529411764706,
"grad_norm": 4.8125,
"learning_rate": 1.76377151799687e-05,
"loss": 1.4338,
"step": 6020
},
{
"epoch": 0.6334033613445378,
"grad_norm": 14.6875,
"learning_rate": 1.762989045383412e-05,
"loss": 1.3871,
"step": 6030
},
{
"epoch": 0.634453781512605,
"grad_norm": 14.25,
"learning_rate": 1.762206572769953e-05,
"loss": 1.7668,
"step": 6040
},
{
"epoch": 0.6355042016806722,
"grad_norm": 18.25,
"learning_rate": 1.7614241001564948e-05,
"loss": 1.7259,
"step": 6050
},
{
"epoch": 0.6365546218487395,
"grad_norm": 23.5,
"learning_rate": 1.7606416275430363e-05,
"loss": 0.9416,
"step": 6060
},
{
"epoch": 0.6376050420168067,
"grad_norm": 15.375,
"learning_rate": 1.7598591549295777e-05,
"loss": 1.9263,
"step": 6070
},
{
"epoch": 0.6386554621848739,
"grad_norm": 11.625,
"learning_rate": 1.7590766823161192e-05,
"loss": 1.0191,
"step": 6080
},
{
"epoch": 0.6397058823529411,
"grad_norm": 15.9375,
"learning_rate": 1.7582942097026607e-05,
"loss": 1.8691,
"step": 6090
},
{
"epoch": 0.6407563025210085,
"grad_norm": 4.90625,
"learning_rate": 1.757511737089202e-05,
"loss": 1.4114,
"step": 6100
},
{
"epoch": 0.6418067226890757,
"grad_norm": 24.0,
"learning_rate": 1.7567292644757436e-05,
"loss": 1.3372,
"step": 6110
},
{
"epoch": 0.6428571428571429,
"grad_norm": 12.4375,
"learning_rate": 1.755946791862285e-05,
"loss": 1.2139,
"step": 6120
},
{
"epoch": 0.6439075630252101,
"grad_norm": 11.625,
"learning_rate": 1.7551643192488265e-05,
"loss": 1.0369,
"step": 6130
},
{
"epoch": 0.6449579831932774,
"grad_norm": 6.65625,
"learning_rate": 1.754381846635368e-05,
"loss": 1.2482,
"step": 6140
},
{
"epoch": 0.6460084033613446,
"grad_norm": 13.1875,
"learning_rate": 1.753599374021909e-05,
"loss": 1.4508,
"step": 6150
},
{
"epoch": 0.6470588235294118,
"grad_norm": 23.125,
"learning_rate": 1.752816901408451e-05,
"loss": 1.4622,
"step": 6160
},
{
"epoch": 0.648109243697479,
"grad_norm": 5.875,
"learning_rate": 1.7520344287949924e-05,
"loss": 1.6703,
"step": 6170
},
{
"epoch": 0.6491596638655462,
"grad_norm": 10.5,
"learning_rate": 1.751251956181534e-05,
"loss": 1.0821,
"step": 6180
},
{
"epoch": 0.6502100840336135,
"grad_norm": 6.65625,
"learning_rate": 1.7504694835680753e-05,
"loss": 1.4789,
"step": 6190
},
{
"epoch": 0.6512605042016807,
"grad_norm": 13.5625,
"learning_rate": 1.7496870109546168e-05,
"loss": 1.2124,
"step": 6200
},
{
"epoch": 0.6523109243697479,
"grad_norm": 12.9375,
"learning_rate": 1.7489045383411582e-05,
"loss": 1.7802,
"step": 6210
},
{
"epoch": 0.6533613445378151,
"grad_norm": 16.5,
"learning_rate": 1.7481220657276997e-05,
"loss": 1.644,
"step": 6220
},
{
"epoch": 0.6544117647058824,
"grad_norm": 28.5,
"learning_rate": 1.747339593114241e-05,
"loss": 1.4378,
"step": 6230
},
{
"epoch": 0.6554621848739496,
"grad_norm": 3.40625,
"learning_rate": 1.7465571205007826e-05,
"loss": 1.4979,
"step": 6240
},
{
"epoch": 0.6565126050420168,
"grad_norm": 17.375,
"learning_rate": 1.745774647887324e-05,
"loss": 1.7841,
"step": 6250
},
{
"epoch": 0.657563025210084,
"grad_norm": 10.3125,
"learning_rate": 1.7449921752738656e-05,
"loss": 1.1316,
"step": 6260
},
{
"epoch": 0.6586134453781513,
"grad_norm": 5.375,
"learning_rate": 1.744209702660407e-05,
"loss": 1.0144,
"step": 6270
},
{
"epoch": 0.6596638655462185,
"grad_norm": 13.5,
"learning_rate": 1.7434272300469485e-05,
"loss": 1.1497,
"step": 6280
},
{
"epoch": 0.6607142857142857,
"grad_norm": 12.5,
"learning_rate": 1.74264475743349e-05,
"loss": 1.2047,
"step": 6290
},
{
"epoch": 0.6617647058823529,
"grad_norm": 16.25,
"learning_rate": 1.7418622848200314e-05,
"loss": 1.4126,
"step": 6300
},
{
"epoch": 0.6628151260504201,
"grad_norm": 15.1875,
"learning_rate": 1.741079812206573e-05,
"loss": 1.5036,
"step": 6310
},
{
"epoch": 0.6638655462184874,
"grad_norm": 12.5,
"learning_rate": 1.7402973395931144e-05,
"loss": 1.7585,
"step": 6320
},
{
"epoch": 0.6649159663865546,
"grad_norm": 13.0625,
"learning_rate": 1.7395148669796558e-05,
"loss": 1.359,
"step": 6330
},
{
"epoch": 0.6659663865546218,
"grad_norm": 16.75,
"learning_rate": 1.7387323943661973e-05,
"loss": 1.2548,
"step": 6340
},
{
"epoch": 0.667016806722689,
"grad_norm": 15.625,
"learning_rate": 1.7379499217527388e-05,
"loss": 1.4111,
"step": 6350
},
{
"epoch": 0.6680672268907563,
"grad_norm": 9.9375,
"learning_rate": 1.7371674491392802e-05,
"loss": 1.1517,
"step": 6360
},
{
"epoch": 0.6691176470588235,
"grad_norm": 13.6875,
"learning_rate": 1.7363849765258217e-05,
"loss": 1.2131,
"step": 6370
},
{
"epoch": 0.6701680672268907,
"grad_norm": 6.75,
"learning_rate": 1.735602503912363e-05,
"loss": 1.3011,
"step": 6380
},
{
"epoch": 0.6712184873949579,
"grad_norm": 14.8125,
"learning_rate": 1.7348200312989046e-05,
"loss": 1.2044,
"step": 6390
},
{
"epoch": 0.6722689075630253,
"grad_norm": 3.75,
"learning_rate": 1.734037558685446e-05,
"loss": 1.1038,
"step": 6400
},
{
"epoch": 0.6733193277310925,
"grad_norm": 11.3125,
"learning_rate": 1.7332550860719875e-05,
"loss": 1.6009,
"step": 6410
},
{
"epoch": 0.6743697478991597,
"grad_norm": 9.6875,
"learning_rate": 1.732472613458529e-05,
"loss": 1.4377,
"step": 6420
},
{
"epoch": 0.6754201680672269,
"grad_norm": 20.0,
"learning_rate": 1.7316901408450705e-05,
"loss": 1.5643,
"step": 6430
},
{
"epoch": 0.6764705882352942,
"grad_norm": 5.59375,
"learning_rate": 1.730907668231612e-05,
"loss": 1.5667,
"step": 6440
},
{
"epoch": 0.6775210084033614,
"grad_norm": 12.25,
"learning_rate": 1.7301251956181534e-05,
"loss": 1.1483,
"step": 6450
},
{
"epoch": 0.6785714285714286,
"grad_norm": 12.375,
"learning_rate": 1.7293427230046952e-05,
"loss": 1.3863,
"step": 6460
},
{
"epoch": 0.6796218487394958,
"grad_norm": 11.25,
"learning_rate": 1.7285602503912363e-05,
"loss": 1.1266,
"step": 6470
},
{
"epoch": 0.680672268907563,
"grad_norm": 17.25,
"learning_rate": 1.727777777777778e-05,
"loss": 1.649,
"step": 6480
},
{
"epoch": 0.6817226890756303,
"grad_norm": 16.25,
"learning_rate": 1.7269953051643193e-05,
"loss": 1.582,
"step": 6490
},
{
"epoch": 0.6827731092436975,
"grad_norm": 29.0,
"learning_rate": 1.726212832550861e-05,
"loss": 1.6732,
"step": 6500
},
{
"epoch": 0.6838235294117647,
"grad_norm": 10.25,
"learning_rate": 1.7254303599374022e-05,
"loss": 1.4154,
"step": 6510
},
{
"epoch": 0.6848739495798319,
"grad_norm": 11.6875,
"learning_rate": 1.724647887323944e-05,
"loss": 1.5973,
"step": 6520
},
{
"epoch": 0.6859243697478992,
"grad_norm": 13.4375,
"learning_rate": 1.723865414710485e-05,
"loss": 1.9317,
"step": 6530
},
{
"epoch": 0.6869747899159664,
"grad_norm": 11.5625,
"learning_rate": 1.7230829420970266e-05,
"loss": 1.6537,
"step": 6540
},
{
"epoch": 0.6880252100840336,
"grad_norm": 10.0625,
"learning_rate": 1.7223004694835684e-05,
"loss": 1.3775,
"step": 6550
},
{
"epoch": 0.6890756302521008,
"grad_norm": 11.9375,
"learning_rate": 1.7215179968701095e-05,
"loss": 1.455,
"step": 6560
},
{
"epoch": 0.6901260504201681,
"grad_norm": 21.0,
"learning_rate": 1.7207355242566513e-05,
"loss": 1.2449,
"step": 6570
},
{
"epoch": 0.6911764705882353,
"grad_norm": 16.5,
"learning_rate": 1.7199530516431925e-05,
"loss": 1.5573,
"step": 6580
},
{
"epoch": 0.6922268907563025,
"grad_norm": 11.25,
"learning_rate": 1.7191705790297343e-05,
"loss": 1.4817,
"step": 6590
},
{
"epoch": 0.6932773109243697,
"grad_norm": 13.5,
"learning_rate": 1.7183881064162754e-05,
"loss": 1.333,
"step": 6600
},
{
"epoch": 0.694327731092437,
"grad_norm": 17.75,
"learning_rate": 1.7176056338028172e-05,
"loss": 1.5649,
"step": 6610
},
{
"epoch": 0.6953781512605042,
"grad_norm": 4.75,
"learning_rate": 1.7168231611893583e-05,
"loss": 0.8342,
"step": 6620
},
{
"epoch": 0.6964285714285714,
"grad_norm": 16.0,
"learning_rate": 1.7160406885759e-05,
"loss": 1.4425,
"step": 6630
},
{
"epoch": 0.6974789915966386,
"grad_norm": 11.9375,
"learning_rate": 1.7152582159624412e-05,
"loss": 1.434,
"step": 6640
},
{
"epoch": 0.6985294117647058,
"grad_norm": 11.625,
"learning_rate": 1.714475743348983e-05,
"loss": 1.3348,
"step": 6650
},
{
"epoch": 0.6995798319327731,
"grad_norm": 19.875,
"learning_rate": 1.7136932707355245e-05,
"loss": 1.5359,
"step": 6660
},
{
"epoch": 0.7006302521008403,
"grad_norm": 10.125,
"learning_rate": 1.712910798122066e-05,
"loss": 1.5929,
"step": 6670
},
{
"epoch": 0.7016806722689075,
"grad_norm": 8.5,
"learning_rate": 1.7121283255086074e-05,
"loss": 1.4151,
"step": 6680
},
{
"epoch": 0.7027310924369747,
"grad_norm": 6.78125,
"learning_rate": 1.711345852895149e-05,
"loss": 1.0809,
"step": 6690
},
{
"epoch": 0.7037815126050421,
"grad_norm": 11.4375,
"learning_rate": 1.7105633802816904e-05,
"loss": 1.3458,
"step": 6700
},
{
"epoch": 0.7048319327731093,
"grad_norm": 11.4375,
"learning_rate": 1.709780907668232e-05,
"loss": 1.5675,
"step": 6710
},
{
"epoch": 0.7058823529411765,
"grad_norm": 14.9375,
"learning_rate": 1.7089984350547733e-05,
"loss": 1.9173,
"step": 6720
},
{
"epoch": 0.7069327731092437,
"grad_norm": 10.4375,
"learning_rate": 1.7082159624413148e-05,
"loss": 1.032,
"step": 6730
},
{
"epoch": 0.707983193277311,
"grad_norm": 18.5,
"learning_rate": 1.7074334898278562e-05,
"loss": 1.7023,
"step": 6740
},
{
"epoch": 0.7090336134453782,
"grad_norm": 15.9375,
"learning_rate": 1.7066510172143977e-05,
"loss": 1.3879,
"step": 6750
},
{
"epoch": 0.7100840336134454,
"grad_norm": 6.46875,
"learning_rate": 1.705868544600939e-05,
"loss": 1.8452,
"step": 6760
},
{
"epoch": 0.7111344537815126,
"grad_norm": 15.0,
"learning_rate": 1.7050860719874806e-05,
"loss": 1.4104,
"step": 6770
},
{
"epoch": 0.7121848739495799,
"grad_norm": 16.625,
"learning_rate": 1.704303599374022e-05,
"loss": 1.3641,
"step": 6780
},
{
"epoch": 0.7132352941176471,
"grad_norm": 17.25,
"learning_rate": 1.7035211267605636e-05,
"loss": 1.4804,
"step": 6790
},
{
"epoch": 0.7142857142857143,
"grad_norm": 27.375,
"learning_rate": 1.702738654147105e-05,
"loss": 1.9552,
"step": 6800
},
{
"epoch": 0.7153361344537815,
"grad_norm": 4.21875,
"learning_rate": 1.7019561815336465e-05,
"loss": 1.0423,
"step": 6810
},
{
"epoch": 0.7163865546218487,
"grad_norm": 10.9375,
"learning_rate": 1.701173708920188e-05,
"loss": 1.4615,
"step": 6820
},
{
"epoch": 0.717436974789916,
"grad_norm": 16.5,
"learning_rate": 1.7003912363067294e-05,
"loss": 1.3719,
"step": 6830
},
{
"epoch": 0.7184873949579832,
"grad_norm": 13.8125,
"learning_rate": 1.699608763693271e-05,
"loss": 2.0062,
"step": 6840
},
{
"epoch": 0.7195378151260504,
"grad_norm": 12.1875,
"learning_rate": 1.6988262910798124e-05,
"loss": 1.6726,
"step": 6850
},
{
"epoch": 0.7205882352941176,
"grad_norm": 11.0,
"learning_rate": 1.6980438184663538e-05,
"loss": 1.4726,
"step": 6860
},
{
"epoch": 0.7216386554621849,
"grad_norm": 12.625,
"learning_rate": 1.6972613458528953e-05,
"loss": 1.3042,
"step": 6870
},
{
"epoch": 0.7226890756302521,
"grad_norm": 11.75,
"learning_rate": 1.6964788732394367e-05,
"loss": 1.5685,
"step": 6880
},
{
"epoch": 0.7237394957983193,
"grad_norm": 5.8125,
"learning_rate": 1.6956964006259782e-05,
"loss": 1.4803,
"step": 6890
},
{
"epoch": 0.7247899159663865,
"grad_norm": 14.1875,
"learning_rate": 1.6949139280125197e-05,
"loss": 1.1679,
"step": 6900
},
{
"epoch": 0.7258403361344538,
"grad_norm": 14.875,
"learning_rate": 1.694131455399061e-05,
"loss": 1.4983,
"step": 6910
},
{
"epoch": 0.726890756302521,
"grad_norm": 8.8125,
"learning_rate": 1.6933489827856026e-05,
"loss": 1.2823,
"step": 6920
},
{
"epoch": 0.7279411764705882,
"grad_norm": 9.375,
"learning_rate": 1.692566510172144e-05,
"loss": 1.3766,
"step": 6930
},
{
"epoch": 0.7289915966386554,
"grad_norm": 13.875,
"learning_rate": 1.6917840375586855e-05,
"loss": 1.4109,
"step": 6940
},
{
"epoch": 0.7300420168067226,
"grad_norm": 13.375,
"learning_rate": 1.691001564945227e-05,
"loss": 1.5843,
"step": 6950
},
{
"epoch": 0.7310924369747899,
"grad_norm": 13.1875,
"learning_rate": 1.6902190923317685e-05,
"loss": 1.2523,
"step": 6960
},
{
"epoch": 0.7321428571428571,
"grad_norm": 24.125,
"learning_rate": 1.68943661971831e-05,
"loss": 1.1301,
"step": 6970
},
{
"epoch": 0.7331932773109243,
"grad_norm": 13.3125,
"learning_rate": 1.6886541471048514e-05,
"loss": 1.4351,
"step": 6980
},
{
"epoch": 0.7342436974789915,
"grad_norm": 15.6875,
"learning_rate": 1.687871674491393e-05,
"loss": 1.276,
"step": 6990
},
{
"epoch": 0.7352941176470589,
"grad_norm": 9.625,
"learning_rate": 1.6870892018779343e-05,
"loss": 1.2223,
"step": 7000
},
{
"epoch": 0.7363445378151261,
"grad_norm": 5.21875,
"learning_rate": 1.6863067292644758e-05,
"loss": 0.9195,
"step": 7010
},
{
"epoch": 0.7373949579831933,
"grad_norm": 5.4375,
"learning_rate": 1.6855242566510173e-05,
"loss": 1.5059,
"step": 7020
},
{
"epoch": 0.7384453781512605,
"grad_norm": 17.0,
"learning_rate": 1.6847417840375587e-05,
"loss": 1.2798,
"step": 7030
},
{
"epoch": 0.7394957983193278,
"grad_norm": 11.4375,
"learning_rate": 1.6839593114241002e-05,
"loss": 1.3882,
"step": 7040
},
{
"epoch": 0.740546218487395,
"grad_norm": 8.6875,
"learning_rate": 1.6831768388106417e-05,
"loss": 1.4359,
"step": 7050
},
{
"epoch": 0.7415966386554622,
"grad_norm": 19.5,
"learning_rate": 1.6823943661971835e-05,
"loss": 1.1035,
"step": 7060
},
{
"epoch": 0.7426470588235294,
"grad_norm": 21.875,
"learning_rate": 1.6816118935837246e-05,
"loss": 1.1844,
"step": 7070
},
{
"epoch": 0.7436974789915967,
"grad_norm": 13.5625,
"learning_rate": 1.6808294209702664e-05,
"loss": 1.0824,
"step": 7080
},
{
"epoch": 0.7447478991596639,
"grad_norm": 11.875,
"learning_rate": 1.6800469483568075e-05,
"loss": 1.2525,
"step": 7090
},
{
"epoch": 0.7457983193277311,
"grad_norm": 39.25,
"learning_rate": 1.6792644757433493e-05,
"loss": 1.0738,
"step": 7100
},
{
"epoch": 0.7468487394957983,
"grad_norm": 13.125,
"learning_rate": 1.6784820031298905e-05,
"loss": 1.4299,
"step": 7110
},
{
"epoch": 0.7478991596638656,
"grad_norm": 18.5,
"learning_rate": 1.677699530516432e-05,
"loss": 1.5707,
"step": 7120
},
{
"epoch": 0.7489495798319328,
"grad_norm": 18.125,
"learning_rate": 1.6769170579029734e-05,
"loss": 1.5046,
"step": 7130
},
{
"epoch": 0.75,
"grad_norm": 21.5,
"learning_rate": 1.676134585289515e-05,
"loss": 0.9223,
"step": 7140
},
{
"epoch": 0.7510504201680672,
"grad_norm": 11.75,
"learning_rate": 1.6753521126760567e-05,
"loss": 1.3039,
"step": 7150
},
{
"epoch": 0.7521008403361344,
"grad_norm": 15.6875,
"learning_rate": 1.6745696400625978e-05,
"loss": 0.8591,
"step": 7160
},
{
"epoch": 0.7531512605042017,
"grad_norm": 10.375,
"learning_rate": 1.6737871674491396e-05,
"loss": 1.2332,
"step": 7170
},
{
"epoch": 0.7542016806722689,
"grad_norm": 10.3125,
"learning_rate": 1.6730046948356807e-05,
"loss": 1.2879,
"step": 7180
},
{
"epoch": 0.7552521008403361,
"grad_norm": 12.4375,
"learning_rate": 1.6722222222222225e-05,
"loss": 1.484,
"step": 7190
},
{
"epoch": 0.7563025210084033,
"grad_norm": 11.6875,
"learning_rate": 1.6714397496087636e-05,
"loss": 1.0901,
"step": 7200
},
{
"epoch": 0.7573529411764706,
"grad_norm": 12.9375,
"learning_rate": 1.6706572769953054e-05,
"loss": 1.1962,
"step": 7210
},
{
"epoch": 0.7584033613445378,
"grad_norm": 4.96875,
"learning_rate": 1.6698748043818466e-05,
"loss": 0.949,
"step": 7220
},
{
"epoch": 0.759453781512605,
"grad_norm": 18.0,
"learning_rate": 1.6690923317683884e-05,
"loss": 1.31,
"step": 7230
},
{
"epoch": 0.7605042016806722,
"grad_norm": 4.28125,
"learning_rate": 1.66830985915493e-05,
"loss": 1.1472,
"step": 7240
},
{
"epoch": 0.7615546218487395,
"grad_norm": 12.3125,
"learning_rate": 1.6675273865414713e-05,
"loss": 1.94,
"step": 7250
},
{
"epoch": 0.7626050420168067,
"grad_norm": 13.75,
"learning_rate": 1.6667449139280128e-05,
"loss": 1.1797,
"step": 7260
},
{
"epoch": 0.7636554621848739,
"grad_norm": 14.8125,
"learning_rate": 1.6659624413145542e-05,
"loss": 1.0651,
"step": 7270
},
{
"epoch": 0.7647058823529411,
"grad_norm": 19.25,
"learning_rate": 1.6651799687010957e-05,
"loss": 1.6434,
"step": 7280
},
{
"epoch": 0.7657563025210085,
"grad_norm": 4.625,
"learning_rate": 1.664397496087637e-05,
"loss": 1.4819,
"step": 7290
},
{
"epoch": 0.7668067226890757,
"grad_norm": 5.96875,
"learning_rate": 1.6636150234741786e-05,
"loss": 1.1262,
"step": 7300
},
{
"epoch": 0.7678571428571429,
"grad_norm": 14.0625,
"learning_rate": 1.66283255086072e-05,
"loss": 1.3299,
"step": 7310
},
{
"epoch": 0.7689075630252101,
"grad_norm": 17.125,
"learning_rate": 1.6620500782472616e-05,
"loss": 1.4443,
"step": 7320
},
{
"epoch": 0.7699579831932774,
"grad_norm": 18.125,
"learning_rate": 1.6612676056338027e-05,
"loss": 1.3762,
"step": 7330
},
{
"epoch": 0.7710084033613446,
"grad_norm": 14.25,
"learning_rate": 1.6604851330203445e-05,
"loss": 1.3657,
"step": 7340
},
{
"epoch": 0.7720588235294118,
"grad_norm": 14.5625,
"learning_rate": 1.659702660406886e-05,
"loss": 1.2761,
"step": 7350
},
{
"epoch": 0.773109243697479,
"grad_norm": 9.5,
"learning_rate": 1.6589201877934274e-05,
"loss": 1.4405,
"step": 7360
},
{
"epoch": 0.7741596638655462,
"grad_norm": 9.9375,
"learning_rate": 1.658137715179969e-05,
"loss": 0.9356,
"step": 7370
},
{
"epoch": 0.7752100840336135,
"grad_norm": 12.375,
"learning_rate": 1.6573552425665104e-05,
"loss": 1.2672,
"step": 7380
},
{
"epoch": 0.7762605042016807,
"grad_norm": 12.0625,
"learning_rate": 1.6565727699530518e-05,
"loss": 1.3292,
"step": 7390
},
{
"epoch": 0.7773109243697479,
"grad_norm": 17.875,
"learning_rate": 1.6557902973395933e-05,
"loss": 1.2816,
"step": 7400
},
{
"epoch": 0.7783613445378151,
"grad_norm": 14.375,
"learning_rate": 1.6550078247261347e-05,
"loss": 1.2162,
"step": 7410
},
{
"epoch": 0.7794117647058824,
"grad_norm": 20.0,
"learning_rate": 1.6542253521126762e-05,
"loss": 1.1232,
"step": 7420
},
{
"epoch": 0.7804621848739496,
"grad_norm": 12.625,
"learning_rate": 1.6534428794992177e-05,
"loss": 1.711,
"step": 7430
},
{
"epoch": 0.7815126050420168,
"grad_norm": 13.375,
"learning_rate": 1.652660406885759e-05,
"loss": 1.4417,
"step": 7440
},
{
"epoch": 0.782563025210084,
"grad_norm": 18.0,
"learning_rate": 1.6518779342723006e-05,
"loss": 1.5316,
"step": 7450
},
{
"epoch": 0.7836134453781513,
"grad_norm": 12.5625,
"learning_rate": 1.651095461658842e-05,
"loss": 1.7166,
"step": 7460
},
{
"epoch": 0.7846638655462185,
"grad_norm": 21.625,
"learning_rate": 1.6503129890453835e-05,
"loss": 1.2015,
"step": 7470
},
{
"epoch": 0.7857142857142857,
"grad_norm": 22.5,
"learning_rate": 1.649530516431925e-05,
"loss": 1.4377,
"step": 7480
},
{
"epoch": 0.7867647058823529,
"grad_norm": 16.125,
"learning_rate": 1.6487480438184665e-05,
"loss": 1.5353,
"step": 7490
},
{
"epoch": 0.7878151260504201,
"grad_norm": 19.75,
"learning_rate": 1.647965571205008e-05,
"loss": 1.4953,
"step": 7500
},
{
"epoch": 0.7888655462184874,
"grad_norm": 16.125,
"learning_rate": 1.6471830985915494e-05,
"loss": 1.1865,
"step": 7510
},
{
"epoch": 0.7899159663865546,
"grad_norm": 7.125,
"learning_rate": 1.646400625978091e-05,
"loss": 0.8322,
"step": 7520
},
{
"epoch": 0.7909663865546218,
"grad_norm": 11.625,
"learning_rate": 1.6456181533646323e-05,
"loss": 1.4808,
"step": 7530
},
{
"epoch": 0.792016806722689,
"grad_norm": 14.875,
"learning_rate": 1.6448356807511738e-05,
"loss": 1.7362,
"step": 7540
},
{
"epoch": 0.7930672268907563,
"grad_norm": 25.75,
"learning_rate": 1.6440532081377153e-05,
"loss": 1.4707,
"step": 7550
},
{
"epoch": 0.7941176470588235,
"grad_norm": 15.8125,
"learning_rate": 1.6432707355242567e-05,
"loss": 1.3344,
"step": 7560
},
{
"epoch": 0.7951680672268907,
"grad_norm": 6.5625,
"learning_rate": 1.6424882629107982e-05,
"loss": 1.3918,
"step": 7570
},
{
"epoch": 0.7962184873949579,
"grad_norm": 10.0,
"learning_rate": 1.6417057902973397e-05,
"loss": 1.3308,
"step": 7580
},
{
"epoch": 0.7972689075630253,
"grad_norm": 26.625,
"learning_rate": 1.640923317683881e-05,
"loss": 1.3687,
"step": 7590
},
{
"epoch": 0.7983193277310925,
"grad_norm": 13.6875,
"learning_rate": 1.6401408450704226e-05,
"loss": 1.3443,
"step": 7600
},
{
"epoch": 0.7993697478991597,
"grad_norm": 4.5,
"learning_rate": 1.639358372456964e-05,
"loss": 0.9478,
"step": 7610
},
{
"epoch": 0.8004201680672269,
"grad_norm": 19.75,
"learning_rate": 1.6385758998435055e-05,
"loss": 0.9845,
"step": 7620
},
{
"epoch": 0.8014705882352942,
"grad_norm": 6.375,
"learning_rate": 1.637793427230047e-05,
"loss": 1.1467,
"step": 7630
},
{
"epoch": 0.8025210084033614,
"grad_norm": 18.5,
"learning_rate": 1.6370109546165888e-05,
"loss": 1.2671,
"step": 7640
},
{
"epoch": 0.8035714285714286,
"grad_norm": 12.8125,
"learning_rate": 1.63622848200313e-05,
"loss": 1.454,
"step": 7650
},
{
"epoch": 0.8046218487394958,
"grad_norm": 13.875,
"learning_rate": 1.6354460093896717e-05,
"loss": 1.8004,
"step": 7660
},
{
"epoch": 0.805672268907563,
"grad_norm": 10.9375,
"learning_rate": 1.634663536776213e-05,
"loss": 1.1873,
"step": 7670
},
{
"epoch": 0.8067226890756303,
"grad_norm": 17.625,
"learning_rate": 1.6338810641627547e-05,
"loss": 1.7385,
"step": 7680
},
{
"epoch": 0.8077731092436975,
"grad_norm": 13.0,
"learning_rate": 1.6330985915492958e-05,
"loss": 1.1763,
"step": 7690
},
{
"epoch": 0.8088235294117647,
"grad_norm": 14.4375,
"learning_rate": 1.6323161189358376e-05,
"loss": 1.1858,
"step": 7700
},
{
"epoch": 0.8098739495798319,
"grad_norm": 21.75,
"learning_rate": 1.6315336463223787e-05,
"loss": 1.6386,
"step": 7710
},
{
"epoch": 0.8109243697478992,
"grad_norm": 26.5,
"learning_rate": 1.6307511737089202e-05,
"loss": 1.2271,
"step": 7720
},
{
"epoch": 0.8119747899159664,
"grad_norm": 14.0,
"learning_rate": 1.629968701095462e-05,
"loss": 1.4109,
"step": 7730
},
{
"epoch": 0.8130252100840336,
"grad_norm": 16.25,
"learning_rate": 1.629186228482003e-05,
"loss": 1.5447,
"step": 7740
},
{
"epoch": 0.8140756302521008,
"grad_norm": 13.75,
"learning_rate": 1.628403755868545e-05,
"loss": 1.4124,
"step": 7750
},
{
"epoch": 0.8151260504201681,
"grad_norm": 12.5625,
"learning_rate": 1.627621283255086e-05,
"loss": 1.3217,
"step": 7760
},
{
"epoch": 0.8161764705882353,
"grad_norm": 12.1875,
"learning_rate": 1.626838810641628e-05,
"loss": 1.1625,
"step": 7770
},
{
"epoch": 0.8172268907563025,
"grad_norm": 14.375,
"learning_rate": 1.626056338028169e-05,
"loss": 1.7034,
"step": 7780
},
{
"epoch": 0.8182773109243697,
"grad_norm": 3.375,
"learning_rate": 1.6252738654147108e-05,
"loss": 1.1285,
"step": 7790
},
{
"epoch": 0.819327731092437,
"grad_norm": 5.21875,
"learning_rate": 1.624491392801252e-05,
"loss": 1.305,
"step": 7800
},
{
"epoch": 0.8203781512605042,
"grad_norm": 14.125,
"learning_rate": 1.6237089201877937e-05,
"loss": 1.1519,
"step": 7810
},
{
"epoch": 0.8214285714285714,
"grad_norm": 11.125,
"learning_rate": 1.6229264475743348e-05,
"loss": 1.2225,
"step": 7820
},
{
"epoch": 0.8224789915966386,
"grad_norm": 18.625,
"learning_rate": 1.6221439749608766e-05,
"loss": 1.2256,
"step": 7830
},
{
"epoch": 0.8235294117647058,
"grad_norm": 14.1875,
"learning_rate": 1.621361502347418e-05,
"loss": 1.3576,
"step": 7840
},
{
"epoch": 0.8245798319327731,
"grad_norm": 11.5,
"learning_rate": 1.6205790297339596e-05,
"loss": 1.0199,
"step": 7850
},
{
"epoch": 0.8256302521008403,
"grad_norm": 16.375,
"learning_rate": 1.619796557120501e-05,
"loss": 1.4019,
"step": 7860
},
{
"epoch": 0.8266806722689075,
"grad_norm": 39.75,
"learning_rate": 1.6190140845070425e-05,
"loss": 1.0616,
"step": 7870
},
{
"epoch": 0.8277310924369747,
"grad_norm": 20.0,
"learning_rate": 1.618231611893584e-05,
"loss": 1.3819,
"step": 7880
},
{
"epoch": 0.8287815126050421,
"grad_norm": 12.4375,
"learning_rate": 1.6174491392801254e-05,
"loss": 1.2698,
"step": 7890
},
{
"epoch": 0.8298319327731093,
"grad_norm": 15.0,
"learning_rate": 1.616666666666667e-05,
"loss": 1.4022,
"step": 7900
},
{
"epoch": 0.8308823529411765,
"grad_norm": 13.4375,
"learning_rate": 1.615884194053208e-05,
"loss": 1.3826,
"step": 7910
},
{
"epoch": 0.8319327731092437,
"grad_norm": 6.1875,
"learning_rate": 1.6151017214397498e-05,
"loss": 1.6889,
"step": 7920
},
{
"epoch": 0.832983193277311,
"grad_norm": 11.625,
"learning_rate": 1.6143192488262913e-05,
"loss": 1.072,
"step": 7930
},
{
"epoch": 0.8340336134453782,
"grad_norm": 11.625,
"learning_rate": 1.6135367762128327e-05,
"loss": 1.6721,
"step": 7940
},
{
"epoch": 0.8350840336134454,
"grad_norm": 28.625,
"learning_rate": 1.6127543035993742e-05,
"loss": 1.4931,
"step": 7950
},
{
"epoch": 0.8361344537815126,
"grad_norm": 9.125,
"learning_rate": 1.6119718309859157e-05,
"loss": 1.4431,
"step": 7960
},
{
"epoch": 0.8371848739495799,
"grad_norm": 4.1875,
"learning_rate": 1.611189358372457e-05,
"loss": 1.3817,
"step": 7970
},
{
"epoch": 0.8382352941176471,
"grad_norm": 16.75,
"learning_rate": 1.6104068857589986e-05,
"loss": 1.1158,
"step": 7980
},
{
"epoch": 0.8392857142857143,
"grad_norm": 3.90625,
"learning_rate": 1.60962441314554e-05,
"loss": 1.2636,
"step": 7990
},
{
"epoch": 0.8403361344537815,
"grad_norm": 13.25,
"learning_rate": 1.6088419405320815e-05,
"loss": 1.5959,
"step": 8000
},
{
"epoch": 0.8413865546218487,
"grad_norm": 37.5,
"learning_rate": 1.608059467918623e-05,
"loss": 1.061,
"step": 8010
},
{
"epoch": 0.842436974789916,
"grad_norm": 9.6875,
"learning_rate": 1.6072769953051645e-05,
"loss": 1.6377,
"step": 8020
},
{
"epoch": 0.8434873949579832,
"grad_norm": 16.5,
"learning_rate": 1.606494522691706e-05,
"loss": 1.2668,
"step": 8030
},
{
"epoch": 0.8445378151260504,
"grad_norm": 17.125,
"learning_rate": 1.6057120500782474e-05,
"loss": 1.4443,
"step": 8040
},
{
"epoch": 0.8455882352941176,
"grad_norm": 9.6875,
"learning_rate": 1.604929577464789e-05,
"loss": 1.3508,
"step": 8050
},
{
"epoch": 0.8466386554621849,
"grad_norm": 12.875,
"learning_rate": 1.6041471048513303e-05,
"loss": 1.1882,
"step": 8060
},
{
"epoch": 0.8476890756302521,
"grad_norm": 7.03125,
"learning_rate": 1.6033646322378718e-05,
"loss": 0.9828,
"step": 8070
},
{
"epoch": 0.8487394957983193,
"grad_norm": 19.5,
"learning_rate": 1.6025821596244133e-05,
"loss": 1.7828,
"step": 8080
},
{
"epoch": 0.8497899159663865,
"grad_norm": 18.0,
"learning_rate": 1.6017996870109547e-05,
"loss": 1.5972,
"step": 8090
},
{
"epoch": 0.8508403361344538,
"grad_norm": 21.0,
"learning_rate": 1.6010172143974962e-05,
"loss": 1.6549,
"step": 8100
},
{
"epoch": 0.851890756302521,
"grad_norm": 3.453125,
"learning_rate": 1.6002347417840377e-05,
"loss": 1.634,
"step": 8110
},
{
"epoch": 0.8529411764705882,
"grad_norm": 13.875,
"learning_rate": 1.599452269170579e-05,
"loss": 1.0437,
"step": 8120
},
{
"epoch": 0.8539915966386554,
"grad_norm": 3.71875,
"learning_rate": 1.5986697965571206e-05,
"loss": 1.4985,
"step": 8130
},
{
"epoch": 0.8550420168067226,
"grad_norm": 22.75,
"learning_rate": 1.597887323943662e-05,
"loss": 1.6172,
"step": 8140
},
{
"epoch": 0.8560924369747899,
"grad_norm": 20.875,
"learning_rate": 1.5971048513302035e-05,
"loss": 1.484,
"step": 8150
},
{
"epoch": 0.8571428571428571,
"grad_norm": 14.4375,
"learning_rate": 1.596322378716745e-05,
"loss": 1.1782,
"step": 8160
},
{
"epoch": 0.8581932773109243,
"grad_norm": 11.375,
"learning_rate": 1.5955399061032864e-05,
"loss": 1.5498,
"step": 8170
},
{
"epoch": 0.8592436974789915,
"grad_norm": 16.0,
"learning_rate": 1.594757433489828e-05,
"loss": 1.3166,
"step": 8180
},
{
"epoch": 0.8602941176470589,
"grad_norm": 15.25,
"learning_rate": 1.5939749608763694e-05,
"loss": 1.3896,
"step": 8190
},
{
"epoch": 0.8613445378151261,
"grad_norm": 13.1875,
"learning_rate": 1.593192488262911e-05,
"loss": 1.238,
"step": 8200
},
{
"epoch": 0.8623949579831933,
"grad_norm": 13.0,
"learning_rate": 1.5924100156494523e-05,
"loss": 1.4749,
"step": 8210
},
{
"epoch": 0.8634453781512605,
"grad_norm": 18.875,
"learning_rate": 1.5916275430359938e-05,
"loss": 1.3273,
"step": 8220
},
{
"epoch": 0.8644957983193278,
"grad_norm": 13.9375,
"learning_rate": 1.5908450704225352e-05,
"loss": 1.7615,
"step": 8230
},
{
"epoch": 0.865546218487395,
"grad_norm": 3.140625,
"learning_rate": 1.590062597809077e-05,
"loss": 0.9801,
"step": 8240
},
{
"epoch": 0.8665966386554622,
"grad_norm": 12.9375,
"learning_rate": 1.5892801251956182e-05,
"loss": 1.1506,
"step": 8250
},
{
"epoch": 0.8676470588235294,
"grad_norm": 20.25,
"learning_rate": 1.58849765258216e-05,
"loss": 1.5939,
"step": 8260
},
{
"epoch": 0.8686974789915967,
"grad_norm": 7.875,
"learning_rate": 1.587715179968701e-05,
"loss": 1.2275,
"step": 8270
},
{
"epoch": 0.8697478991596639,
"grad_norm": 47.25,
"learning_rate": 1.586932707355243e-05,
"loss": 1.5948,
"step": 8280
},
{
"epoch": 0.8707983193277311,
"grad_norm": 12.5,
"learning_rate": 1.586150234741784e-05,
"loss": 1.5074,
"step": 8290
},
{
"epoch": 0.8718487394957983,
"grad_norm": 14.9375,
"learning_rate": 1.5853677621283255e-05,
"loss": 1.1553,
"step": 8300
},
{
"epoch": 0.8728991596638656,
"grad_norm": 5.0625,
"learning_rate": 1.584585289514867e-05,
"loss": 1.4229,
"step": 8310
},
{
"epoch": 0.8739495798319328,
"grad_norm": 11.1875,
"learning_rate": 1.5838028169014084e-05,
"loss": 1.1038,
"step": 8320
},
{
"epoch": 0.875,
"grad_norm": 8.5625,
"learning_rate": 1.5830203442879502e-05,
"loss": 1.5116,
"step": 8330
},
{
"epoch": 0.8760504201680672,
"grad_norm": 10.25,
"learning_rate": 1.5822378716744914e-05,
"loss": 1.5387,
"step": 8340
},
{
"epoch": 0.8771008403361344,
"grad_norm": 14.0625,
"learning_rate": 1.581455399061033e-05,
"loss": 1.3233,
"step": 8350
},
{
"epoch": 0.8781512605042017,
"grad_norm": 12.0625,
"learning_rate": 1.5806729264475743e-05,
"loss": 1.1451,
"step": 8360
},
{
"epoch": 0.8792016806722689,
"grad_norm": 19.25,
"learning_rate": 1.579890453834116e-05,
"loss": 1.4902,
"step": 8370
},
{
"epoch": 0.8802521008403361,
"grad_norm": 11.3125,
"learning_rate": 1.5791079812206572e-05,
"loss": 1.3482,
"step": 8380
},
{
"epoch": 0.8813025210084033,
"grad_norm": 26.25,
"learning_rate": 1.578325508607199e-05,
"loss": 1.251,
"step": 8390
},
{
"epoch": 0.8823529411764706,
"grad_norm": 13.3125,
"learning_rate": 1.57754303599374e-05,
"loss": 1.2147,
"step": 8400
},
{
"epoch": 0.8834033613445378,
"grad_norm": 5.0625,
"learning_rate": 1.576760563380282e-05,
"loss": 1.4074,
"step": 8410
},
{
"epoch": 0.884453781512605,
"grad_norm": 4.40625,
"learning_rate": 1.5759780907668234e-05,
"loss": 1.0801,
"step": 8420
},
{
"epoch": 0.8855042016806722,
"grad_norm": 5.34375,
"learning_rate": 1.575195618153365e-05,
"loss": 1.0651,
"step": 8430
},
{
"epoch": 0.8865546218487395,
"grad_norm": 12.375,
"learning_rate": 1.5744131455399064e-05,
"loss": 1.6375,
"step": 8440
},
{
"epoch": 0.8876050420168067,
"grad_norm": 27.75,
"learning_rate": 1.5736306729264478e-05,
"loss": 1.3611,
"step": 8450
},
{
"epoch": 0.8886554621848739,
"grad_norm": 9.875,
"learning_rate": 1.5728482003129893e-05,
"loss": 1.0472,
"step": 8460
},
{
"epoch": 0.8897058823529411,
"grad_norm": 16.125,
"learning_rate": 1.5720657276995307e-05,
"loss": 1.3257,
"step": 8470
},
{
"epoch": 0.8907563025210085,
"grad_norm": 16.375,
"learning_rate": 1.5712832550860722e-05,
"loss": 0.9886,
"step": 8480
},
{
"epoch": 0.8918067226890757,
"grad_norm": 5.1875,
"learning_rate": 1.5705007824726133e-05,
"loss": 0.8322,
"step": 8490
},
{
"epoch": 0.8928571428571429,
"grad_norm": 17.75,
"learning_rate": 1.569718309859155e-05,
"loss": 1.3795,
"step": 8500
},
{
"epoch": 0.8939075630252101,
"grad_norm": 17.5,
"learning_rate": 1.5689358372456963e-05,
"loss": 1.6337,
"step": 8510
},
{
"epoch": 0.8949579831932774,
"grad_norm": 14.4375,
"learning_rate": 1.568153364632238e-05,
"loss": 1.7422,
"step": 8520
},
{
"epoch": 0.8960084033613446,
"grad_norm": 16.75,
"learning_rate": 1.5673708920187795e-05,
"loss": 1.7619,
"step": 8530
},
{
"epoch": 0.8970588235294118,
"grad_norm": 13.0625,
"learning_rate": 1.566588419405321e-05,
"loss": 1.4839,
"step": 8540
},
{
"epoch": 0.898109243697479,
"grad_norm": 16.625,
"learning_rate": 1.5658059467918625e-05,
"loss": 1.3422,
"step": 8550
},
{
"epoch": 0.8991596638655462,
"grad_norm": 12.875,
"learning_rate": 1.565023474178404e-05,
"loss": 1.5977,
"step": 8560
},
{
"epoch": 0.9002100840336135,
"grad_norm": 12.5625,
"learning_rate": 1.5642410015649454e-05,
"loss": 1.3554,
"step": 8570
},
{
"epoch": 0.9012605042016807,
"grad_norm": 16.75,
"learning_rate": 1.563458528951487e-05,
"loss": 1.1915,
"step": 8580
},
{
"epoch": 0.9023109243697479,
"grad_norm": 12.0,
"learning_rate": 1.5626760563380283e-05,
"loss": 1.3909,
"step": 8590
},
{
"epoch": 0.9033613445378151,
"grad_norm": 12.75,
"learning_rate": 1.5618935837245698e-05,
"loss": 1.2245,
"step": 8600
},
{
"epoch": 0.9044117647058824,
"grad_norm": 22.375,
"learning_rate": 1.5611111111111113e-05,
"loss": 1.3636,
"step": 8610
},
{
"epoch": 0.9054621848739496,
"grad_norm": 11.5625,
"learning_rate": 1.5603286384976527e-05,
"loss": 1.2219,
"step": 8620
},
{
"epoch": 0.9065126050420168,
"grad_norm": 15.875,
"learning_rate": 1.5595461658841942e-05,
"loss": 1.4637,
"step": 8630
},
{
"epoch": 0.907563025210084,
"grad_norm": 8.125,
"learning_rate": 1.5587636932707357e-05,
"loss": 1.3819,
"step": 8640
},
{
"epoch": 0.9086134453781513,
"grad_norm": 20.75,
"learning_rate": 1.557981220657277e-05,
"loss": 1.5721,
"step": 8650
},
{
"epoch": 0.9096638655462185,
"grad_norm": 3.890625,
"learning_rate": 1.5571987480438186e-05,
"loss": 1.3215,
"step": 8660
},
{
"epoch": 0.9107142857142857,
"grad_norm": 13.375,
"learning_rate": 1.55641627543036e-05,
"loss": 1.4286,
"step": 8670
},
{
"epoch": 0.9117647058823529,
"grad_norm": 12.5,
"learning_rate": 1.5556338028169015e-05,
"loss": 1.0645,
"step": 8680
},
{
"epoch": 0.9128151260504201,
"grad_norm": 11.8125,
"learning_rate": 1.554851330203443e-05,
"loss": 1.1888,
"step": 8690
},
{
"epoch": 0.9138655462184874,
"grad_norm": 14.5,
"learning_rate": 1.5540688575899844e-05,
"loss": 1.6634,
"step": 8700
},
{
"epoch": 0.9149159663865546,
"grad_norm": 18.25,
"learning_rate": 1.553286384976526e-05,
"loss": 1.4643,
"step": 8710
},
{
"epoch": 0.9159663865546218,
"grad_norm": 4.8125,
"learning_rate": 1.5525039123630674e-05,
"loss": 1.4156,
"step": 8720
},
{
"epoch": 0.917016806722689,
"grad_norm": 11.75,
"learning_rate": 1.551721439749609e-05,
"loss": 1.5662,
"step": 8730
},
{
"epoch": 0.9180672268907563,
"grad_norm": 19.5,
"learning_rate": 1.5509389671361503e-05,
"loss": 1.1295,
"step": 8740
},
{
"epoch": 0.9191176470588235,
"grad_norm": 9.25,
"learning_rate": 1.5501564945226918e-05,
"loss": 1.422,
"step": 8750
},
{
"epoch": 0.9201680672268907,
"grad_norm": 10.4375,
"learning_rate": 1.5493740219092332e-05,
"loss": 0.6702,
"step": 8760
},
{
"epoch": 0.9212184873949579,
"grad_norm": 15.6875,
"learning_rate": 1.5485915492957747e-05,
"loss": 1.3917,
"step": 8770
},
{
"epoch": 0.9222689075630253,
"grad_norm": 11.625,
"learning_rate": 1.5478090766823162e-05,
"loss": 1.3655,
"step": 8780
},
{
"epoch": 0.9233193277310925,
"grad_norm": 15.25,
"learning_rate": 1.5470266040688576e-05,
"loss": 1.3897,
"step": 8790
},
{
"epoch": 0.9243697478991597,
"grad_norm": 20.75,
"learning_rate": 1.546244131455399e-05,
"loss": 1.2722,
"step": 8800
},
{
"epoch": 0.9254201680672269,
"grad_norm": 13.875,
"learning_rate": 1.5454616588419406e-05,
"loss": 1.2772,
"step": 8810
},
{
"epoch": 0.9264705882352942,
"grad_norm": 33.75,
"learning_rate": 1.5446791862284824e-05,
"loss": 1.3379,
"step": 8820
},
{
"epoch": 0.9275210084033614,
"grad_norm": 14.8125,
"learning_rate": 1.5438967136150235e-05,
"loss": 1.0489,
"step": 8830
},
{
"epoch": 0.9285714285714286,
"grad_norm": 13.375,
"learning_rate": 1.5431142410015653e-05,
"loss": 0.8453,
"step": 8840
},
{
"epoch": 0.9296218487394958,
"grad_norm": 11.5,
"learning_rate": 1.5423317683881064e-05,
"loss": 1.3735,
"step": 8850
},
{
"epoch": 0.930672268907563,
"grad_norm": 18.25,
"learning_rate": 1.5415492957746482e-05,
"loss": 1.5835,
"step": 8860
},
{
"epoch": 0.9317226890756303,
"grad_norm": 10.8125,
"learning_rate": 1.5407668231611894e-05,
"loss": 1.5527,
"step": 8870
},
{
"epoch": 0.9327731092436975,
"grad_norm": 3.96875,
"learning_rate": 1.5399843505477308e-05,
"loss": 1.0899,
"step": 8880
},
{
"epoch": 0.9338235294117647,
"grad_norm": 15.8125,
"learning_rate": 1.5392018779342723e-05,
"loss": 1.9109,
"step": 8890
},
{
"epoch": 0.9348739495798319,
"grad_norm": 12.9375,
"learning_rate": 1.5384194053208138e-05,
"loss": 1.3079,
"step": 8900
},
{
"epoch": 0.9359243697478992,
"grad_norm": 11.625,
"learning_rate": 1.5376369327073556e-05,
"loss": 1.4786,
"step": 8910
},
{
"epoch": 0.9369747899159664,
"grad_norm": 16.25,
"learning_rate": 1.5368544600938967e-05,
"loss": 1.5381,
"step": 8920
},
{
"epoch": 0.9380252100840336,
"grad_norm": 9.375,
"learning_rate": 1.5360719874804385e-05,
"loss": 1.1872,
"step": 8930
},
{
"epoch": 0.9390756302521008,
"grad_norm": 11.8125,
"learning_rate": 1.5352895148669796e-05,
"loss": 1.785,
"step": 8940
},
{
"epoch": 0.9401260504201681,
"grad_norm": 4.5,
"learning_rate": 1.5345070422535214e-05,
"loss": 1.0459,
"step": 8950
},
{
"epoch": 0.9411764705882353,
"grad_norm": 25.125,
"learning_rate": 1.5337245696400625e-05,
"loss": 1.0509,
"step": 8960
},
{
"epoch": 0.9422268907563025,
"grad_norm": 18.25,
"learning_rate": 1.5329420970266043e-05,
"loss": 1.6948,
"step": 8970
},
{
"epoch": 0.9432773109243697,
"grad_norm": 21.875,
"learning_rate": 1.5321596244131455e-05,
"loss": 0.8567,
"step": 8980
},
{
"epoch": 0.944327731092437,
"grad_norm": 20.75,
"learning_rate": 1.5313771517996873e-05,
"loss": 1.8198,
"step": 8990
},
{
"epoch": 0.9453781512605042,
"grad_norm": 17.5,
"learning_rate": 1.5305946791862284e-05,
"loss": 1.6832,
"step": 9000
},
{
"epoch": 0.9464285714285714,
"grad_norm": 19.875,
"learning_rate": 1.5298122065727702e-05,
"loss": 1.2455,
"step": 9010
},
{
"epoch": 0.9474789915966386,
"grad_norm": 11.875,
"learning_rate": 1.5290297339593117e-05,
"loss": 1.3775,
"step": 9020
},
{
"epoch": 0.9485294117647058,
"grad_norm": 35.5,
"learning_rate": 1.528247261345853e-05,
"loss": 1.7105,
"step": 9030
},
{
"epoch": 0.9495798319327731,
"grad_norm": 9.0625,
"learning_rate": 1.5274647887323946e-05,
"loss": 0.8712,
"step": 9040
},
{
"epoch": 0.9506302521008403,
"grad_norm": 4.125,
"learning_rate": 1.526682316118936e-05,
"loss": 1.1452,
"step": 9050
},
{
"epoch": 0.9516806722689075,
"grad_norm": 10.125,
"learning_rate": 1.5258998435054775e-05,
"loss": 1.3654,
"step": 9060
},
{
"epoch": 0.9527310924369747,
"grad_norm": 29.5,
"learning_rate": 1.5251173708920188e-05,
"loss": 1.4345,
"step": 9070
},
{
"epoch": 0.9537815126050421,
"grad_norm": 9.6875,
"learning_rate": 1.5243348982785605e-05,
"loss": 1.1807,
"step": 9080
},
{
"epoch": 0.9548319327731093,
"grad_norm": 14.3125,
"learning_rate": 1.5235524256651018e-05,
"loss": 1.409,
"step": 9090
},
{
"epoch": 0.9558823529411765,
"grad_norm": 13.9375,
"learning_rate": 1.5227699530516434e-05,
"loss": 1.2369,
"step": 9100
},
{
"epoch": 0.9569327731092437,
"grad_norm": 14.375,
"learning_rate": 1.5219874804381849e-05,
"loss": 1.1645,
"step": 9110
},
{
"epoch": 0.957983193277311,
"grad_norm": 15.8125,
"learning_rate": 1.5212050078247262e-05,
"loss": 1.4289,
"step": 9120
},
{
"epoch": 0.9590336134453782,
"grad_norm": 14.4375,
"learning_rate": 1.5204225352112678e-05,
"loss": 1.4257,
"step": 9130
},
{
"epoch": 0.9600840336134454,
"grad_norm": 19.625,
"learning_rate": 1.5196400625978091e-05,
"loss": 1.1819,
"step": 9140
},
{
"epoch": 0.9611344537815126,
"grad_norm": 17.5,
"learning_rate": 1.5188575899843507e-05,
"loss": 1.4177,
"step": 9150
},
{
"epoch": 0.9621848739495799,
"grad_norm": 9.125,
"learning_rate": 1.518075117370892e-05,
"loss": 1.0235,
"step": 9160
},
{
"epoch": 0.9632352941176471,
"grad_norm": 13.125,
"learning_rate": 1.5172926447574337e-05,
"loss": 1.4327,
"step": 9170
},
{
"epoch": 0.9642857142857143,
"grad_norm": 6.46875,
"learning_rate": 1.516510172143975e-05,
"loss": 1.326,
"step": 9180
},
{
"epoch": 0.9653361344537815,
"grad_norm": 11.8125,
"learning_rate": 1.5157276995305166e-05,
"loss": 1.7656,
"step": 9190
},
{
"epoch": 0.9663865546218487,
"grad_norm": 9.1875,
"learning_rate": 1.5149452269170579e-05,
"loss": 1.368,
"step": 9200
},
{
"epoch": 0.967436974789916,
"grad_norm": 18.25,
"learning_rate": 1.5141627543035995e-05,
"loss": 1.7504,
"step": 9210
},
{
"epoch": 0.9684873949579832,
"grad_norm": 12.0,
"learning_rate": 1.513380281690141e-05,
"loss": 1.2685,
"step": 9220
},
{
"epoch": 0.9695378151260504,
"grad_norm": 4.9375,
"learning_rate": 1.5125978090766824e-05,
"loss": 1.1237,
"step": 9230
},
{
"epoch": 0.9705882352941176,
"grad_norm": 4.3125,
"learning_rate": 1.5118153364632239e-05,
"loss": 1.0553,
"step": 9240
},
{
"epoch": 0.9716386554621849,
"grad_norm": 12.0625,
"learning_rate": 1.5110328638497654e-05,
"loss": 1.795,
"step": 9250
},
{
"epoch": 0.9726890756302521,
"grad_norm": 14.4375,
"learning_rate": 1.5102503912363068e-05,
"loss": 1.0514,
"step": 9260
},
{
"epoch": 0.9737394957983193,
"grad_norm": 11.6875,
"learning_rate": 1.5094679186228483e-05,
"loss": 1.6683,
"step": 9270
},
{
"epoch": 0.9747899159663865,
"grad_norm": 24.125,
"learning_rate": 1.5086854460093898e-05,
"loss": 1.3332,
"step": 9280
},
{
"epoch": 0.9758403361344538,
"grad_norm": 11.9375,
"learning_rate": 1.5079029733959312e-05,
"loss": 1.1958,
"step": 9290
},
{
"epoch": 0.976890756302521,
"grad_norm": 12.4375,
"learning_rate": 1.5071205007824727e-05,
"loss": 1.4438,
"step": 9300
},
{
"epoch": 0.9779411764705882,
"grad_norm": 17.25,
"learning_rate": 1.5063380281690143e-05,
"loss": 1.1032,
"step": 9310
},
{
"epoch": 0.9789915966386554,
"grad_norm": 18.125,
"learning_rate": 1.5055555555555556e-05,
"loss": 1.5746,
"step": 9320
},
{
"epoch": 0.9800420168067226,
"grad_norm": 16.5,
"learning_rate": 1.5047730829420973e-05,
"loss": 1.1873,
"step": 9330
},
{
"epoch": 0.9810924369747899,
"grad_norm": 5.15625,
"learning_rate": 1.5039906103286386e-05,
"loss": 1.0214,
"step": 9340
},
{
"epoch": 0.9821428571428571,
"grad_norm": 12.0,
"learning_rate": 1.5032081377151802e-05,
"loss": 1.3897,
"step": 9350
},
{
"epoch": 0.9831932773109243,
"grad_norm": 17.25,
"learning_rate": 1.5024256651017215e-05,
"loss": 1.7093,
"step": 9360
},
{
"epoch": 0.9842436974789915,
"grad_norm": 14.4375,
"learning_rate": 1.5016431924882631e-05,
"loss": 1.1554,
"step": 9370
},
{
"epoch": 0.9852941176470589,
"grad_norm": 10.6875,
"learning_rate": 1.5008607198748044e-05,
"loss": 1.2735,
"step": 9380
},
{
"epoch": 0.9863445378151261,
"grad_norm": 15.375,
"learning_rate": 1.500078247261346e-05,
"loss": 1.3506,
"step": 9390
},
{
"epoch": 0.9873949579831933,
"grad_norm": 4.125,
"learning_rate": 1.4992957746478874e-05,
"loss": 1.1523,
"step": 9400
},
{
"epoch": 0.9884453781512605,
"grad_norm": 17.875,
"learning_rate": 1.498513302034429e-05,
"loss": 1.7688,
"step": 9410
},
{
"epoch": 0.9894957983193278,
"grad_norm": 12.6875,
"learning_rate": 1.4977308294209705e-05,
"loss": 1.3584,
"step": 9420
},
{
"epoch": 0.990546218487395,
"grad_norm": 5.0,
"learning_rate": 1.4969483568075118e-05,
"loss": 0.7708,
"step": 9430
},
{
"epoch": 0.9915966386554622,
"grad_norm": 16.375,
"learning_rate": 1.4961658841940534e-05,
"loss": 1.4436,
"step": 9440
},
{
"epoch": 0.9926470588235294,
"grad_norm": 4.5625,
"learning_rate": 1.4953834115805947e-05,
"loss": 1.2742,
"step": 9450
},
{
"epoch": 0.9936974789915967,
"grad_norm": 10.875,
"learning_rate": 1.4946009389671363e-05,
"loss": 1.735,
"step": 9460
},
{
"epoch": 0.9947478991596639,
"grad_norm": 16.875,
"learning_rate": 1.4938184663536776e-05,
"loss": 1.4445,
"step": 9470
},
{
"epoch": 0.9957983193277311,
"grad_norm": 17.625,
"learning_rate": 1.4930359937402192e-05,
"loss": 1.5068,
"step": 9480
},
{
"epoch": 0.9968487394957983,
"grad_norm": 21.375,
"learning_rate": 1.4922535211267605e-05,
"loss": 1.1406,
"step": 9490
},
{
"epoch": 0.9978991596638656,
"grad_norm": 8.5,
"learning_rate": 1.4914710485133022e-05,
"loss": 1.3998,
"step": 9500
},
{
"epoch": 0.9989495798319328,
"grad_norm": 4.6875,
"learning_rate": 1.4906885758998436e-05,
"loss": 1.3126,
"step": 9510
},
{
"epoch": 1.0,
"grad_norm": 14.25,
"learning_rate": 1.4899061032863851e-05,
"loss": 1.0987,
"step": 9520
},
{
"epoch": 1.0010504201680672,
"grad_norm": 35.25,
"learning_rate": 1.4891236306729266e-05,
"loss": 1.3145,
"step": 9530
},
{
"epoch": 1.0021008403361344,
"grad_norm": 28.375,
"learning_rate": 1.488341158059468e-05,
"loss": 1.5549,
"step": 9540
},
{
"epoch": 1.0031512605042017,
"grad_norm": 12.875,
"learning_rate": 1.4875586854460095e-05,
"loss": 1.0721,
"step": 9550
},
{
"epoch": 1.004201680672269,
"grad_norm": 27.25,
"learning_rate": 1.486776212832551e-05,
"loss": 0.9465,
"step": 9560
},
{
"epoch": 1.0052521008403361,
"grad_norm": 5.46875,
"learning_rate": 1.4859937402190924e-05,
"loss": 1.2033,
"step": 9570
},
{
"epoch": 1.0063025210084033,
"grad_norm": 16.125,
"learning_rate": 1.4852112676056339e-05,
"loss": 1.5769,
"step": 9580
},
{
"epoch": 1.0073529411764706,
"grad_norm": 17.0,
"learning_rate": 1.4844287949921754e-05,
"loss": 1.5204,
"step": 9590
},
{
"epoch": 1.0084033613445378,
"grad_norm": 16.625,
"learning_rate": 1.483646322378717e-05,
"loss": 1.3765,
"step": 9600
},
{
"epoch": 1.009453781512605,
"grad_norm": 19.25,
"learning_rate": 1.4828638497652583e-05,
"loss": 1.3035,
"step": 9610
},
{
"epoch": 1.0105042016806722,
"grad_norm": 2.046875,
"learning_rate": 1.4820813771518e-05,
"loss": 1.3045,
"step": 9620
},
{
"epoch": 1.0115546218487395,
"grad_norm": 15.4375,
"learning_rate": 1.4812989045383412e-05,
"loss": 1.4246,
"step": 9630
},
{
"epoch": 1.0126050420168067,
"grad_norm": 16.5,
"learning_rate": 1.4805164319248829e-05,
"loss": 1.1389,
"step": 9640
},
{
"epoch": 1.013655462184874,
"grad_norm": 12.5,
"learning_rate": 1.4797339593114242e-05,
"loss": 1.1888,
"step": 9650
},
{
"epoch": 1.0147058823529411,
"grad_norm": 23.0,
"learning_rate": 1.4789514866979658e-05,
"loss": 1.6063,
"step": 9660
},
{
"epoch": 1.0157563025210083,
"grad_norm": 12.5625,
"learning_rate": 1.4781690140845071e-05,
"loss": 1.4559,
"step": 9670
},
{
"epoch": 1.0168067226890756,
"grad_norm": 16.25,
"learning_rate": 1.4773865414710487e-05,
"loss": 1.1611,
"step": 9680
},
{
"epoch": 1.0178571428571428,
"grad_norm": 23.5,
"learning_rate": 1.47660406885759e-05,
"loss": 1.3645,
"step": 9690
},
{
"epoch": 1.01890756302521,
"grad_norm": 11.375,
"learning_rate": 1.4758215962441317e-05,
"loss": 1.2812,
"step": 9700
},
{
"epoch": 1.0199579831932772,
"grad_norm": 11.625,
"learning_rate": 1.4750391236306731e-05,
"loss": 1.5295,
"step": 9710
},
{
"epoch": 1.0210084033613445,
"grad_norm": 7.96875,
"learning_rate": 1.4742566510172144e-05,
"loss": 1.1815,
"step": 9720
},
{
"epoch": 1.0220588235294117,
"grad_norm": 12.5625,
"learning_rate": 1.473474178403756e-05,
"loss": 1.4964,
"step": 9730
},
{
"epoch": 1.023109243697479,
"grad_norm": 19.25,
"learning_rate": 1.4726917057902973e-05,
"loss": 1.3701,
"step": 9740
},
{
"epoch": 1.0241596638655461,
"grad_norm": 9.875,
"learning_rate": 1.471909233176839e-05,
"loss": 1.1485,
"step": 9750
},
{
"epoch": 1.0252100840336134,
"grad_norm": 18.5,
"learning_rate": 1.4711267605633803e-05,
"loss": 1.5789,
"step": 9760
},
{
"epoch": 1.0262605042016806,
"grad_norm": 12.0625,
"learning_rate": 1.4703442879499219e-05,
"loss": 1.6354,
"step": 9770
},
{
"epoch": 1.0273109243697478,
"grad_norm": 15.375,
"learning_rate": 1.4695618153364632e-05,
"loss": 1.3682,
"step": 9780
},
{
"epoch": 1.028361344537815,
"grad_norm": 4.28125,
"learning_rate": 1.4687793427230048e-05,
"loss": 0.7953,
"step": 9790
},
{
"epoch": 1.0294117647058822,
"grad_norm": 13.75,
"learning_rate": 1.4679968701095463e-05,
"loss": 0.9399,
"step": 9800
},
{
"epoch": 1.0304621848739495,
"grad_norm": 12.6875,
"learning_rate": 1.4672143974960878e-05,
"loss": 1.0148,
"step": 9810
},
{
"epoch": 1.0315126050420167,
"grad_norm": 5.25,
"learning_rate": 1.4664319248826292e-05,
"loss": 1.4619,
"step": 9820
},
{
"epoch": 1.0325630252100841,
"grad_norm": 18.625,
"learning_rate": 1.4656494522691707e-05,
"loss": 1.3168,
"step": 9830
},
{
"epoch": 1.0336134453781514,
"grad_norm": 5.25,
"learning_rate": 1.4648669796557122e-05,
"loss": 1.1152,
"step": 9840
},
{
"epoch": 1.0346638655462186,
"grad_norm": 12.0,
"learning_rate": 1.4640845070422536e-05,
"loss": 1.0424,
"step": 9850
},
{
"epoch": 1.0357142857142858,
"grad_norm": 14.9375,
"learning_rate": 1.4633020344287951e-05,
"loss": 1.2628,
"step": 9860
},
{
"epoch": 1.036764705882353,
"grad_norm": 14.0,
"learning_rate": 1.4625195618153366e-05,
"loss": 1.5196,
"step": 9870
},
{
"epoch": 1.0378151260504203,
"grad_norm": 12.5,
"learning_rate": 1.461737089201878e-05,
"loss": 1.02,
"step": 9880
},
{
"epoch": 1.0388655462184875,
"grad_norm": 12.5625,
"learning_rate": 1.4609546165884195e-05,
"loss": 1.7924,
"step": 9890
},
{
"epoch": 1.0399159663865547,
"grad_norm": 18.875,
"learning_rate": 1.460172143974961e-05,
"loss": 1.303,
"step": 9900
},
{
"epoch": 1.040966386554622,
"grad_norm": 18.125,
"learning_rate": 1.4593896713615026e-05,
"loss": 1.0753,
"step": 9910
},
{
"epoch": 1.0420168067226891,
"grad_norm": 12.5,
"learning_rate": 1.4586071987480439e-05,
"loss": 1.2265,
"step": 9920
},
{
"epoch": 1.0430672268907564,
"grad_norm": 12.1875,
"learning_rate": 1.4578247261345855e-05,
"loss": 1.6092,
"step": 9930
},
{
"epoch": 1.0441176470588236,
"grad_norm": 21.5,
"learning_rate": 1.4570422535211268e-05,
"loss": 1.2881,
"step": 9940
},
{
"epoch": 1.0451680672268908,
"grad_norm": 18.375,
"learning_rate": 1.4562597809076685e-05,
"loss": 1.4533,
"step": 9950
},
{
"epoch": 1.046218487394958,
"grad_norm": 12.1875,
"learning_rate": 1.4554773082942098e-05,
"loss": 1.0597,
"step": 9960
},
{
"epoch": 1.0472689075630253,
"grad_norm": 4.03125,
"learning_rate": 1.4546948356807514e-05,
"loss": 1.3047,
"step": 9970
},
{
"epoch": 1.0483193277310925,
"grad_norm": 13.5,
"learning_rate": 1.4539123630672927e-05,
"loss": 1.4317,
"step": 9980
},
{
"epoch": 1.0493697478991597,
"grad_norm": 21.625,
"learning_rate": 1.4531298904538343e-05,
"loss": 1.5113,
"step": 9990
},
{
"epoch": 1.050420168067227,
"grad_norm": 14.6875,
"learning_rate": 1.4523474178403758e-05,
"loss": 1.5173,
"step": 10000
},
{
"epoch": 1.0514705882352942,
"grad_norm": 15.8125,
"learning_rate": 1.451564945226917e-05,
"loss": 1.4162,
"step": 10010
},
{
"epoch": 1.0525210084033614,
"grad_norm": 17.75,
"learning_rate": 1.4507824726134587e-05,
"loss": 1.7786,
"step": 10020
},
{
"epoch": 1.0535714285714286,
"grad_norm": 354.0,
"learning_rate": 1.45e-05,
"loss": 1.0287,
"step": 10030
},
{
"epoch": 1.0546218487394958,
"grad_norm": 12.0625,
"learning_rate": 1.4492175273865416e-05,
"loss": 1.2295,
"step": 10040
},
{
"epoch": 1.055672268907563,
"grad_norm": 11.5625,
"learning_rate": 1.448435054773083e-05,
"loss": 1.391,
"step": 10050
},
{
"epoch": 1.0567226890756303,
"grad_norm": 15.125,
"learning_rate": 1.4476525821596246e-05,
"loss": 1.3713,
"step": 10060
},
{
"epoch": 1.0577731092436975,
"grad_norm": 16.75,
"learning_rate": 1.4468701095461659e-05,
"loss": 1.4097,
"step": 10070
},
{
"epoch": 1.0588235294117647,
"grad_norm": 11.375,
"learning_rate": 1.4460876369327075e-05,
"loss": 1.2367,
"step": 10080
},
{
"epoch": 1.059873949579832,
"grad_norm": 13.875,
"learning_rate": 1.4453051643192491e-05,
"loss": 1.8857,
"step": 10090
},
{
"epoch": 1.0609243697478992,
"grad_norm": 3.96875,
"learning_rate": 1.4445226917057904e-05,
"loss": 1.5483,
"step": 10100
},
{
"epoch": 1.0619747899159664,
"grad_norm": 5.71875,
"learning_rate": 1.4437402190923319e-05,
"loss": 1.0275,
"step": 10110
},
{
"epoch": 1.0630252100840336,
"grad_norm": 16.5,
"learning_rate": 1.4429577464788734e-05,
"loss": 1.4544,
"step": 10120
},
{
"epoch": 1.0640756302521008,
"grad_norm": 13.3125,
"learning_rate": 1.4421752738654148e-05,
"loss": 1.4458,
"step": 10130
},
{
"epoch": 1.065126050420168,
"grad_norm": 16.25,
"learning_rate": 1.4413928012519563e-05,
"loss": 0.9895,
"step": 10140
},
{
"epoch": 1.0661764705882353,
"grad_norm": 16.625,
"learning_rate": 1.4406103286384978e-05,
"loss": 1.7776,
"step": 10150
},
{
"epoch": 1.0672268907563025,
"grad_norm": 24.0,
"learning_rate": 1.4398278560250392e-05,
"loss": 1.0594,
"step": 10160
},
{
"epoch": 1.0682773109243697,
"grad_norm": 12.25,
"learning_rate": 1.4390453834115807e-05,
"loss": 1.393,
"step": 10170
},
{
"epoch": 1.069327731092437,
"grad_norm": 12.875,
"learning_rate": 1.4382629107981222e-05,
"loss": 1.5787,
"step": 10180
},
{
"epoch": 1.0703781512605042,
"grad_norm": 13.625,
"learning_rate": 1.4374804381846636e-05,
"loss": 1.2195,
"step": 10190
},
{
"epoch": 1.0714285714285714,
"grad_norm": 6.34375,
"learning_rate": 1.4366979655712053e-05,
"loss": 1.3009,
"step": 10200
},
{
"epoch": 1.0724789915966386,
"grad_norm": 19.875,
"learning_rate": 1.4359154929577466e-05,
"loss": 1.6442,
"step": 10210
},
{
"epoch": 1.0735294117647058,
"grad_norm": 9.6875,
"learning_rate": 1.4351330203442882e-05,
"loss": 1.4135,
"step": 10220
},
{
"epoch": 1.074579831932773,
"grad_norm": 5.125,
"learning_rate": 1.4343505477308295e-05,
"loss": 1.3497,
"step": 10230
},
{
"epoch": 1.0756302521008403,
"grad_norm": 11.6875,
"learning_rate": 1.4335680751173711e-05,
"loss": 1.3815,
"step": 10240
},
{
"epoch": 1.0766806722689075,
"grad_norm": 17.875,
"learning_rate": 1.4327856025039124e-05,
"loss": 1.3823,
"step": 10250
},
{
"epoch": 1.0777310924369747,
"grad_norm": 19.0,
"learning_rate": 1.432003129890454e-05,
"loss": 1.7767,
"step": 10260
},
{
"epoch": 1.078781512605042,
"grad_norm": 20.75,
"learning_rate": 1.4312206572769953e-05,
"loss": 1.2912,
"step": 10270
},
{
"epoch": 1.0798319327731092,
"grad_norm": 12.625,
"learning_rate": 1.430438184663537e-05,
"loss": 1.0742,
"step": 10280
},
{
"epoch": 1.0808823529411764,
"grad_norm": 19.625,
"learning_rate": 1.4296557120500784e-05,
"loss": 0.905,
"step": 10290
},
{
"epoch": 1.0819327731092436,
"grad_norm": 4.78125,
"learning_rate": 1.4288732394366197e-05,
"loss": 0.8441,
"step": 10300
},
{
"epoch": 1.0829831932773109,
"grad_norm": 16.375,
"learning_rate": 1.4280907668231614e-05,
"loss": 1.4784,
"step": 10310
},
{
"epoch": 1.084033613445378,
"grad_norm": 9.625,
"learning_rate": 1.4273082942097027e-05,
"loss": 1.3086,
"step": 10320
},
{
"epoch": 1.0850840336134453,
"grad_norm": 12.0,
"learning_rate": 1.4265258215962443e-05,
"loss": 1.2392,
"step": 10330
},
{
"epoch": 1.0861344537815125,
"grad_norm": 24.75,
"learning_rate": 1.4257433489827856e-05,
"loss": 1.7351,
"step": 10340
},
{
"epoch": 1.0871848739495797,
"grad_norm": 16.25,
"learning_rate": 1.4249608763693272e-05,
"loss": 1.3572,
"step": 10350
},
{
"epoch": 1.088235294117647,
"grad_norm": 23.625,
"learning_rate": 1.4241784037558685e-05,
"loss": 1.2551,
"step": 10360
},
{
"epoch": 1.0892857142857142,
"grad_norm": 10.3125,
"learning_rate": 1.4233959311424102e-05,
"loss": 1.2559,
"step": 10370
},
{
"epoch": 1.0903361344537814,
"grad_norm": 13.3125,
"learning_rate": 1.4226134585289515e-05,
"loss": 0.9075,
"step": 10380
},
{
"epoch": 1.0913865546218486,
"grad_norm": 18.75,
"learning_rate": 1.4218309859154931e-05,
"loss": 1.1107,
"step": 10390
},
{
"epoch": 1.092436974789916,
"grad_norm": 11.75,
"learning_rate": 1.4210485133020346e-05,
"loss": 1.5606,
"step": 10400
},
{
"epoch": 1.0934873949579833,
"grad_norm": 4.40625,
"learning_rate": 1.420266040688576e-05,
"loss": 1.2448,
"step": 10410
},
{
"epoch": 1.0945378151260505,
"grad_norm": 24.375,
"learning_rate": 1.4194835680751175e-05,
"loss": 1.3813,
"step": 10420
},
{
"epoch": 1.0955882352941178,
"grad_norm": 11.1875,
"learning_rate": 1.418701095461659e-05,
"loss": 1.2514,
"step": 10430
},
{
"epoch": 1.096638655462185,
"grad_norm": 12.5,
"learning_rate": 1.4179186228482004e-05,
"loss": 1.2028,
"step": 10440
},
{
"epoch": 1.0976890756302522,
"grad_norm": 17.5,
"learning_rate": 1.4171361502347419e-05,
"loss": 1.5367,
"step": 10450
},
{
"epoch": 1.0987394957983194,
"grad_norm": 15.5625,
"learning_rate": 1.4163536776212834e-05,
"loss": 1.3679,
"step": 10460
},
{
"epoch": 1.0997899159663866,
"grad_norm": 17.625,
"learning_rate": 1.4155712050078248e-05,
"loss": 1.7243,
"step": 10470
},
{
"epoch": 1.1008403361344539,
"grad_norm": 11.9375,
"learning_rate": 1.4147887323943663e-05,
"loss": 1.1804,
"step": 10480
},
{
"epoch": 1.101890756302521,
"grad_norm": 3.125,
"learning_rate": 1.414006259780908e-05,
"loss": 1.5059,
"step": 10490
},
{
"epoch": 1.1029411764705883,
"grad_norm": 9.125,
"learning_rate": 1.4132237871674492e-05,
"loss": 1.171,
"step": 10500
},
{
"epoch": 1.1039915966386555,
"grad_norm": 12.0625,
"learning_rate": 1.4124413145539909e-05,
"loss": 1.5,
"step": 10510
},
{
"epoch": 1.1050420168067228,
"grad_norm": 11.9375,
"learning_rate": 1.4116588419405321e-05,
"loss": 1.8615,
"step": 10520
},
{
"epoch": 1.10609243697479,
"grad_norm": 11.5625,
"learning_rate": 1.4108763693270738e-05,
"loss": 1.547,
"step": 10530
},
{
"epoch": 1.1071428571428572,
"grad_norm": 18.625,
"learning_rate": 1.410093896713615e-05,
"loss": 1.4725,
"step": 10540
},
{
"epoch": 1.1081932773109244,
"grad_norm": 35.25,
"learning_rate": 1.4093114241001567e-05,
"loss": 1.4691,
"step": 10550
},
{
"epoch": 1.1092436974789917,
"grad_norm": 14.5625,
"learning_rate": 1.408528951486698e-05,
"loss": 1.2334,
"step": 10560
},
{
"epoch": 1.1102941176470589,
"grad_norm": 12.9375,
"learning_rate": 1.4077464788732396e-05,
"loss": 1.5374,
"step": 10570
},
{
"epoch": 1.111344537815126,
"grad_norm": 13.75,
"learning_rate": 1.406964006259781e-05,
"loss": 1.4166,
"step": 10580
},
{
"epoch": 1.1123949579831933,
"grad_norm": 12.1875,
"learning_rate": 1.4061815336463224e-05,
"loss": 1.1928,
"step": 10590
},
{
"epoch": 1.1134453781512605,
"grad_norm": 13.8125,
"learning_rate": 1.405399061032864e-05,
"loss": 1.2728,
"step": 10600
},
{
"epoch": 1.1144957983193278,
"grad_norm": 17.25,
"learning_rate": 1.4046165884194053e-05,
"loss": 1.3545,
"step": 10610
},
{
"epoch": 1.115546218487395,
"grad_norm": 20.0,
"learning_rate": 1.403834115805947e-05,
"loss": 1.6394,
"step": 10620
},
{
"epoch": 1.1165966386554622,
"grad_norm": 11.8125,
"learning_rate": 1.4030516431924883e-05,
"loss": 1.6101,
"step": 10630
},
{
"epoch": 1.1176470588235294,
"grad_norm": 10.5625,
"learning_rate": 1.4022691705790299e-05,
"loss": 0.965,
"step": 10640
},
{
"epoch": 1.1186974789915967,
"grad_norm": 13.375,
"learning_rate": 1.4014866979655712e-05,
"loss": 1.4676,
"step": 10650
},
{
"epoch": 1.1197478991596639,
"grad_norm": 11.5625,
"learning_rate": 1.4007042253521128e-05,
"loss": 1.4875,
"step": 10660
},
{
"epoch": 1.120798319327731,
"grad_norm": 18.875,
"learning_rate": 1.3999217527386541e-05,
"loss": 1.5261,
"step": 10670
},
{
"epoch": 1.1218487394957983,
"grad_norm": 9.5,
"learning_rate": 1.3991392801251958e-05,
"loss": 1.3436,
"step": 10680
},
{
"epoch": 1.1228991596638656,
"grad_norm": 18.375,
"learning_rate": 1.3983568075117372e-05,
"loss": 1.3652,
"step": 10690
},
{
"epoch": 1.1239495798319328,
"grad_norm": 15.8125,
"learning_rate": 1.3975743348982787e-05,
"loss": 1.5565,
"step": 10700
},
{
"epoch": 1.125,
"grad_norm": 20.375,
"learning_rate": 1.3967918622848202e-05,
"loss": 1.3983,
"step": 10710
},
{
"epoch": 1.1260504201680672,
"grad_norm": 20.625,
"learning_rate": 1.3960093896713616e-05,
"loss": 1.2691,
"step": 10720
},
{
"epoch": 1.1271008403361344,
"grad_norm": 7.59375,
"learning_rate": 1.3952269170579031e-05,
"loss": 1.1509,
"step": 10730
},
{
"epoch": 1.1281512605042017,
"grad_norm": 12.875,
"learning_rate": 1.3944444444444446e-05,
"loss": 0.952,
"step": 10740
},
{
"epoch": 1.129201680672269,
"grad_norm": 14.375,
"learning_rate": 1.393661971830986e-05,
"loss": 1.3555,
"step": 10750
},
{
"epoch": 1.1302521008403361,
"grad_norm": 12.0625,
"learning_rate": 1.3928794992175275e-05,
"loss": 1.2439,
"step": 10760
},
{
"epoch": 1.1313025210084033,
"grad_norm": 11.875,
"learning_rate": 1.392097026604069e-05,
"loss": 1.1128,
"step": 10770
},
{
"epoch": 1.1323529411764706,
"grad_norm": 15.5625,
"learning_rate": 1.3913145539906106e-05,
"loss": 1.4089,
"step": 10780
},
{
"epoch": 1.1334033613445378,
"grad_norm": 16.0,
"learning_rate": 1.3905320813771519e-05,
"loss": 1.3108,
"step": 10790
},
{
"epoch": 1.134453781512605,
"grad_norm": 12.625,
"learning_rate": 1.3897496087636935e-05,
"loss": 0.923,
"step": 10800
},
{
"epoch": 1.1355042016806722,
"grad_norm": 5.6875,
"learning_rate": 1.3889671361502348e-05,
"loss": 0.8651,
"step": 10810
},
{
"epoch": 1.1365546218487395,
"grad_norm": 7.46875,
"learning_rate": 1.3881846635367764e-05,
"loss": 0.9996,
"step": 10820
},
{
"epoch": 1.1376050420168067,
"grad_norm": 15.25,
"learning_rate": 1.3874021909233177e-05,
"loss": 1.188,
"step": 10830
},
{
"epoch": 1.138655462184874,
"grad_norm": 11.8125,
"learning_rate": 1.3866197183098594e-05,
"loss": 1.2221,
"step": 10840
},
{
"epoch": 1.1397058823529411,
"grad_norm": 18.75,
"learning_rate": 1.3858372456964007e-05,
"loss": 1.3794,
"step": 10850
},
{
"epoch": 1.1407563025210083,
"grad_norm": 16.875,
"learning_rate": 1.3850547730829423e-05,
"loss": 1.4104,
"step": 10860
},
{
"epoch": 1.1418067226890756,
"grad_norm": 27.25,
"learning_rate": 1.3842723004694836e-05,
"loss": 1.3409,
"step": 10870
},
{
"epoch": 1.1428571428571428,
"grad_norm": 23.875,
"learning_rate": 1.383489827856025e-05,
"loss": 1.4767,
"step": 10880
},
{
"epoch": 1.14390756302521,
"grad_norm": 5.46875,
"learning_rate": 1.3827073552425667e-05,
"loss": 0.9714,
"step": 10890
},
{
"epoch": 1.1449579831932772,
"grad_norm": 5.59375,
"learning_rate": 1.381924882629108e-05,
"loss": 1.3208,
"step": 10900
},
{
"epoch": 1.1460084033613445,
"grad_norm": 15.5,
"learning_rate": 1.3811424100156496e-05,
"loss": 1.2431,
"step": 10910
},
{
"epoch": 1.1470588235294117,
"grad_norm": 9.0625,
"learning_rate": 1.380359937402191e-05,
"loss": 1.254,
"step": 10920
},
{
"epoch": 1.148109243697479,
"grad_norm": 16.75,
"learning_rate": 1.3795774647887326e-05,
"loss": 1.6899,
"step": 10930
},
{
"epoch": 1.1491596638655461,
"grad_norm": 15.1875,
"learning_rate": 1.3787949921752739e-05,
"loss": 1.1452,
"step": 10940
},
{
"epoch": 1.1502100840336134,
"grad_norm": 17.75,
"learning_rate": 1.3780125195618155e-05,
"loss": 1.8214,
"step": 10950
},
{
"epoch": 1.1512605042016806,
"grad_norm": 4.71875,
"learning_rate": 1.3772300469483568e-05,
"loss": 0.724,
"step": 10960
},
{
"epoch": 1.1523109243697478,
"grad_norm": 23.375,
"learning_rate": 1.3764475743348984e-05,
"loss": 1.2687,
"step": 10970
},
{
"epoch": 1.153361344537815,
"grad_norm": 13.0,
"learning_rate": 1.3756651017214399e-05,
"loss": 1.2736,
"step": 10980
},
{
"epoch": 1.1544117647058822,
"grad_norm": 4.40625,
"learning_rate": 1.3748826291079814e-05,
"loss": 1.5121,
"step": 10990
},
{
"epoch": 1.1554621848739495,
"grad_norm": 16.375,
"learning_rate": 1.3741001564945228e-05,
"loss": 1.3454,
"step": 11000
},
{
"epoch": 1.1565126050420167,
"grad_norm": 17.0,
"learning_rate": 1.3733176838810643e-05,
"loss": 1.5489,
"step": 11010
},
{
"epoch": 1.157563025210084,
"grad_norm": 15.9375,
"learning_rate": 1.3725352112676057e-05,
"loss": 1.0976,
"step": 11020
},
{
"epoch": 1.1586134453781511,
"grad_norm": 34.25,
"learning_rate": 1.3717527386541472e-05,
"loss": 1.1138,
"step": 11030
},
{
"epoch": 1.1596638655462184,
"grad_norm": 16.75,
"learning_rate": 1.3709702660406887e-05,
"loss": 1.4818,
"step": 11040
},
{
"epoch": 1.1607142857142858,
"grad_norm": 16.25,
"learning_rate": 1.3701877934272301e-05,
"loss": 1.3685,
"step": 11050
},
{
"epoch": 1.161764705882353,
"grad_norm": 9.25,
"learning_rate": 1.3694053208137716e-05,
"loss": 1.2225,
"step": 11060
},
{
"epoch": 1.1628151260504203,
"grad_norm": 12.0,
"learning_rate": 1.368622848200313e-05,
"loss": 1.2856,
"step": 11070
},
{
"epoch": 1.1638655462184875,
"grad_norm": 7.28125,
"learning_rate": 1.3678403755868545e-05,
"loss": 1.1306,
"step": 11080
},
{
"epoch": 1.1649159663865547,
"grad_norm": 9.3125,
"learning_rate": 1.3670579029733962e-05,
"loss": 1.333,
"step": 11090
},
{
"epoch": 1.165966386554622,
"grad_norm": 9.75,
"learning_rate": 1.3662754303599375e-05,
"loss": 1.265,
"step": 11100
},
{
"epoch": 1.1670168067226891,
"grad_norm": 19.625,
"learning_rate": 1.3654929577464791e-05,
"loss": 1.5931,
"step": 11110
},
{
"epoch": 1.1680672268907564,
"grad_norm": 4.3125,
"learning_rate": 1.3647104851330204e-05,
"loss": 1.6028,
"step": 11120
},
{
"epoch": 1.1691176470588236,
"grad_norm": 13.1875,
"learning_rate": 1.363928012519562e-05,
"loss": 1.2405,
"step": 11130
},
{
"epoch": 1.1701680672268908,
"grad_norm": 12.8125,
"learning_rate": 1.3631455399061033e-05,
"loss": 1.6825,
"step": 11140
},
{
"epoch": 1.171218487394958,
"grad_norm": 18.125,
"learning_rate": 1.362363067292645e-05,
"loss": 1.2953,
"step": 11150
},
{
"epoch": 1.1722689075630253,
"grad_norm": 14.625,
"learning_rate": 1.3615805946791863e-05,
"loss": 1.254,
"step": 11160
},
{
"epoch": 1.1733193277310925,
"grad_norm": 12.6875,
"learning_rate": 1.3607981220657277e-05,
"loss": 0.9121,
"step": 11170
},
{
"epoch": 1.1743697478991597,
"grad_norm": 15.875,
"learning_rate": 1.3600156494522694e-05,
"loss": 1.2803,
"step": 11180
},
{
"epoch": 1.175420168067227,
"grad_norm": 20.0,
"learning_rate": 1.3592331768388107e-05,
"loss": 1.0452,
"step": 11190
},
{
"epoch": 1.1764705882352942,
"grad_norm": 17.875,
"learning_rate": 1.3584507042253523e-05,
"loss": 1.6264,
"step": 11200
},
{
"epoch": 1.1775210084033614,
"grad_norm": 36.25,
"learning_rate": 1.3576682316118936e-05,
"loss": 1.2355,
"step": 11210
},
{
"epoch": 1.1785714285714286,
"grad_norm": 11.375,
"learning_rate": 1.3568857589984352e-05,
"loss": 1.7128,
"step": 11220
},
{
"epoch": 1.1796218487394958,
"grad_norm": 9.875,
"learning_rate": 1.3561032863849765e-05,
"loss": 1.0711,
"step": 11230
},
{
"epoch": 1.180672268907563,
"grad_norm": 12.9375,
"learning_rate": 1.3553208137715182e-05,
"loss": 1.1493,
"step": 11240
},
{
"epoch": 1.1817226890756303,
"grad_norm": 14.6875,
"learning_rate": 1.3545383411580595e-05,
"loss": 1.3251,
"step": 11250
},
{
"epoch": 1.1827731092436975,
"grad_norm": 21.625,
"learning_rate": 1.3537558685446011e-05,
"loss": 1.8988,
"step": 11260
},
{
"epoch": 1.1838235294117647,
"grad_norm": 11.6875,
"learning_rate": 1.3529733959311426e-05,
"loss": 1.6502,
"step": 11270
},
{
"epoch": 1.184873949579832,
"grad_norm": 14.4375,
"learning_rate": 1.352190923317684e-05,
"loss": 1.2318,
"step": 11280
},
{
"epoch": 1.1859243697478992,
"grad_norm": 12.125,
"learning_rate": 1.3514084507042255e-05,
"loss": 1.6075,
"step": 11290
},
{
"epoch": 1.1869747899159664,
"grad_norm": 14.5,
"learning_rate": 1.350625978090767e-05,
"loss": 1.43,
"step": 11300
},
{
"epoch": 1.1880252100840336,
"grad_norm": 20.875,
"learning_rate": 1.3498435054773084e-05,
"loss": 1.5842,
"step": 11310
},
{
"epoch": 1.1890756302521008,
"grad_norm": 14.375,
"learning_rate": 1.3490610328638499e-05,
"loss": 1.573,
"step": 11320
},
{
"epoch": 1.190126050420168,
"grad_norm": 11.25,
"learning_rate": 1.3482785602503913e-05,
"loss": 1.5654,
"step": 11330
},
{
"epoch": 1.1911764705882353,
"grad_norm": 18.125,
"learning_rate": 1.3474960876369328e-05,
"loss": 2.1257,
"step": 11340
},
{
"epoch": 1.1922268907563025,
"grad_norm": 16.875,
"learning_rate": 1.3467136150234743e-05,
"loss": 1.4576,
"step": 11350
},
{
"epoch": 1.1932773109243697,
"grad_norm": 8.0625,
"learning_rate": 1.3459311424100157e-05,
"loss": 1.3279,
"step": 11360
},
{
"epoch": 1.194327731092437,
"grad_norm": 189.0,
"learning_rate": 1.3451486697965572e-05,
"loss": 1.4203,
"step": 11370
},
{
"epoch": 1.1953781512605042,
"grad_norm": 21.375,
"learning_rate": 1.3443661971830988e-05,
"loss": 1.4392,
"step": 11380
},
{
"epoch": 1.1964285714285714,
"grad_norm": 12.9375,
"learning_rate": 1.3435837245696401e-05,
"loss": 1.6435,
"step": 11390
},
{
"epoch": 1.1974789915966386,
"grad_norm": 22.125,
"learning_rate": 1.3428012519561818e-05,
"loss": 1.1396,
"step": 11400
},
{
"epoch": 1.1985294117647058,
"grad_norm": 13.125,
"learning_rate": 1.342018779342723e-05,
"loss": 0.9979,
"step": 11410
},
{
"epoch": 1.199579831932773,
"grad_norm": 14.9375,
"learning_rate": 1.3412363067292647e-05,
"loss": 1.2108,
"step": 11420
},
{
"epoch": 1.2006302521008403,
"grad_norm": 19.625,
"learning_rate": 1.340453834115806e-05,
"loss": 0.9337,
"step": 11430
},
{
"epoch": 1.2016806722689075,
"grad_norm": 14.25,
"learning_rate": 1.3396713615023476e-05,
"loss": 1.4902,
"step": 11440
},
{
"epoch": 1.2027310924369747,
"grad_norm": 16.875,
"learning_rate": 1.338888888888889e-05,
"loss": 1.4991,
"step": 11450
},
{
"epoch": 1.203781512605042,
"grad_norm": 18.5,
"learning_rate": 1.3381064162754304e-05,
"loss": 1.2124,
"step": 11460
},
{
"epoch": 1.2048319327731092,
"grad_norm": 15.5625,
"learning_rate": 1.337323943661972e-05,
"loss": 1.4139,
"step": 11470
},
{
"epoch": 1.2058823529411764,
"grad_norm": 14.0,
"learning_rate": 1.3365414710485133e-05,
"loss": 1.3933,
"step": 11480
},
{
"epoch": 1.2069327731092436,
"grad_norm": 4.9375,
"learning_rate": 1.335758998435055e-05,
"loss": 1.5434,
"step": 11490
},
{
"epoch": 1.2079831932773109,
"grad_norm": 16.25,
"learning_rate": 1.3349765258215963e-05,
"loss": 1.5367,
"step": 11500
},
{
"epoch": 1.209033613445378,
"grad_norm": 9.25,
"learning_rate": 1.3341940532081379e-05,
"loss": 1.3468,
"step": 11510
},
{
"epoch": 1.2100840336134453,
"grad_norm": 15.875,
"learning_rate": 1.3334115805946792e-05,
"loss": 1.4425,
"step": 11520
},
{
"epoch": 1.2111344537815125,
"grad_norm": 18.625,
"learning_rate": 1.3326291079812208e-05,
"loss": 1.7911,
"step": 11530
},
{
"epoch": 1.2121848739495797,
"grad_norm": 13.3125,
"learning_rate": 1.3318466353677621e-05,
"loss": 1.1501,
"step": 11540
},
{
"epoch": 1.213235294117647,
"grad_norm": 13.1875,
"learning_rate": 1.3310641627543037e-05,
"loss": 1.2397,
"step": 11550
},
{
"epoch": 1.2142857142857142,
"grad_norm": 14.25,
"learning_rate": 1.330281690140845e-05,
"loss": 1.5209,
"step": 11560
},
{
"epoch": 1.2153361344537816,
"grad_norm": 3.5,
"learning_rate": 1.3294992175273867e-05,
"loss": 1.6226,
"step": 11570
},
{
"epoch": 1.2163865546218489,
"grad_norm": 17.25,
"learning_rate": 1.3287167449139281e-05,
"loss": 1.2548,
"step": 11580
},
{
"epoch": 1.217436974789916,
"grad_norm": 15.9375,
"learning_rate": 1.3279342723004696e-05,
"loss": 1.5391,
"step": 11590
},
{
"epoch": 1.2184873949579833,
"grad_norm": 17.25,
"learning_rate": 1.327151799687011e-05,
"loss": 1.0214,
"step": 11600
},
{
"epoch": 1.2195378151260505,
"grad_norm": 17.625,
"learning_rate": 1.3263693270735525e-05,
"loss": 1.2153,
"step": 11610
},
{
"epoch": 1.2205882352941178,
"grad_norm": 9.1875,
"learning_rate": 1.325586854460094e-05,
"loss": 0.8848,
"step": 11620
},
{
"epoch": 1.221638655462185,
"grad_norm": 11.875,
"learning_rate": 1.3248043818466355e-05,
"loss": 1.4852,
"step": 11630
},
{
"epoch": 1.2226890756302522,
"grad_norm": 5.1875,
"learning_rate": 1.324021909233177e-05,
"loss": 1.2781,
"step": 11640
},
{
"epoch": 1.2237394957983194,
"grad_norm": 13.5,
"learning_rate": 1.3232394366197184e-05,
"loss": 1.4085,
"step": 11650
},
{
"epoch": 1.2247899159663866,
"grad_norm": 12.875,
"learning_rate": 1.3224569640062599e-05,
"loss": 0.9212,
"step": 11660
},
{
"epoch": 1.2258403361344539,
"grad_norm": 9.8125,
"learning_rate": 1.3216744913928015e-05,
"loss": 1.5026,
"step": 11670
},
{
"epoch": 1.226890756302521,
"grad_norm": 15.1875,
"learning_rate": 1.3208920187793428e-05,
"loss": 1.153,
"step": 11680
},
{
"epoch": 1.2279411764705883,
"grad_norm": 5.78125,
"learning_rate": 1.3201095461658844e-05,
"loss": 1.5746,
"step": 11690
},
{
"epoch": 1.2289915966386555,
"grad_norm": 5.5625,
"learning_rate": 1.3193270735524257e-05,
"loss": 1.3192,
"step": 11700
},
{
"epoch": 1.2300420168067228,
"grad_norm": 12.5,
"learning_rate": 1.3185446009389674e-05,
"loss": 1.6797,
"step": 11710
},
{
"epoch": 1.23109243697479,
"grad_norm": 5.21875,
"learning_rate": 1.3177621283255087e-05,
"loss": 1.0005,
"step": 11720
},
{
"epoch": 1.2321428571428572,
"grad_norm": 11.25,
"learning_rate": 1.3169796557120503e-05,
"loss": 1.5104,
"step": 11730
},
{
"epoch": 1.2331932773109244,
"grad_norm": 8.8125,
"learning_rate": 1.3161971830985916e-05,
"loss": 1.4712,
"step": 11740
},
{
"epoch": 1.2342436974789917,
"grad_norm": 6.84375,
"learning_rate": 1.3154147104851332e-05,
"loss": 1.3192,
"step": 11750
},
{
"epoch": 1.2352941176470589,
"grad_norm": 11.875,
"learning_rate": 1.3146322378716745e-05,
"loss": 1.0407,
"step": 11760
},
{
"epoch": 1.236344537815126,
"grad_norm": 5.46875,
"learning_rate": 1.313849765258216e-05,
"loss": 1.7191,
"step": 11770
},
{
"epoch": 1.2373949579831933,
"grad_norm": 17.375,
"learning_rate": 1.3130672926447576e-05,
"loss": 1.6019,
"step": 11780
},
{
"epoch": 1.2384453781512605,
"grad_norm": 9.75,
"learning_rate": 1.3122848200312989e-05,
"loss": 1.4871,
"step": 11790
},
{
"epoch": 1.2394957983193278,
"grad_norm": 7.8125,
"learning_rate": 1.3115023474178405e-05,
"loss": 1.3843,
"step": 11800
},
{
"epoch": 1.240546218487395,
"grad_norm": 7.28125,
"learning_rate": 1.3107198748043818e-05,
"loss": 1.1443,
"step": 11810
},
{
"epoch": 1.2415966386554622,
"grad_norm": 13.75,
"learning_rate": 1.3099374021909235e-05,
"loss": 1.1077,
"step": 11820
},
{
"epoch": 1.2426470588235294,
"grad_norm": 17.125,
"learning_rate": 1.3091549295774648e-05,
"loss": 1.1865,
"step": 11830
},
{
"epoch": 1.2436974789915967,
"grad_norm": 12.1875,
"learning_rate": 1.3083724569640064e-05,
"loss": 1.5642,
"step": 11840
},
{
"epoch": 1.2447478991596639,
"grad_norm": 9.6875,
"learning_rate": 1.3075899843505477e-05,
"loss": 1.1188,
"step": 11850
},
{
"epoch": 1.245798319327731,
"grad_norm": 16.125,
"learning_rate": 1.3068075117370893e-05,
"loss": 2.0191,
"step": 11860
},
{
"epoch": 1.2468487394957983,
"grad_norm": 12.25,
"learning_rate": 1.3060250391236308e-05,
"loss": 1.4672,
"step": 11870
},
{
"epoch": 1.2478991596638656,
"grad_norm": 5.28125,
"learning_rate": 1.3052425665101723e-05,
"loss": 1.1893,
"step": 11880
},
{
"epoch": 1.2489495798319328,
"grad_norm": 9.0625,
"learning_rate": 1.3044600938967137e-05,
"loss": 1.175,
"step": 11890
},
{
"epoch": 1.25,
"grad_norm": 13.125,
"learning_rate": 1.3036776212832552e-05,
"loss": 1.8234,
"step": 11900
},
{
"epoch": 1.2510504201680672,
"grad_norm": 20.875,
"learning_rate": 1.3028951486697967e-05,
"loss": 1.6298,
"step": 11910
},
{
"epoch": 1.2521008403361344,
"grad_norm": 14.0625,
"learning_rate": 1.3021126760563381e-05,
"loss": 1.3639,
"step": 11920
},
{
"epoch": 1.2531512605042017,
"grad_norm": 24.375,
"learning_rate": 1.3013302034428796e-05,
"loss": 1.449,
"step": 11930
},
{
"epoch": 1.254201680672269,
"grad_norm": 11.3125,
"learning_rate": 1.300547730829421e-05,
"loss": 1.1688,
"step": 11940
},
{
"epoch": 1.2552521008403361,
"grad_norm": 13.25,
"learning_rate": 1.2997652582159625e-05,
"loss": 1.6437,
"step": 11950
},
{
"epoch": 1.2563025210084033,
"grad_norm": 13.4375,
"learning_rate": 1.2989827856025042e-05,
"loss": 1.0691,
"step": 11960
},
{
"epoch": 1.2573529411764706,
"grad_norm": 24.125,
"learning_rate": 1.2982003129890455e-05,
"loss": 1.7317,
"step": 11970
},
{
"epoch": 1.2584033613445378,
"grad_norm": 19.125,
"learning_rate": 1.2974178403755871e-05,
"loss": 1.1363,
"step": 11980
},
{
"epoch": 1.259453781512605,
"grad_norm": 14.1875,
"learning_rate": 1.2966353677621284e-05,
"loss": 1.362,
"step": 11990
},
{
"epoch": 1.2605042016806722,
"grad_norm": 11.375,
"learning_rate": 1.29585289514867e-05,
"loss": 1.6147,
"step": 12000
},
{
"epoch": 1.2615546218487395,
"grad_norm": 12.1875,
"learning_rate": 1.2950704225352113e-05,
"loss": 1.3879,
"step": 12010
},
{
"epoch": 1.2626050420168067,
"grad_norm": 11.9375,
"learning_rate": 1.294287949921753e-05,
"loss": 1.3297,
"step": 12020
},
{
"epoch": 1.263655462184874,
"grad_norm": 11.5,
"learning_rate": 1.2935054773082943e-05,
"loss": 1.1665,
"step": 12030
},
{
"epoch": 1.2647058823529411,
"grad_norm": 18.125,
"learning_rate": 1.2927230046948359e-05,
"loss": 1.4025,
"step": 12040
},
{
"epoch": 1.2657563025210083,
"grad_norm": 25.0,
"learning_rate": 1.2919405320813772e-05,
"loss": 1.2586,
"step": 12050
},
{
"epoch": 1.2668067226890756,
"grad_norm": 12.5,
"learning_rate": 1.2911580594679186e-05,
"loss": 1.7389,
"step": 12060
},
{
"epoch": 1.2678571428571428,
"grad_norm": 19.25,
"learning_rate": 1.2903755868544603e-05,
"loss": 1.1541,
"step": 12070
},
{
"epoch": 1.26890756302521,
"grad_norm": 7.625,
"learning_rate": 1.2895931142410016e-05,
"loss": 1.1763,
"step": 12080
},
{
"epoch": 1.2699579831932772,
"grad_norm": 23.0,
"learning_rate": 1.2888106416275432e-05,
"loss": 1.2426,
"step": 12090
},
{
"epoch": 1.2710084033613445,
"grad_norm": 4.84375,
"learning_rate": 1.2880281690140845e-05,
"loss": 1.3512,
"step": 12100
},
{
"epoch": 1.2720588235294117,
"grad_norm": 15.5,
"learning_rate": 1.2872456964006261e-05,
"loss": 1.1114,
"step": 12110
},
{
"epoch": 1.273109243697479,
"grad_norm": 11.8125,
"learning_rate": 1.2864632237871674e-05,
"loss": 1.3452,
"step": 12120
},
{
"epoch": 1.2741596638655461,
"grad_norm": 21.125,
"learning_rate": 1.285680751173709e-05,
"loss": 1.4839,
"step": 12130
},
{
"epoch": 1.2752100840336134,
"grad_norm": 26.0,
"learning_rate": 1.2848982785602504e-05,
"loss": 1.4127,
"step": 12140
},
{
"epoch": 1.2762605042016806,
"grad_norm": 13.375,
"learning_rate": 1.284115805946792e-05,
"loss": 0.9342,
"step": 12150
},
{
"epoch": 1.2773109243697478,
"grad_norm": 13.125,
"learning_rate": 1.2833333333333335e-05,
"loss": 1.1793,
"step": 12160
},
{
"epoch": 1.278361344537815,
"grad_norm": 12.5625,
"learning_rate": 1.282550860719875e-05,
"loss": 1.3344,
"step": 12170
},
{
"epoch": 1.2794117647058822,
"grad_norm": 21.25,
"learning_rate": 1.2817683881064164e-05,
"loss": 0.8956,
"step": 12180
},
{
"epoch": 1.2804621848739495,
"grad_norm": 15.5625,
"learning_rate": 1.2809859154929579e-05,
"loss": 1.3549,
"step": 12190
},
{
"epoch": 1.2815126050420167,
"grad_norm": 17.875,
"learning_rate": 1.2802034428794993e-05,
"loss": 1.5818,
"step": 12200
},
{
"epoch": 1.282563025210084,
"grad_norm": 5.65625,
"learning_rate": 1.2794209702660408e-05,
"loss": 1.0335,
"step": 12210
},
{
"epoch": 1.2836134453781511,
"grad_norm": 3.921875,
"learning_rate": 1.2786384976525823e-05,
"loss": 1.2507,
"step": 12220
},
{
"epoch": 1.2846638655462184,
"grad_norm": 27.875,
"learning_rate": 1.2778560250391237e-05,
"loss": 1.9042,
"step": 12230
},
{
"epoch": 1.2857142857142856,
"grad_norm": 3.921875,
"learning_rate": 1.2770735524256652e-05,
"loss": 1.6034,
"step": 12240
},
{
"epoch": 1.2867647058823528,
"grad_norm": 4.6875,
"learning_rate": 1.2762910798122065e-05,
"loss": 1.2122,
"step": 12250
},
{
"epoch": 1.28781512605042,
"grad_norm": 8.75,
"learning_rate": 1.2755086071987481e-05,
"loss": 1.5361,
"step": 12260
},
{
"epoch": 1.2888655462184873,
"grad_norm": 25.25,
"learning_rate": 1.2747261345852898e-05,
"loss": 1.5994,
"step": 12270
},
{
"epoch": 1.2899159663865547,
"grad_norm": 20.125,
"learning_rate": 1.273943661971831e-05,
"loss": 1.5681,
"step": 12280
},
{
"epoch": 1.290966386554622,
"grad_norm": 13.4375,
"learning_rate": 1.2731611893583727e-05,
"loss": 1.2839,
"step": 12290
},
{
"epoch": 1.2920168067226891,
"grad_norm": 11.9375,
"learning_rate": 1.272378716744914e-05,
"loss": 1.1758,
"step": 12300
},
{
"epoch": 1.2930672268907564,
"grad_norm": 4.6875,
"learning_rate": 1.2715962441314556e-05,
"loss": 1.2308,
"step": 12310
},
{
"epoch": 1.2941176470588236,
"grad_norm": 27.125,
"learning_rate": 1.2708137715179969e-05,
"loss": 1.236,
"step": 12320
},
{
"epoch": 1.2951680672268908,
"grad_norm": 11.875,
"learning_rate": 1.2700312989045385e-05,
"loss": 1.0556,
"step": 12330
},
{
"epoch": 1.296218487394958,
"grad_norm": 13.3125,
"learning_rate": 1.2692488262910798e-05,
"loss": 1.2603,
"step": 12340
},
{
"epoch": 1.2972689075630253,
"grad_norm": 9.875,
"learning_rate": 1.2684663536776213e-05,
"loss": 0.8483,
"step": 12350
},
{
"epoch": 1.2983193277310925,
"grad_norm": 15.875,
"learning_rate": 1.267683881064163e-05,
"loss": 1.514,
"step": 12360
},
{
"epoch": 1.2993697478991597,
"grad_norm": 9.0,
"learning_rate": 1.2669014084507042e-05,
"loss": 1.1939,
"step": 12370
},
{
"epoch": 1.300420168067227,
"grad_norm": 15.75,
"learning_rate": 1.2661189358372459e-05,
"loss": 1.5688,
"step": 12380
},
{
"epoch": 1.3014705882352942,
"grad_norm": 19.5,
"learning_rate": 1.2653364632237872e-05,
"loss": 1.3059,
"step": 12390
},
{
"epoch": 1.3025210084033614,
"grad_norm": 12.375,
"learning_rate": 1.2645539906103288e-05,
"loss": 0.9362,
"step": 12400
},
{
"epoch": 1.3035714285714286,
"grad_norm": 9.625,
"learning_rate": 1.2637715179968701e-05,
"loss": 1.2489,
"step": 12410
},
{
"epoch": 1.3046218487394958,
"grad_norm": 10.8125,
"learning_rate": 1.2629890453834117e-05,
"loss": 1.4674,
"step": 12420
},
{
"epoch": 1.305672268907563,
"grad_norm": 11.75,
"learning_rate": 1.262206572769953e-05,
"loss": 1.2837,
"step": 12430
},
{
"epoch": 1.3067226890756303,
"grad_norm": 4.46875,
"learning_rate": 1.2614241001564947e-05,
"loss": 1.1496,
"step": 12440
},
{
"epoch": 1.3077731092436975,
"grad_norm": 5.65625,
"learning_rate": 1.2606416275430361e-05,
"loss": 1.2523,
"step": 12450
},
{
"epoch": 1.3088235294117647,
"grad_norm": 14.875,
"learning_rate": 1.2598591549295776e-05,
"loss": 1.1827,
"step": 12460
},
{
"epoch": 1.309873949579832,
"grad_norm": 6.375,
"learning_rate": 1.259076682316119e-05,
"loss": 1.3326,
"step": 12470
},
{
"epoch": 1.3109243697478992,
"grad_norm": 14.0,
"learning_rate": 1.2582942097026605e-05,
"loss": 1.5072,
"step": 12480
},
{
"epoch": 1.3119747899159664,
"grad_norm": 17.5,
"learning_rate": 1.257511737089202e-05,
"loss": 1.0364,
"step": 12490
},
{
"epoch": 1.3130252100840336,
"grad_norm": 13.3125,
"learning_rate": 1.2567292644757435e-05,
"loss": 1.4135,
"step": 12500
},
{
"epoch": 1.3140756302521008,
"grad_norm": 12.875,
"learning_rate": 1.255946791862285e-05,
"loss": 1.4885,
"step": 12510
},
{
"epoch": 1.315126050420168,
"grad_norm": 12.0,
"learning_rate": 1.2551643192488264e-05,
"loss": 1.3126,
"step": 12520
},
{
"epoch": 1.3161764705882353,
"grad_norm": 17.875,
"learning_rate": 1.2543818466353679e-05,
"loss": 1.2657,
"step": 12530
},
{
"epoch": 1.3172268907563025,
"grad_norm": 17.5,
"learning_rate": 1.2535993740219092e-05,
"loss": 1.389,
"step": 12540
},
{
"epoch": 1.3182773109243697,
"grad_norm": 17.125,
"learning_rate": 1.2528169014084508e-05,
"loss": 1.4456,
"step": 12550
},
{
"epoch": 1.319327731092437,
"grad_norm": 15.375,
"learning_rate": 1.2520344287949924e-05,
"loss": 1.2432,
"step": 12560
},
{
"epoch": 1.3203781512605042,
"grad_norm": 13.4375,
"learning_rate": 1.2512519561815337e-05,
"loss": 0.82,
"step": 12570
},
{
"epoch": 1.3214285714285714,
"grad_norm": 15.5625,
"learning_rate": 1.2504694835680754e-05,
"loss": 1.4444,
"step": 12580
},
{
"epoch": 1.3224789915966386,
"grad_norm": 12.625,
"learning_rate": 1.2496870109546166e-05,
"loss": 1.4157,
"step": 12590
},
{
"epoch": 1.3235294117647058,
"grad_norm": 11.8125,
"learning_rate": 1.2489045383411583e-05,
"loss": 1.3183,
"step": 12600
},
{
"epoch": 1.324579831932773,
"grad_norm": 18.875,
"learning_rate": 1.2481220657276996e-05,
"loss": 1.5178,
"step": 12610
},
{
"epoch": 1.3256302521008403,
"grad_norm": 12.3125,
"learning_rate": 1.2473395931142412e-05,
"loss": 1.2979,
"step": 12620
},
{
"epoch": 1.3266806722689075,
"grad_norm": 11.75,
"learning_rate": 1.2465571205007825e-05,
"loss": 1.5851,
"step": 12630
},
{
"epoch": 1.3277310924369747,
"grad_norm": 6.15625,
"learning_rate": 1.245774647887324e-05,
"loss": 1.2124,
"step": 12640
},
{
"epoch": 1.328781512605042,
"grad_norm": 13.1875,
"learning_rate": 1.2449921752738656e-05,
"loss": 1.0869,
"step": 12650
},
{
"epoch": 1.3298319327731092,
"grad_norm": 50.5,
"learning_rate": 1.2442097026604069e-05,
"loss": 1.2768,
"step": 12660
},
{
"epoch": 1.3308823529411764,
"grad_norm": 18.125,
"learning_rate": 1.2434272300469485e-05,
"loss": 1.8448,
"step": 12670
},
{
"epoch": 1.3319327731092436,
"grad_norm": 16.25,
"learning_rate": 1.2426447574334898e-05,
"loss": 1.1748,
"step": 12680
},
{
"epoch": 1.3329831932773109,
"grad_norm": 14.875,
"learning_rate": 1.2418622848200315e-05,
"loss": 1.5983,
"step": 12690
},
{
"epoch": 1.334033613445378,
"grad_norm": 9.3125,
"learning_rate": 1.2410798122065728e-05,
"loss": 1.3208,
"step": 12700
},
{
"epoch": 1.3350840336134453,
"grad_norm": 12.625,
"learning_rate": 1.2402973395931144e-05,
"loss": 0.8592,
"step": 12710
},
{
"epoch": 1.3361344537815127,
"grad_norm": 5.15625,
"learning_rate": 1.2395148669796557e-05,
"loss": 1.002,
"step": 12720
},
{
"epoch": 1.33718487394958,
"grad_norm": 13.0625,
"learning_rate": 1.2387323943661973e-05,
"loss": 1.1317,
"step": 12730
},
{
"epoch": 1.3382352941176472,
"grad_norm": 20.375,
"learning_rate": 1.2379499217527386e-05,
"loss": 1.7459,
"step": 12740
},
{
"epoch": 1.3392857142857144,
"grad_norm": 7.5625,
"learning_rate": 1.2371674491392803e-05,
"loss": 1.2175,
"step": 12750
},
{
"epoch": 1.3403361344537816,
"grad_norm": 14.625,
"learning_rate": 1.2363849765258217e-05,
"loss": 1.3963,
"step": 12760
},
{
"epoch": 1.3413865546218489,
"grad_norm": 13.875,
"learning_rate": 1.2356025039123632e-05,
"loss": 1.1123,
"step": 12770
},
{
"epoch": 1.342436974789916,
"grad_norm": 9.0,
"learning_rate": 1.2348200312989047e-05,
"loss": 1.2574,
"step": 12780
},
{
"epoch": 1.3434873949579833,
"grad_norm": 14.4375,
"learning_rate": 1.2340375586854461e-05,
"loss": 1.5698,
"step": 12790
},
{
"epoch": 1.3445378151260505,
"grad_norm": 12.375,
"learning_rate": 1.2332550860719876e-05,
"loss": 1.0556,
"step": 12800
},
{
"epoch": 1.3455882352941178,
"grad_norm": 2.953125,
"learning_rate": 1.232472613458529e-05,
"loss": 0.9861,
"step": 12810
},
{
"epoch": 1.346638655462185,
"grad_norm": 18.375,
"learning_rate": 1.2316901408450705e-05,
"loss": 1.039,
"step": 12820
},
{
"epoch": 1.3476890756302522,
"grad_norm": 18.625,
"learning_rate": 1.2309076682316118e-05,
"loss": 1.6725,
"step": 12830
},
{
"epoch": 1.3487394957983194,
"grad_norm": 12.3125,
"learning_rate": 1.2301251956181534e-05,
"loss": 0.8868,
"step": 12840
},
{
"epoch": 1.3497899159663866,
"grad_norm": 15.5,
"learning_rate": 1.229342723004695e-05,
"loss": 1.8068,
"step": 12850
},
{
"epoch": 1.3508403361344539,
"grad_norm": 10.1875,
"learning_rate": 1.2285602503912364e-05,
"loss": 1.4605,
"step": 12860
},
{
"epoch": 1.351890756302521,
"grad_norm": 2.21875,
"learning_rate": 1.227777777777778e-05,
"loss": 1.1635,
"step": 12870
},
{
"epoch": 1.3529411764705883,
"grad_norm": 12.3125,
"learning_rate": 1.2269953051643193e-05,
"loss": 1.4335,
"step": 12880
},
{
"epoch": 1.3539915966386555,
"grad_norm": 14.3125,
"learning_rate": 1.226212832550861e-05,
"loss": 1.5138,
"step": 12890
},
{
"epoch": 1.3550420168067228,
"grad_norm": 14.125,
"learning_rate": 1.2254303599374022e-05,
"loss": 1.5198,
"step": 12900
},
{
"epoch": 1.35609243697479,
"grad_norm": 5.3125,
"learning_rate": 1.2246478873239439e-05,
"loss": 1.2806,
"step": 12910
},
{
"epoch": 1.3571428571428572,
"grad_norm": 12.25,
"learning_rate": 1.2238654147104852e-05,
"loss": 1.7758,
"step": 12920
},
{
"epoch": 1.3581932773109244,
"grad_norm": 7.53125,
"learning_rate": 1.2230829420970266e-05,
"loss": 1.541,
"step": 12930
},
{
"epoch": 1.3592436974789917,
"grad_norm": 10.0625,
"learning_rate": 1.2223004694835683e-05,
"loss": 1.1173,
"step": 12940
},
{
"epoch": 1.3602941176470589,
"grad_norm": 11.625,
"learning_rate": 1.2215179968701096e-05,
"loss": 1.6626,
"step": 12950
},
{
"epoch": 1.361344537815126,
"grad_norm": 10.625,
"learning_rate": 1.2207355242566512e-05,
"loss": 1.4362,
"step": 12960
},
{
"epoch": 1.3623949579831933,
"grad_norm": 14.1875,
"learning_rate": 1.2199530516431925e-05,
"loss": 1.4961,
"step": 12970
},
{
"epoch": 1.3634453781512605,
"grad_norm": 12.25,
"learning_rate": 1.2191705790297341e-05,
"loss": 1.1132,
"step": 12980
},
{
"epoch": 1.3644957983193278,
"grad_norm": 5.53125,
"learning_rate": 1.2183881064162754e-05,
"loss": 1.3196,
"step": 12990
},
{
"epoch": 1.365546218487395,
"grad_norm": 5.34375,
"learning_rate": 1.217605633802817e-05,
"loss": 1.4392,
"step": 13000
},
{
"epoch": 1.3665966386554622,
"grad_norm": 3.9375,
"learning_rate": 1.2168231611893584e-05,
"loss": 1.4112,
"step": 13010
},
{
"epoch": 1.3676470588235294,
"grad_norm": 13.9375,
"learning_rate": 1.2160406885759e-05,
"loss": 1.3188,
"step": 13020
},
{
"epoch": 1.3686974789915967,
"grad_norm": 14.75,
"learning_rate": 1.2152582159624413e-05,
"loss": 1.1736,
"step": 13030
},
{
"epoch": 1.3697478991596639,
"grad_norm": 16.625,
"learning_rate": 1.214475743348983e-05,
"loss": 1.6647,
"step": 13040
},
{
"epoch": 1.370798319327731,
"grad_norm": 8.375,
"learning_rate": 1.2136932707355244e-05,
"loss": 1.325,
"step": 13050
},
{
"epoch": 1.3718487394957983,
"grad_norm": 18.125,
"learning_rate": 1.2129107981220659e-05,
"loss": 1.6822,
"step": 13060
},
{
"epoch": 1.3728991596638656,
"grad_norm": 12.375,
"learning_rate": 1.2121283255086073e-05,
"loss": 1.1331,
"step": 13070
},
{
"epoch": 1.3739495798319328,
"grad_norm": 4.375,
"learning_rate": 1.2113458528951488e-05,
"loss": 1.4808,
"step": 13080
},
{
"epoch": 1.375,
"grad_norm": 15.75,
"learning_rate": 1.2105633802816902e-05,
"loss": 1.4181,
"step": 13090
},
{
"epoch": 1.3760504201680672,
"grad_norm": 22.0,
"learning_rate": 1.2097809076682317e-05,
"loss": 1.7831,
"step": 13100
},
{
"epoch": 1.3771008403361344,
"grad_norm": 11.4375,
"learning_rate": 1.2089984350547732e-05,
"loss": 1.4553,
"step": 13110
},
{
"epoch": 1.3781512605042017,
"grad_norm": 13.625,
"learning_rate": 1.2082159624413146e-05,
"loss": 1.625,
"step": 13120
},
{
"epoch": 1.379201680672269,
"grad_norm": 16.125,
"learning_rate": 1.2074334898278561e-05,
"loss": 1.2871,
"step": 13130
},
{
"epoch": 1.3802521008403361,
"grad_norm": 15.625,
"learning_rate": 1.2066510172143977e-05,
"loss": 1.0753,
"step": 13140
},
{
"epoch": 1.3813025210084033,
"grad_norm": 13.875,
"learning_rate": 1.205868544600939e-05,
"loss": 1.3164,
"step": 13150
},
{
"epoch": 1.3823529411764706,
"grad_norm": 18.5,
"learning_rate": 1.2050860719874807e-05,
"loss": 1.3868,
"step": 13160
},
{
"epoch": 1.3834033613445378,
"grad_norm": 13.5625,
"learning_rate": 1.204303599374022e-05,
"loss": 1.3437,
"step": 13170
},
{
"epoch": 1.384453781512605,
"grad_norm": 13.0,
"learning_rate": 1.2035211267605636e-05,
"loss": 1.4609,
"step": 13180
},
{
"epoch": 1.3855042016806722,
"grad_norm": 7.65625,
"learning_rate": 1.2027386541471049e-05,
"loss": 1.4758,
"step": 13190
},
{
"epoch": 1.3865546218487395,
"grad_norm": 14.3125,
"learning_rate": 1.2019561815336465e-05,
"loss": 1.1268,
"step": 13200
},
{
"epoch": 1.3876050420168067,
"grad_norm": 4.65625,
"learning_rate": 1.2011737089201878e-05,
"loss": 1.0771,
"step": 13210
},
{
"epoch": 1.388655462184874,
"grad_norm": 17.125,
"learning_rate": 1.2003912363067293e-05,
"loss": 1.1369,
"step": 13220
},
{
"epoch": 1.3897058823529411,
"grad_norm": 17.5,
"learning_rate": 1.1996087636932708e-05,
"loss": 1.4544,
"step": 13230
},
{
"epoch": 1.3907563025210083,
"grad_norm": 18.875,
"learning_rate": 1.1988262910798122e-05,
"loss": 1.1155,
"step": 13240
},
{
"epoch": 1.3918067226890756,
"grad_norm": 12.8125,
"learning_rate": 1.1980438184663539e-05,
"loss": 0.973,
"step": 13250
},
{
"epoch": 1.3928571428571428,
"grad_norm": 19.75,
"learning_rate": 1.1972613458528952e-05,
"loss": 1.501,
"step": 13260
},
{
"epoch": 1.39390756302521,
"grad_norm": 12.625,
"learning_rate": 1.1964788732394368e-05,
"loss": 1.0422,
"step": 13270
},
{
"epoch": 1.3949579831932772,
"grad_norm": 19.625,
"learning_rate": 1.1956964006259781e-05,
"loss": 1.7695,
"step": 13280
},
{
"epoch": 1.3960084033613445,
"grad_norm": 9.9375,
"learning_rate": 1.1949139280125197e-05,
"loss": 0.7722,
"step": 13290
},
{
"epoch": 1.3970588235294117,
"grad_norm": 11.25,
"learning_rate": 1.194131455399061e-05,
"loss": 0.8371,
"step": 13300
},
{
"epoch": 1.398109243697479,
"grad_norm": 11.25,
"learning_rate": 1.1933489827856027e-05,
"loss": 1.4412,
"step": 13310
},
{
"epoch": 1.3991596638655461,
"grad_norm": 10.25,
"learning_rate": 1.192566510172144e-05,
"loss": 1.3016,
"step": 13320
},
{
"epoch": 1.4002100840336134,
"grad_norm": 12.3125,
"learning_rate": 1.1917840375586856e-05,
"loss": 1.5842,
"step": 13330
},
{
"epoch": 1.4012605042016806,
"grad_norm": 16.625,
"learning_rate": 1.191001564945227e-05,
"loss": 0.8069,
"step": 13340
},
{
"epoch": 1.4023109243697478,
"grad_norm": 13.75,
"learning_rate": 1.1902190923317685e-05,
"loss": 1.2223,
"step": 13350
},
{
"epoch": 1.403361344537815,
"grad_norm": 16.0,
"learning_rate": 1.18943661971831e-05,
"loss": 1.4568,
"step": 13360
},
{
"epoch": 1.4044117647058822,
"grad_norm": 5.625,
"learning_rate": 1.1886541471048514e-05,
"loss": 1.0528,
"step": 13370
},
{
"epoch": 1.4054621848739495,
"grad_norm": 11.875,
"learning_rate": 1.1878716744913929e-05,
"loss": 1.503,
"step": 13380
},
{
"epoch": 1.4065126050420167,
"grad_norm": 13.0625,
"learning_rate": 1.1870892018779344e-05,
"loss": 1.5821,
"step": 13390
},
{
"epoch": 1.407563025210084,
"grad_norm": 13.25,
"learning_rate": 1.1863067292644758e-05,
"loss": 1.2337,
"step": 13400
},
{
"epoch": 1.4086134453781511,
"grad_norm": 15.1875,
"learning_rate": 1.1855242566510173e-05,
"loss": 1.6133,
"step": 13410
},
{
"epoch": 1.4096638655462184,
"grad_norm": 15.875,
"learning_rate": 1.1847417840375588e-05,
"loss": 1.632,
"step": 13420
},
{
"epoch": 1.4107142857142856,
"grad_norm": 14.0625,
"learning_rate": 1.1839593114241e-05,
"loss": 1.0302,
"step": 13430
},
{
"epoch": 1.4117647058823528,
"grad_norm": 11.8125,
"learning_rate": 1.1831768388106417e-05,
"loss": 1.6045,
"step": 13440
},
{
"epoch": 1.41281512605042,
"grad_norm": 13.25,
"learning_rate": 1.1823943661971833e-05,
"loss": 1.2232,
"step": 13450
},
{
"epoch": 1.4138655462184873,
"grad_norm": 16.375,
"learning_rate": 1.1816118935837246e-05,
"loss": 1.2389,
"step": 13460
},
{
"epoch": 1.4149159663865547,
"grad_norm": 4.65625,
"learning_rate": 1.1808294209702663e-05,
"loss": 1.2232,
"step": 13470
},
{
"epoch": 1.415966386554622,
"grad_norm": 3.671875,
"learning_rate": 1.1800469483568076e-05,
"loss": 1.2644,
"step": 13480
},
{
"epoch": 1.4170168067226891,
"grad_norm": 11.3125,
"learning_rate": 1.1792644757433492e-05,
"loss": 1.3765,
"step": 13490
},
{
"epoch": 1.4180672268907564,
"grad_norm": 11.5625,
"learning_rate": 1.1784820031298905e-05,
"loss": 1.1586,
"step": 13500
},
{
"epoch": 1.4191176470588236,
"grad_norm": 13.625,
"learning_rate": 1.177699530516432e-05,
"loss": 0.9487,
"step": 13510
},
{
"epoch": 1.4201680672268908,
"grad_norm": 25.125,
"learning_rate": 1.1769170579029734e-05,
"loss": 0.9073,
"step": 13520
},
{
"epoch": 1.421218487394958,
"grad_norm": 20.875,
"learning_rate": 1.1761345852895149e-05,
"loss": 1.4868,
"step": 13530
},
{
"epoch": 1.4222689075630253,
"grad_norm": 22.25,
"learning_rate": 1.1753521126760565e-05,
"loss": 1.6157,
"step": 13540
},
{
"epoch": 1.4233193277310925,
"grad_norm": 18.0,
"learning_rate": 1.1745696400625978e-05,
"loss": 1.4386,
"step": 13550
},
{
"epoch": 1.4243697478991597,
"grad_norm": 15.25,
"learning_rate": 1.1737871674491395e-05,
"loss": 1.4988,
"step": 13560
},
{
"epoch": 1.425420168067227,
"grad_norm": 17.125,
"learning_rate": 1.1730046948356808e-05,
"loss": 0.8417,
"step": 13570
},
{
"epoch": 1.4264705882352942,
"grad_norm": 18.125,
"learning_rate": 1.1722222222222224e-05,
"loss": 1.6105,
"step": 13580
},
{
"epoch": 1.4275210084033614,
"grad_norm": 13.5,
"learning_rate": 1.1714397496087637e-05,
"loss": 1.5684,
"step": 13590
},
{
"epoch": 1.4285714285714286,
"grad_norm": 6.84375,
"learning_rate": 1.1706572769953053e-05,
"loss": 1.2026,
"step": 13600
},
{
"epoch": 1.4296218487394958,
"grad_norm": 16.125,
"learning_rate": 1.1698748043818466e-05,
"loss": 1.4824,
"step": 13610
},
{
"epoch": 1.430672268907563,
"grad_norm": 4.59375,
"learning_rate": 1.1690923317683882e-05,
"loss": 1.2055,
"step": 13620
},
{
"epoch": 1.4317226890756303,
"grad_norm": 14.0,
"learning_rate": 1.1683098591549297e-05,
"loss": 1.2775,
"step": 13630
},
{
"epoch": 1.4327731092436975,
"grad_norm": 12.1875,
"learning_rate": 1.1675273865414712e-05,
"loss": 1.0015,
"step": 13640
},
{
"epoch": 1.4338235294117647,
"grad_norm": 16.25,
"learning_rate": 1.1667449139280126e-05,
"loss": 1.6765,
"step": 13650
},
{
"epoch": 1.434873949579832,
"grad_norm": 13.8125,
"learning_rate": 1.1659624413145541e-05,
"loss": 1.5888,
"step": 13660
},
{
"epoch": 1.4359243697478992,
"grad_norm": 14.5,
"learning_rate": 1.1651799687010956e-05,
"loss": 1.382,
"step": 13670
},
{
"epoch": 1.4369747899159664,
"grad_norm": 12.5625,
"learning_rate": 1.164397496087637e-05,
"loss": 1.1734,
"step": 13680
},
{
"epoch": 1.4380252100840336,
"grad_norm": 13.375,
"learning_rate": 1.1636150234741785e-05,
"loss": 1.091,
"step": 13690
},
{
"epoch": 1.4390756302521008,
"grad_norm": 8.3125,
"learning_rate": 1.16283255086072e-05,
"loss": 1.7296,
"step": 13700
},
{
"epoch": 1.440126050420168,
"grad_norm": 4.59375,
"learning_rate": 1.1620500782472614e-05,
"loss": 0.7291,
"step": 13710
},
{
"epoch": 1.4411764705882353,
"grad_norm": 17.5,
"learning_rate": 1.1612676056338027e-05,
"loss": 1.3262,
"step": 13720
},
{
"epoch": 1.4422268907563025,
"grad_norm": 11.5625,
"learning_rate": 1.1604851330203444e-05,
"loss": 1.1583,
"step": 13730
},
{
"epoch": 1.4432773109243697,
"grad_norm": 20.75,
"learning_rate": 1.159702660406886e-05,
"loss": 1.6833,
"step": 13740
},
{
"epoch": 1.444327731092437,
"grad_norm": 20.0,
"learning_rate": 1.1589201877934273e-05,
"loss": 1.3845,
"step": 13750
},
{
"epoch": 1.4453781512605042,
"grad_norm": 12.5,
"learning_rate": 1.158137715179969e-05,
"loss": 0.8729,
"step": 13760
},
{
"epoch": 1.4464285714285714,
"grad_norm": 18.5,
"learning_rate": 1.1573552425665102e-05,
"loss": 1.3549,
"step": 13770
},
{
"epoch": 1.4474789915966386,
"grad_norm": 16.625,
"learning_rate": 1.1565727699530519e-05,
"loss": 1.3161,
"step": 13780
},
{
"epoch": 1.4485294117647058,
"grad_norm": 22.25,
"learning_rate": 1.1557902973395932e-05,
"loss": 1.431,
"step": 13790
},
{
"epoch": 1.449579831932773,
"grad_norm": 5.5,
"learning_rate": 1.1550078247261346e-05,
"loss": 1.1607,
"step": 13800
},
{
"epoch": 1.4506302521008403,
"grad_norm": 13.625,
"learning_rate": 1.1542253521126761e-05,
"loss": 1.1485,
"step": 13810
},
{
"epoch": 1.4516806722689075,
"grad_norm": 13.9375,
"learning_rate": 1.1534428794992176e-05,
"loss": 1.3236,
"step": 13820
},
{
"epoch": 1.4527310924369747,
"grad_norm": 15.5625,
"learning_rate": 1.1526604068857592e-05,
"loss": 1.0652,
"step": 13830
},
{
"epoch": 1.453781512605042,
"grad_norm": 15.0,
"learning_rate": 1.1518779342723005e-05,
"loss": 1.6018,
"step": 13840
},
{
"epoch": 1.4548319327731092,
"grad_norm": 11.375,
"learning_rate": 1.1510954616588421e-05,
"loss": 0.9,
"step": 13850
},
{
"epoch": 1.4558823529411764,
"grad_norm": 4.375,
"learning_rate": 1.1503129890453834e-05,
"loss": 0.8587,
"step": 13860
},
{
"epoch": 1.4569327731092436,
"grad_norm": 13.125,
"learning_rate": 1.149530516431925e-05,
"loss": 1.1876,
"step": 13870
},
{
"epoch": 1.4579831932773109,
"grad_norm": 12.4375,
"learning_rate": 1.1487480438184663e-05,
"loss": 1.221,
"step": 13880
},
{
"epoch": 1.459033613445378,
"grad_norm": 13.3125,
"learning_rate": 1.147965571205008e-05,
"loss": 1.2262,
"step": 13890
},
{
"epoch": 1.4600840336134453,
"grad_norm": 13.625,
"learning_rate": 1.1471830985915493e-05,
"loss": 1.3682,
"step": 13900
},
{
"epoch": 1.4611344537815127,
"grad_norm": 15.6875,
"learning_rate": 1.1464006259780909e-05,
"loss": 1.432,
"step": 13910
},
{
"epoch": 1.46218487394958,
"grad_norm": 39.75,
"learning_rate": 1.1456181533646322e-05,
"loss": 1.1011,
"step": 13920
},
{
"epoch": 1.4632352941176472,
"grad_norm": 11.6875,
"learning_rate": 1.1448356807511738e-05,
"loss": 0.961,
"step": 13930
},
{
"epoch": 1.4642857142857144,
"grad_norm": 12.375,
"learning_rate": 1.1440532081377153e-05,
"loss": 0.9199,
"step": 13940
},
{
"epoch": 1.4653361344537816,
"grad_norm": 15.5,
"learning_rate": 1.1432707355242568e-05,
"loss": 1.3411,
"step": 13950
},
{
"epoch": 1.4663865546218489,
"grad_norm": 14.8125,
"learning_rate": 1.1424882629107982e-05,
"loss": 1.3674,
"step": 13960
},
{
"epoch": 1.467436974789916,
"grad_norm": 6.53125,
"learning_rate": 1.1417057902973397e-05,
"loss": 1.1447,
"step": 13970
},
{
"epoch": 1.4684873949579833,
"grad_norm": 14.875,
"learning_rate": 1.1409233176838812e-05,
"loss": 1.4201,
"step": 13980
},
{
"epoch": 1.4695378151260505,
"grad_norm": 3.953125,
"learning_rate": 1.1401408450704226e-05,
"loss": 1.3417,
"step": 13990
},
{
"epoch": 1.4705882352941178,
"grad_norm": 8.5625,
"learning_rate": 1.1393583724569641e-05,
"loss": 0.9248,
"step": 14000
},
{
"epoch": 1.471638655462185,
"grad_norm": 19.875,
"learning_rate": 1.1385758998435054e-05,
"loss": 1.7956,
"step": 14010
},
{
"epoch": 1.4726890756302522,
"grad_norm": 18.75,
"learning_rate": 1.137793427230047e-05,
"loss": 1.2636,
"step": 14020
},
{
"epoch": 1.4737394957983194,
"grad_norm": 4.78125,
"learning_rate": 1.1370109546165887e-05,
"loss": 1.4248,
"step": 14030
},
{
"epoch": 1.4747899159663866,
"grad_norm": 4.25,
"learning_rate": 1.13622848200313e-05,
"loss": 1.1417,
"step": 14040
},
{
"epoch": 1.4758403361344539,
"grad_norm": 23.25,
"learning_rate": 1.1354460093896716e-05,
"loss": 1.0123,
"step": 14050
},
{
"epoch": 1.476890756302521,
"grad_norm": 4.34375,
"learning_rate": 1.1346635367762129e-05,
"loss": 1.5176,
"step": 14060
},
{
"epoch": 1.4779411764705883,
"grad_norm": 12.6875,
"learning_rate": 1.1338810641627545e-05,
"loss": 1.6103,
"step": 14070
},
{
"epoch": 1.4789915966386555,
"grad_norm": 12.9375,
"learning_rate": 1.1330985915492958e-05,
"loss": 2.1391,
"step": 14080
},
{
"epoch": 1.4800420168067228,
"grad_norm": 4.625,
"learning_rate": 1.1323161189358375e-05,
"loss": 1.3055,
"step": 14090
},
{
"epoch": 1.48109243697479,
"grad_norm": 12.4375,
"learning_rate": 1.1315336463223788e-05,
"loss": 1.306,
"step": 14100
},
{
"epoch": 1.4821428571428572,
"grad_norm": 16.0,
"learning_rate": 1.1307511737089202e-05,
"loss": 1.2485,
"step": 14110
},
{
"epoch": 1.4831932773109244,
"grad_norm": 22.0,
"learning_rate": 1.1299687010954619e-05,
"loss": 1.2959,
"step": 14120
},
{
"epoch": 1.4842436974789917,
"grad_norm": 18.125,
"learning_rate": 1.1291862284820031e-05,
"loss": 1.419,
"step": 14130
},
{
"epoch": 1.4852941176470589,
"grad_norm": 12.5625,
"learning_rate": 1.1284037558685448e-05,
"loss": 1.2413,
"step": 14140
},
{
"epoch": 1.486344537815126,
"grad_norm": 13.75,
"learning_rate": 1.127621283255086e-05,
"loss": 1.1521,
"step": 14150
},
{
"epoch": 1.4873949579831933,
"grad_norm": 6.71875,
"learning_rate": 1.1268388106416277e-05,
"loss": 1.0156,
"step": 14160
},
{
"epoch": 1.4884453781512605,
"grad_norm": 5.40625,
"learning_rate": 1.126056338028169e-05,
"loss": 1.4527,
"step": 14170
},
{
"epoch": 1.4894957983193278,
"grad_norm": 14.0,
"learning_rate": 1.1252738654147106e-05,
"loss": 1.3498,
"step": 14180
},
{
"epoch": 1.490546218487395,
"grad_norm": 4.1875,
"learning_rate": 1.124491392801252e-05,
"loss": 1.7654,
"step": 14190
},
{
"epoch": 1.4915966386554622,
"grad_norm": 13.4375,
"learning_rate": 1.1237089201877936e-05,
"loss": 1.0088,
"step": 14200
},
{
"epoch": 1.4926470588235294,
"grad_norm": 13.5,
"learning_rate": 1.1229264475743349e-05,
"loss": 1.1831,
"step": 14210
},
{
"epoch": 1.4936974789915967,
"grad_norm": 11.5,
"learning_rate": 1.1221439749608765e-05,
"loss": 1.472,
"step": 14220
},
{
"epoch": 1.4947478991596639,
"grad_norm": 21.5,
"learning_rate": 1.121361502347418e-05,
"loss": 1.0679,
"step": 14230
},
{
"epoch": 1.495798319327731,
"grad_norm": 20.0,
"learning_rate": 1.1205790297339594e-05,
"loss": 1.1635,
"step": 14240
},
{
"epoch": 1.4968487394957983,
"grad_norm": 12.25,
"learning_rate": 1.1197965571205009e-05,
"loss": 1.3566,
"step": 14250
},
{
"epoch": 1.4978991596638656,
"grad_norm": 12.25,
"learning_rate": 1.1190140845070424e-05,
"loss": 1.3279,
"step": 14260
},
{
"epoch": 1.4989495798319328,
"grad_norm": 18.125,
"learning_rate": 1.1182316118935838e-05,
"loss": 1.6923,
"step": 14270
},
{
"epoch": 1.5,
"grad_norm": 22.375,
"learning_rate": 1.1174491392801253e-05,
"loss": 1.9083,
"step": 14280
},
{
"epoch": 1.5010504201680672,
"grad_norm": 10.4375,
"learning_rate": 1.1166666666666668e-05,
"loss": 1.1433,
"step": 14290
},
{
"epoch": 1.5021008403361344,
"grad_norm": 8.9375,
"learning_rate": 1.115884194053208e-05,
"loss": 1.1706,
"step": 14300
},
{
"epoch": 1.5031512605042017,
"grad_norm": 18.75,
"learning_rate": 1.1151017214397497e-05,
"loss": 1.2276,
"step": 14310
},
{
"epoch": 1.504201680672269,
"grad_norm": 5.125,
"learning_rate": 1.1143192488262913e-05,
"loss": 1.0959,
"step": 14320
},
{
"epoch": 1.5052521008403361,
"grad_norm": 12.375,
"learning_rate": 1.1135367762128326e-05,
"loss": 1.2093,
"step": 14330
},
{
"epoch": 1.5063025210084033,
"grad_norm": 5.6875,
"learning_rate": 1.1127543035993743e-05,
"loss": 1.4079,
"step": 14340
},
{
"epoch": 1.5073529411764706,
"grad_norm": 16.875,
"learning_rate": 1.1119718309859156e-05,
"loss": 1.677,
"step": 14350
},
{
"epoch": 1.5084033613445378,
"grad_norm": 13.625,
"learning_rate": 1.1111893583724572e-05,
"loss": 1.8673,
"step": 14360
},
{
"epoch": 1.509453781512605,
"grad_norm": 26.125,
"learning_rate": 1.1104068857589985e-05,
"loss": 1.7775,
"step": 14370
},
{
"epoch": 1.5105042016806722,
"grad_norm": 12.1875,
"learning_rate": 1.1096244131455401e-05,
"loss": 1.238,
"step": 14380
},
{
"epoch": 1.5115546218487395,
"grad_norm": 34.5,
"learning_rate": 1.1088419405320814e-05,
"loss": 1.2416,
"step": 14390
},
{
"epoch": 1.5126050420168067,
"grad_norm": 6.25,
"learning_rate": 1.1080594679186229e-05,
"loss": 1.3153,
"step": 14400
},
{
"epoch": 1.513655462184874,
"grad_norm": 19.25,
"learning_rate": 1.1072769953051643e-05,
"loss": 1.5286,
"step": 14410
},
{
"epoch": 1.5147058823529411,
"grad_norm": 12.5625,
"learning_rate": 1.1064945226917058e-05,
"loss": 1.6686,
"step": 14420
},
{
"epoch": 1.5157563025210083,
"grad_norm": 13.5,
"learning_rate": 1.1057120500782474e-05,
"loss": 1.2533,
"step": 14430
},
{
"epoch": 1.5168067226890756,
"grad_norm": 13.25,
"learning_rate": 1.1049295774647887e-05,
"loss": 1.0326,
"step": 14440
},
{
"epoch": 1.5178571428571428,
"grad_norm": 8.4375,
"learning_rate": 1.1041471048513304e-05,
"loss": 1.0656,
"step": 14450
},
{
"epoch": 1.51890756302521,
"grad_norm": 13.8125,
"learning_rate": 1.1033646322378717e-05,
"loss": 1.2902,
"step": 14460
},
{
"epoch": 1.5199579831932772,
"grad_norm": 17.875,
"learning_rate": 1.1025821596244133e-05,
"loss": 1.4418,
"step": 14470
},
{
"epoch": 1.5210084033613445,
"grad_norm": 14.4375,
"learning_rate": 1.1017996870109546e-05,
"loss": 0.9938,
"step": 14480
},
{
"epoch": 1.5220588235294117,
"grad_norm": 9.25,
"learning_rate": 1.1010172143974962e-05,
"loss": 0.9951,
"step": 14490
},
{
"epoch": 1.523109243697479,
"grad_norm": 5.3125,
"learning_rate": 1.1002347417840375e-05,
"loss": 1.5267,
"step": 14500
},
{
"epoch": 1.5241596638655461,
"grad_norm": 14.375,
"learning_rate": 1.0994522691705792e-05,
"loss": 1.1844,
"step": 14510
},
{
"epoch": 1.5252100840336134,
"grad_norm": 22.625,
"learning_rate": 1.0986697965571206e-05,
"loss": 1.5373,
"step": 14520
},
{
"epoch": 1.5262605042016806,
"grad_norm": 15.9375,
"learning_rate": 1.0978873239436621e-05,
"loss": 1.5569,
"step": 14530
},
{
"epoch": 1.5273109243697478,
"grad_norm": 14.0,
"learning_rate": 1.0971048513302036e-05,
"loss": 1.4319,
"step": 14540
},
{
"epoch": 1.528361344537815,
"grad_norm": 4.53125,
"learning_rate": 1.096322378716745e-05,
"loss": 1.3139,
"step": 14550
},
{
"epoch": 1.5294117647058822,
"grad_norm": 20.25,
"learning_rate": 1.0955399061032865e-05,
"loss": 1.7041,
"step": 14560
},
{
"epoch": 1.5304621848739495,
"grad_norm": 17.875,
"learning_rate": 1.094757433489828e-05,
"loss": 1.3248,
"step": 14570
},
{
"epoch": 1.5315126050420167,
"grad_norm": 6.0,
"learning_rate": 1.0939749608763694e-05,
"loss": 0.9728,
"step": 14580
},
{
"epoch": 1.532563025210084,
"grad_norm": 15.1875,
"learning_rate": 1.0931924882629107e-05,
"loss": 1.0219,
"step": 14590
},
{
"epoch": 1.5336134453781511,
"grad_norm": 21.875,
"learning_rate": 1.0924100156494524e-05,
"loss": 1.2215,
"step": 14600
},
{
"epoch": 1.5346638655462184,
"grad_norm": 16.5,
"learning_rate": 1.0916275430359936e-05,
"loss": 1.3004,
"step": 14610
},
{
"epoch": 1.5357142857142856,
"grad_norm": 5.59375,
"learning_rate": 1.0908450704225353e-05,
"loss": 1.0769,
"step": 14620
},
{
"epoch": 1.5367647058823528,
"grad_norm": 17.875,
"learning_rate": 1.090062597809077e-05,
"loss": 1.2735,
"step": 14630
},
{
"epoch": 1.53781512605042,
"grad_norm": 20.25,
"learning_rate": 1.0892801251956182e-05,
"loss": 1.1559,
"step": 14640
},
{
"epoch": 1.5388655462184873,
"grad_norm": 28.75,
"learning_rate": 1.0884976525821598e-05,
"loss": 1.9156,
"step": 14650
},
{
"epoch": 1.5399159663865545,
"grad_norm": 15.4375,
"learning_rate": 1.0877151799687011e-05,
"loss": 1.2164,
"step": 14660
},
{
"epoch": 1.5409663865546217,
"grad_norm": 16.5,
"learning_rate": 1.0869327073552428e-05,
"loss": 1.8587,
"step": 14670
},
{
"epoch": 1.542016806722689,
"grad_norm": 7.15625,
"learning_rate": 1.086150234741784e-05,
"loss": 1.185,
"step": 14680
},
{
"epoch": 1.5430672268907561,
"grad_norm": 14.5,
"learning_rate": 1.0853677621283255e-05,
"loss": 1.1462,
"step": 14690
},
{
"epoch": 1.5441176470588234,
"grad_norm": 10.5,
"learning_rate": 1.084585289514867e-05,
"loss": 1.1659,
"step": 14700
},
{
"epoch": 1.5451680672268906,
"grad_norm": 40.75,
"learning_rate": 1.0838028169014085e-05,
"loss": 1.4489,
"step": 14710
},
{
"epoch": 1.5462184873949578,
"grad_norm": 15.875,
"learning_rate": 1.0830203442879501e-05,
"loss": 1.2954,
"step": 14720
},
{
"epoch": 1.5472689075630253,
"grad_norm": 4.3125,
"learning_rate": 1.0822378716744914e-05,
"loss": 1.5098,
"step": 14730
},
{
"epoch": 1.5483193277310925,
"grad_norm": 51.5,
"learning_rate": 1.081455399061033e-05,
"loss": 1.268,
"step": 14740
},
{
"epoch": 1.5493697478991597,
"grad_norm": 17.75,
"learning_rate": 1.0806729264475743e-05,
"loss": 1.5536,
"step": 14750
},
{
"epoch": 1.550420168067227,
"grad_norm": 14.8125,
"learning_rate": 1.079890453834116e-05,
"loss": 1.1089,
"step": 14760
},
{
"epoch": 1.5514705882352942,
"grad_norm": 9.75,
"learning_rate": 1.0791079812206573e-05,
"loss": 1.3357,
"step": 14770
},
{
"epoch": 1.5525210084033614,
"grad_norm": 4.65625,
"learning_rate": 1.0783255086071989e-05,
"loss": 1.5138,
"step": 14780
},
{
"epoch": 1.5535714285714286,
"grad_norm": 42.5,
"learning_rate": 1.0775430359937402e-05,
"loss": 1.6304,
"step": 14790
},
{
"epoch": 1.5546218487394958,
"grad_norm": 13.25,
"learning_rate": 1.0767605633802818e-05,
"loss": 1.5183,
"step": 14800
},
{
"epoch": 1.555672268907563,
"grad_norm": 13.0,
"learning_rate": 1.0759780907668233e-05,
"loss": 1.4326,
"step": 14810
},
{
"epoch": 1.5567226890756303,
"grad_norm": 4.84375,
"learning_rate": 1.0751956181533648e-05,
"loss": 0.8125,
"step": 14820
},
{
"epoch": 1.5577731092436975,
"grad_norm": 12.5625,
"learning_rate": 1.0744131455399062e-05,
"loss": 1.1498,
"step": 14830
},
{
"epoch": 1.5588235294117647,
"grad_norm": 5.5625,
"learning_rate": 1.0736306729264477e-05,
"loss": 1.069,
"step": 14840
},
{
"epoch": 1.559873949579832,
"grad_norm": 15.0,
"learning_rate": 1.0728482003129892e-05,
"loss": 1.6658,
"step": 14850
},
{
"epoch": 1.5609243697478992,
"grad_norm": 19.375,
"learning_rate": 1.0720657276995306e-05,
"loss": 1.3619,
"step": 14860
},
{
"epoch": 1.5619747899159664,
"grad_norm": 1312.0,
"learning_rate": 1.0712832550860721e-05,
"loss": 1.1617,
"step": 14870
},
{
"epoch": 1.5630252100840336,
"grad_norm": 9.875,
"learning_rate": 1.0705007824726134e-05,
"loss": 1.1236,
"step": 14880
},
{
"epoch": 1.5640756302521008,
"grad_norm": 17.0,
"learning_rate": 1.069718309859155e-05,
"loss": 1.0404,
"step": 14890
},
{
"epoch": 1.565126050420168,
"grad_norm": 12.9375,
"learning_rate": 1.0689358372456963e-05,
"loss": 1.2872,
"step": 14900
},
{
"epoch": 1.5661764705882353,
"grad_norm": 12.75,
"learning_rate": 1.068153364632238e-05,
"loss": 1.3784,
"step": 14910
},
{
"epoch": 1.5672268907563025,
"grad_norm": 6.34375,
"learning_rate": 1.0673708920187796e-05,
"loss": 1.1165,
"step": 14920
},
{
"epoch": 1.5682773109243697,
"grad_norm": 22.625,
"learning_rate": 1.0665884194053209e-05,
"loss": 1.2642,
"step": 14930
},
{
"epoch": 1.569327731092437,
"grad_norm": 18.5,
"learning_rate": 1.0658059467918625e-05,
"loss": 1.3787,
"step": 14940
},
{
"epoch": 1.5703781512605042,
"grad_norm": 11.3125,
"learning_rate": 1.0650234741784038e-05,
"loss": 1.1048,
"step": 14950
},
{
"epoch": 1.5714285714285714,
"grad_norm": 20.75,
"learning_rate": 1.0642410015649454e-05,
"loss": 1.2615,
"step": 14960
},
{
"epoch": 1.5724789915966386,
"grad_norm": 19.875,
"learning_rate": 1.0634585289514867e-05,
"loss": 0.7653,
"step": 14970
},
{
"epoch": 1.5735294117647058,
"grad_norm": 17.875,
"learning_rate": 1.0626760563380282e-05,
"loss": 1.3239,
"step": 14980
},
{
"epoch": 1.574579831932773,
"grad_norm": 18.375,
"learning_rate": 1.0618935837245697e-05,
"loss": 1.7513,
"step": 14990
},
{
"epoch": 1.5756302521008403,
"grad_norm": 17.0,
"learning_rate": 1.0611111111111111e-05,
"loss": 1.4967,
"step": 15000
},
{
"epoch": 1.5766806722689075,
"grad_norm": 17.25,
"learning_rate": 1.0603286384976528e-05,
"loss": 1.2066,
"step": 15010
},
{
"epoch": 1.5777310924369747,
"grad_norm": 5.65625,
"learning_rate": 1.059546165884194e-05,
"loss": 1.2133,
"step": 15020
},
{
"epoch": 1.5787815126050422,
"grad_norm": 13.5625,
"learning_rate": 1.0587636932707357e-05,
"loss": 1.6947,
"step": 15030
},
{
"epoch": 1.5798319327731094,
"grad_norm": 12.5625,
"learning_rate": 1.057981220657277e-05,
"loss": 1.1693,
"step": 15040
},
{
"epoch": 1.5808823529411766,
"grad_norm": 18.75,
"learning_rate": 1.0571987480438186e-05,
"loss": 1.1638,
"step": 15050
},
{
"epoch": 1.5819327731092439,
"grad_norm": 13.1875,
"learning_rate": 1.05641627543036e-05,
"loss": 1.203,
"step": 15060
},
{
"epoch": 1.582983193277311,
"grad_norm": 12.5625,
"learning_rate": 1.0556338028169016e-05,
"loss": 1.6161,
"step": 15070
},
{
"epoch": 1.5840336134453783,
"grad_norm": 11.625,
"learning_rate": 1.0548513302034429e-05,
"loss": 1.8032,
"step": 15080
},
{
"epoch": 1.5850840336134455,
"grad_norm": 12.125,
"learning_rate": 1.0540688575899845e-05,
"loss": 1.3926,
"step": 15090
},
{
"epoch": 1.5861344537815127,
"grad_norm": 13.25,
"learning_rate": 1.0532863849765258e-05,
"loss": 1.7814,
"step": 15100
},
{
"epoch": 1.58718487394958,
"grad_norm": 13.4375,
"learning_rate": 1.0525039123630674e-05,
"loss": 1.0973,
"step": 15110
},
{
"epoch": 1.5882352941176472,
"grad_norm": 12.875,
"learning_rate": 1.0517214397496089e-05,
"loss": 1.2523,
"step": 15120
},
{
"epoch": 1.5892857142857144,
"grad_norm": 12.1875,
"learning_rate": 1.0509389671361504e-05,
"loss": 1.5806,
"step": 15130
},
{
"epoch": 1.5903361344537816,
"grad_norm": 5.53125,
"learning_rate": 1.0501564945226918e-05,
"loss": 1.2682,
"step": 15140
},
{
"epoch": 1.5913865546218489,
"grad_norm": 21.0,
"learning_rate": 1.0493740219092333e-05,
"loss": 1.1658,
"step": 15150
},
{
"epoch": 1.592436974789916,
"grad_norm": 9.9375,
"learning_rate": 1.0485915492957747e-05,
"loss": 1.0203,
"step": 15160
},
{
"epoch": 1.5934873949579833,
"grad_norm": 15.8125,
"learning_rate": 1.047809076682316e-05,
"loss": 1.3342,
"step": 15170
},
{
"epoch": 1.5945378151260505,
"grad_norm": 19.625,
"learning_rate": 1.0470266040688577e-05,
"loss": 1.0889,
"step": 15180
},
{
"epoch": 1.5955882352941178,
"grad_norm": 8.5625,
"learning_rate": 1.046244131455399e-05,
"loss": 1.4284,
"step": 15190
},
{
"epoch": 1.596638655462185,
"grad_norm": 24.375,
"learning_rate": 1.0454616588419406e-05,
"loss": 1.3882,
"step": 15200
},
{
"epoch": 1.5976890756302522,
"grad_norm": 14.9375,
"learning_rate": 1.0446791862284822e-05,
"loss": 1.2336,
"step": 15210
},
{
"epoch": 1.5987394957983194,
"grad_norm": 4.0,
"learning_rate": 1.0438967136150235e-05,
"loss": 0.8792,
"step": 15220
},
{
"epoch": 1.5997899159663866,
"grad_norm": 14.3125,
"learning_rate": 1.0431142410015652e-05,
"loss": 1.4848,
"step": 15230
},
{
"epoch": 1.6008403361344539,
"grad_norm": 16.625,
"learning_rate": 1.0423317683881065e-05,
"loss": 1.7036,
"step": 15240
},
{
"epoch": 1.601890756302521,
"grad_norm": 12.625,
"learning_rate": 1.0415492957746481e-05,
"loss": 1.0141,
"step": 15250
},
{
"epoch": 1.6029411764705883,
"grad_norm": 9.4375,
"learning_rate": 1.0407668231611894e-05,
"loss": 1.3178,
"step": 15260
},
{
"epoch": 1.6039915966386555,
"grad_norm": 5.28125,
"learning_rate": 1.0399843505477309e-05,
"loss": 0.9306,
"step": 15270
},
{
"epoch": 1.6050420168067228,
"grad_norm": 16.125,
"learning_rate": 1.0392018779342723e-05,
"loss": 1.1392,
"step": 15280
},
{
"epoch": 1.60609243697479,
"grad_norm": 8.5625,
"learning_rate": 1.0384194053208138e-05,
"loss": 0.8321,
"step": 15290
},
{
"epoch": 1.6071428571428572,
"grad_norm": 13.3125,
"learning_rate": 1.0376369327073554e-05,
"loss": 0.957,
"step": 15300
},
{
"epoch": 1.6081932773109244,
"grad_norm": 17.125,
"learning_rate": 1.0368544600938967e-05,
"loss": 1.4487,
"step": 15310
},
{
"epoch": 1.6092436974789917,
"grad_norm": 11.6875,
"learning_rate": 1.0360719874804384e-05,
"loss": 1.2441,
"step": 15320
},
{
"epoch": 1.6102941176470589,
"grad_norm": 16.0,
"learning_rate": 1.0352895148669797e-05,
"loss": 1.313,
"step": 15330
},
{
"epoch": 1.611344537815126,
"grad_norm": 5.96875,
"learning_rate": 1.0345070422535213e-05,
"loss": 1.4231,
"step": 15340
},
{
"epoch": 1.6123949579831933,
"grad_norm": 10.125,
"learning_rate": 1.0337245696400626e-05,
"loss": 1.2076,
"step": 15350
},
{
"epoch": 1.6134453781512605,
"grad_norm": 17.125,
"learning_rate": 1.0329420970266042e-05,
"loss": 1.6903,
"step": 15360
},
{
"epoch": 1.6144957983193278,
"grad_norm": 5.3125,
"learning_rate": 1.0321596244131455e-05,
"loss": 0.9224,
"step": 15370
},
{
"epoch": 1.615546218487395,
"grad_norm": 9.9375,
"learning_rate": 1.0313771517996872e-05,
"loss": 1.5575,
"step": 15380
},
{
"epoch": 1.6165966386554622,
"grad_norm": 10.25,
"learning_rate": 1.0305946791862285e-05,
"loss": 1.2729,
"step": 15390
},
{
"epoch": 1.6176470588235294,
"grad_norm": 9.0,
"learning_rate": 1.0298122065727701e-05,
"loss": 1.1269,
"step": 15400
},
{
"epoch": 1.6186974789915967,
"grad_norm": 17.75,
"learning_rate": 1.0290297339593116e-05,
"loss": 1.5302,
"step": 15410
},
{
"epoch": 1.6197478991596639,
"grad_norm": 13.75,
"learning_rate": 1.028247261345853e-05,
"loss": 1.2133,
"step": 15420
},
{
"epoch": 1.620798319327731,
"grad_norm": 5.625,
"learning_rate": 1.0274647887323945e-05,
"loss": 0.8847,
"step": 15430
},
{
"epoch": 1.6218487394957983,
"grad_norm": 20.0,
"learning_rate": 1.026682316118936e-05,
"loss": 1.1911,
"step": 15440
},
{
"epoch": 1.6228991596638656,
"grad_norm": 8.6875,
"learning_rate": 1.0258998435054774e-05,
"loss": 1.1295,
"step": 15450
},
{
"epoch": 1.6239495798319328,
"grad_norm": 21.0,
"learning_rate": 1.0251173708920189e-05,
"loss": 1.133,
"step": 15460
},
{
"epoch": 1.625,
"grad_norm": 11.1875,
"learning_rate": 1.0243348982785603e-05,
"loss": 2.0244,
"step": 15470
},
{
"epoch": 1.6260504201680672,
"grad_norm": 15.1875,
"learning_rate": 1.0235524256651016e-05,
"loss": 1.7165,
"step": 15480
},
{
"epoch": 1.6271008403361344,
"grad_norm": 6.0625,
"learning_rate": 1.0227699530516433e-05,
"loss": 1.1485,
"step": 15490
},
{
"epoch": 1.6281512605042017,
"grad_norm": 17.875,
"learning_rate": 1.0219874804381849e-05,
"loss": 0.9418,
"step": 15500
},
{
"epoch": 1.629201680672269,
"grad_norm": 17.875,
"learning_rate": 1.0212050078247262e-05,
"loss": 1.9413,
"step": 15510
},
{
"epoch": 1.6302521008403361,
"grad_norm": 9.5,
"learning_rate": 1.0204225352112678e-05,
"loss": 1.4225,
"step": 15520
},
{
"epoch": 1.6313025210084033,
"grad_norm": 14.0625,
"learning_rate": 1.0196400625978091e-05,
"loss": 1.3062,
"step": 15530
},
{
"epoch": 1.6323529411764706,
"grad_norm": 11.75,
"learning_rate": 1.0188575899843508e-05,
"loss": 1.0521,
"step": 15540
},
{
"epoch": 1.6334033613445378,
"grad_norm": 13.1875,
"learning_rate": 1.018075117370892e-05,
"loss": 1.7687,
"step": 15550
},
{
"epoch": 1.634453781512605,
"grad_norm": 15.875,
"learning_rate": 1.0172926447574335e-05,
"loss": 1.1349,
"step": 15560
},
{
"epoch": 1.6355042016806722,
"grad_norm": 23.5,
"learning_rate": 1.016510172143975e-05,
"loss": 1.6808,
"step": 15570
},
{
"epoch": 1.6365546218487395,
"grad_norm": 22.875,
"learning_rate": 1.0157276995305165e-05,
"loss": 1.6487,
"step": 15580
},
{
"epoch": 1.6376050420168067,
"grad_norm": 20.875,
"learning_rate": 1.014945226917058e-05,
"loss": 1.1881,
"step": 15590
},
{
"epoch": 1.638655462184874,
"grad_norm": 12.5625,
"learning_rate": 1.0141627543035994e-05,
"loss": 1.3333,
"step": 15600
},
{
"epoch": 1.6397058823529411,
"grad_norm": 19.125,
"learning_rate": 1.013380281690141e-05,
"loss": 1.3079,
"step": 15610
},
{
"epoch": 1.6407563025210083,
"grad_norm": 17.875,
"learning_rate": 1.0125978090766823e-05,
"loss": 1.3545,
"step": 15620
},
{
"epoch": 1.6418067226890756,
"grad_norm": 16.5,
"learning_rate": 1.011815336463224e-05,
"loss": 1.3019,
"step": 15630
},
{
"epoch": 1.6428571428571428,
"grad_norm": 15.25,
"learning_rate": 1.0110328638497653e-05,
"loss": 1.2696,
"step": 15640
},
{
"epoch": 1.64390756302521,
"grad_norm": 17.125,
"learning_rate": 1.0102503912363069e-05,
"loss": 1.5522,
"step": 15650
},
{
"epoch": 1.6449579831932772,
"grad_norm": 13.8125,
"learning_rate": 1.0094679186228482e-05,
"loss": 1.0805,
"step": 15660
},
{
"epoch": 1.6460084033613445,
"grad_norm": 11.875,
"learning_rate": 1.0086854460093898e-05,
"loss": 1.3991,
"step": 15670
},
{
"epoch": 1.6470588235294117,
"grad_norm": 14.125,
"learning_rate": 1.0079029733959311e-05,
"loss": 1.4535,
"step": 15680
},
{
"epoch": 1.648109243697479,
"grad_norm": 12.375,
"learning_rate": 1.0071205007824727e-05,
"loss": 1.3899,
"step": 15690
},
{
"epoch": 1.6491596638655461,
"grad_norm": 6.0625,
"learning_rate": 1.0063380281690142e-05,
"loss": 1.293,
"step": 15700
},
{
"epoch": 1.6502100840336134,
"grad_norm": 11.5,
"learning_rate": 1.0055555555555557e-05,
"loss": 1.3171,
"step": 15710
},
{
"epoch": 1.6512605042016806,
"grad_norm": 14.875,
"learning_rate": 1.0047730829420971e-05,
"loss": 1.7538,
"step": 15720
},
{
"epoch": 1.6523109243697478,
"grad_norm": 12.1875,
"learning_rate": 1.0039906103286386e-05,
"loss": 1.9343,
"step": 15730
},
{
"epoch": 1.653361344537815,
"grad_norm": 38.0,
"learning_rate": 1.00320813771518e-05,
"loss": 1.2496,
"step": 15740
},
{
"epoch": 1.6544117647058822,
"grad_norm": 5.5625,
"learning_rate": 1.0024256651017215e-05,
"loss": 1.0771,
"step": 15750
},
{
"epoch": 1.6554621848739495,
"grad_norm": 12.6875,
"learning_rate": 1.001643192488263e-05,
"loss": 1.5892,
"step": 15760
},
{
"epoch": 1.6565126050420167,
"grad_norm": 18.75,
"learning_rate": 1.0008607198748043e-05,
"loss": 0.826,
"step": 15770
},
{
"epoch": 1.657563025210084,
"grad_norm": 12.6875,
"learning_rate": 1.000078247261346e-05,
"loss": 1.4061,
"step": 15780
},
{
"epoch": 1.6586134453781511,
"grad_norm": 12.4375,
"learning_rate": 9.992957746478874e-06,
"loss": 0.9534,
"step": 15790
},
{
"epoch": 1.6596638655462184,
"grad_norm": 12.625,
"learning_rate": 9.985133020344289e-06,
"loss": 1.2486,
"step": 15800
},
{
"epoch": 1.6607142857142856,
"grad_norm": 16.75,
"learning_rate": 9.977308294209703e-06,
"loss": 1.5199,
"step": 15810
},
{
"epoch": 1.6617647058823528,
"grad_norm": 16.625,
"learning_rate": 9.969483568075118e-06,
"loss": 1.8439,
"step": 15820
},
{
"epoch": 1.66281512605042,
"grad_norm": 17.375,
"learning_rate": 9.961658841940533e-06,
"loss": 1.3802,
"step": 15830
},
{
"epoch": 1.6638655462184873,
"grad_norm": 5.4375,
"learning_rate": 9.953834115805949e-06,
"loss": 1.2319,
"step": 15840
},
{
"epoch": 1.6649159663865545,
"grad_norm": 11.0,
"learning_rate": 9.946009389671362e-06,
"loss": 1.0634,
"step": 15850
},
{
"epoch": 1.6659663865546217,
"grad_norm": 19.125,
"learning_rate": 9.938184663536777e-06,
"loss": 0.9874,
"step": 15860
},
{
"epoch": 1.667016806722689,
"grad_norm": 15.1875,
"learning_rate": 9.930359937402191e-06,
"loss": 1.1704,
"step": 15870
},
{
"epoch": 1.6680672268907561,
"grad_norm": 5.03125,
"learning_rate": 9.922535211267606e-06,
"loss": 1.3389,
"step": 15880
},
{
"epoch": 1.6691176470588234,
"grad_norm": 16.625,
"learning_rate": 9.91471048513302e-06,
"loss": 1.357,
"step": 15890
},
{
"epoch": 1.6701680672268906,
"grad_norm": 12.75,
"learning_rate": 9.906885758998435e-06,
"loss": 1.5127,
"step": 15900
},
{
"epoch": 1.6712184873949578,
"grad_norm": 12.3125,
"learning_rate": 9.89906103286385e-06,
"loss": 1.2742,
"step": 15910
},
{
"epoch": 1.6722689075630253,
"grad_norm": 12.1875,
"learning_rate": 9.891236306729264e-06,
"loss": 1.429,
"step": 15920
},
{
"epoch": 1.6733193277310925,
"grad_norm": 19.75,
"learning_rate": 9.883411580594679e-06,
"loss": 0.8065,
"step": 15930
},
{
"epoch": 1.6743697478991597,
"grad_norm": 16.5,
"learning_rate": 9.875586854460095e-06,
"loss": 1.213,
"step": 15940
},
{
"epoch": 1.675420168067227,
"grad_norm": 28.875,
"learning_rate": 9.86776212832551e-06,
"loss": 1.1768,
"step": 15950
},
{
"epoch": 1.6764705882352942,
"grad_norm": 13.75,
"learning_rate": 9.859937402190925e-06,
"loss": 1.6273,
"step": 15960
},
{
"epoch": 1.6775210084033614,
"grad_norm": 13.875,
"learning_rate": 9.85211267605634e-06,
"loss": 1.6125,
"step": 15970
},
{
"epoch": 1.6785714285714286,
"grad_norm": 12.6875,
"learning_rate": 9.844287949921754e-06,
"loss": 1.5806,
"step": 15980
},
{
"epoch": 1.6796218487394958,
"grad_norm": 12.375,
"learning_rate": 9.836463223787169e-06,
"loss": 1.8286,
"step": 15990
},
{
"epoch": 1.680672268907563,
"grad_norm": 13.9375,
"learning_rate": 9.828638497652583e-06,
"loss": 0.9962,
"step": 16000
},
{
"epoch": 1.6817226890756303,
"grad_norm": 13.1875,
"learning_rate": 9.820813771517998e-06,
"loss": 1.2894,
"step": 16010
},
{
"epoch": 1.6827731092436975,
"grad_norm": 10.125,
"learning_rate": 9.812989045383413e-06,
"loss": 1.0479,
"step": 16020
},
{
"epoch": 1.6838235294117647,
"grad_norm": 14.0625,
"learning_rate": 9.805164319248827e-06,
"loss": 1.313,
"step": 16030
},
{
"epoch": 1.684873949579832,
"grad_norm": 18.0,
"learning_rate": 9.797339593114242e-06,
"loss": 1.4493,
"step": 16040
},
{
"epoch": 1.6859243697478992,
"grad_norm": 20.125,
"learning_rate": 9.789514866979657e-06,
"loss": 1.5301,
"step": 16050
},
{
"epoch": 1.6869747899159664,
"grad_norm": 15.625,
"learning_rate": 9.781690140845071e-06,
"loss": 1.2022,
"step": 16060
},
{
"epoch": 1.6880252100840336,
"grad_norm": 13.0625,
"learning_rate": 9.773865414710486e-06,
"loss": 1.3654,
"step": 16070
},
{
"epoch": 1.6890756302521008,
"grad_norm": 15.6875,
"learning_rate": 9.7660406885759e-06,
"loss": 1.0885,
"step": 16080
},
{
"epoch": 1.690126050420168,
"grad_norm": 15.25,
"learning_rate": 9.758215962441315e-06,
"loss": 1.5678,
"step": 16090
},
{
"epoch": 1.6911764705882353,
"grad_norm": 19.875,
"learning_rate": 9.75039123630673e-06,
"loss": 1.2938,
"step": 16100
},
{
"epoch": 1.6922268907563025,
"grad_norm": 10.8125,
"learning_rate": 9.742566510172145e-06,
"loss": 1.2581,
"step": 16110
},
{
"epoch": 1.6932773109243697,
"grad_norm": 6.0,
"learning_rate": 9.73474178403756e-06,
"loss": 1.1328,
"step": 16120
},
{
"epoch": 1.694327731092437,
"grad_norm": 10.1875,
"learning_rate": 9.726917057902974e-06,
"loss": 1.191,
"step": 16130
},
{
"epoch": 1.6953781512605042,
"grad_norm": 22.0,
"learning_rate": 9.71909233176839e-06,
"loss": 1.1682,
"step": 16140
},
{
"epoch": 1.6964285714285714,
"grad_norm": 12.0625,
"learning_rate": 9.711267605633803e-06,
"loss": 1.4293,
"step": 16150
},
{
"epoch": 1.6974789915966386,
"grad_norm": 11.6875,
"learning_rate": 9.703442879499218e-06,
"loss": 1.5305,
"step": 16160
},
{
"epoch": 1.6985294117647058,
"grad_norm": 13.5625,
"learning_rate": 9.695618153364633e-06,
"loss": 1.1146,
"step": 16170
},
{
"epoch": 1.699579831932773,
"grad_norm": 6.1875,
"learning_rate": 9.687793427230047e-06,
"loss": 0.9118,
"step": 16180
},
{
"epoch": 1.7006302521008403,
"grad_norm": 13.6875,
"learning_rate": 9.679968701095462e-06,
"loss": 1.4518,
"step": 16190
},
{
"epoch": 1.7016806722689075,
"grad_norm": 19.5,
"learning_rate": 9.672143974960876e-06,
"loss": 1.4933,
"step": 16200
},
{
"epoch": 1.7027310924369747,
"grad_norm": 209.0,
"learning_rate": 9.664319248826291e-06,
"loss": 1.5073,
"step": 16210
},
{
"epoch": 1.7037815126050422,
"grad_norm": 12.0625,
"learning_rate": 9.656494522691706e-06,
"loss": 1.0963,
"step": 16220
},
{
"epoch": 1.7048319327731094,
"grad_norm": 11.875,
"learning_rate": 9.64866979655712e-06,
"loss": 1.6323,
"step": 16230
},
{
"epoch": 1.7058823529411766,
"grad_norm": 12.125,
"learning_rate": 9.640845070422537e-06,
"loss": 1.0694,
"step": 16240
},
{
"epoch": 1.7069327731092439,
"grad_norm": 24.375,
"learning_rate": 9.633020344287951e-06,
"loss": 1.269,
"step": 16250
},
{
"epoch": 1.707983193277311,
"grad_norm": 10.8125,
"learning_rate": 9.625195618153366e-06,
"loss": 1.628,
"step": 16260
},
{
"epoch": 1.7090336134453783,
"grad_norm": 17.625,
"learning_rate": 9.61737089201878e-06,
"loss": 1.8879,
"step": 16270
},
{
"epoch": 1.7100840336134455,
"grad_norm": 23.75,
"learning_rate": 9.609546165884195e-06,
"loss": 1.1923,
"step": 16280
},
{
"epoch": 1.7111344537815127,
"grad_norm": 17.125,
"learning_rate": 9.60172143974961e-06,
"loss": 1.2534,
"step": 16290
},
{
"epoch": 1.71218487394958,
"grad_norm": 8.4375,
"learning_rate": 9.593896713615025e-06,
"loss": 1.1554,
"step": 16300
},
{
"epoch": 1.7132352941176472,
"grad_norm": 13.0625,
"learning_rate": 9.58607198748044e-06,
"loss": 1.0627,
"step": 16310
},
{
"epoch": 1.7142857142857144,
"grad_norm": 10.8125,
"learning_rate": 9.578247261345854e-06,
"loss": 1.0352,
"step": 16320
},
{
"epoch": 1.7153361344537816,
"grad_norm": 10.0,
"learning_rate": 9.570422535211269e-06,
"loss": 0.916,
"step": 16330
},
{
"epoch": 1.7163865546218489,
"grad_norm": 14.875,
"learning_rate": 9.562597809076683e-06,
"loss": 1.0697,
"step": 16340
},
{
"epoch": 1.717436974789916,
"grad_norm": 11.5,
"learning_rate": 9.554773082942098e-06,
"loss": 1.1456,
"step": 16350
},
{
"epoch": 1.7184873949579833,
"grad_norm": 11.9375,
"learning_rate": 9.546948356807513e-06,
"loss": 1.2569,
"step": 16360
},
{
"epoch": 1.7195378151260505,
"grad_norm": 19.125,
"learning_rate": 9.539123630672927e-06,
"loss": 1.0225,
"step": 16370
},
{
"epoch": 1.7205882352941178,
"grad_norm": 6.65625,
"learning_rate": 9.531298904538342e-06,
"loss": 0.8316,
"step": 16380
},
{
"epoch": 1.721638655462185,
"grad_norm": 9.5,
"learning_rate": 9.523474178403757e-06,
"loss": 1.6279,
"step": 16390
},
{
"epoch": 1.7226890756302522,
"grad_norm": 20.625,
"learning_rate": 9.515649452269171e-06,
"loss": 1.3566,
"step": 16400
},
{
"epoch": 1.7237394957983194,
"grad_norm": 5.0625,
"learning_rate": 9.507824726134586e-06,
"loss": 1.3098,
"step": 16410
},
{
"epoch": 1.7247899159663866,
"grad_norm": 18.125,
"learning_rate": 9.5e-06,
"loss": 1.2304,
"step": 16420
},
{
"epoch": 1.7258403361344539,
"grad_norm": 16.75,
"learning_rate": 9.492175273865417e-06,
"loss": 1.2579,
"step": 16430
},
{
"epoch": 1.726890756302521,
"grad_norm": 12.75,
"learning_rate": 9.48435054773083e-06,
"loss": 1.2478,
"step": 16440
},
{
"epoch": 1.7279411764705883,
"grad_norm": 9.8125,
"learning_rate": 9.476525821596244e-06,
"loss": 1.3145,
"step": 16450
},
{
"epoch": 1.7289915966386555,
"grad_norm": 15.0,
"learning_rate": 9.468701095461659e-06,
"loss": 1.2866,
"step": 16460
},
{
"epoch": 1.7300420168067228,
"grad_norm": 13.4375,
"learning_rate": 9.460876369327074e-06,
"loss": 1.5644,
"step": 16470
},
{
"epoch": 1.73109243697479,
"grad_norm": 12.5,
"learning_rate": 9.453051643192488e-06,
"loss": 1.2161,
"step": 16480
},
{
"epoch": 1.7321428571428572,
"grad_norm": 18.875,
"learning_rate": 9.445226917057903e-06,
"loss": 1.2717,
"step": 16490
},
{
"epoch": 1.7331932773109244,
"grad_norm": 11.375,
"learning_rate": 9.437402190923318e-06,
"loss": 1.7148,
"step": 16500
},
{
"epoch": 1.7342436974789917,
"grad_norm": 18.375,
"learning_rate": 9.429577464788732e-06,
"loss": 1.5477,
"step": 16510
},
{
"epoch": 1.7352941176470589,
"grad_norm": 12.3125,
"learning_rate": 9.421752738654147e-06,
"loss": 1.2824,
"step": 16520
},
{
"epoch": 1.736344537815126,
"grad_norm": 18.125,
"learning_rate": 9.413928012519563e-06,
"loss": 1.3925,
"step": 16530
},
{
"epoch": 1.7373949579831933,
"grad_norm": 4.625,
"learning_rate": 9.406103286384978e-06,
"loss": 1.3081,
"step": 16540
},
{
"epoch": 1.7384453781512605,
"grad_norm": 17.5,
"learning_rate": 9.398278560250393e-06,
"loss": 1.089,
"step": 16550
},
{
"epoch": 1.7394957983193278,
"grad_norm": 15.75,
"learning_rate": 9.390453834115807e-06,
"loss": 0.8492,
"step": 16560
},
{
"epoch": 1.740546218487395,
"grad_norm": 203.0,
"learning_rate": 9.382629107981222e-06,
"loss": 1.4314,
"step": 16570
},
{
"epoch": 1.7415966386554622,
"grad_norm": 18.25,
"learning_rate": 9.374804381846637e-06,
"loss": 1.9853,
"step": 16580
},
{
"epoch": 1.7426470588235294,
"grad_norm": 21.75,
"learning_rate": 9.366979655712051e-06,
"loss": 1.6503,
"step": 16590
},
{
"epoch": 1.7436974789915967,
"grad_norm": 9.1875,
"learning_rate": 9.359154929577466e-06,
"loss": 1.2783,
"step": 16600
},
{
"epoch": 1.7447478991596639,
"grad_norm": 12.6875,
"learning_rate": 9.35133020344288e-06,
"loss": 1.5263,
"step": 16610
},
{
"epoch": 1.745798319327731,
"grad_norm": 15.375,
"learning_rate": 9.343505477308295e-06,
"loss": 1.5407,
"step": 16620
},
{
"epoch": 1.7468487394957983,
"grad_norm": 19.75,
"learning_rate": 9.33568075117371e-06,
"loss": 1.0901,
"step": 16630
},
{
"epoch": 1.7478991596638656,
"grad_norm": 14.3125,
"learning_rate": 9.327856025039125e-06,
"loss": 1.2885,
"step": 16640
},
{
"epoch": 1.7489495798319328,
"grad_norm": 12.625,
"learning_rate": 9.32003129890454e-06,
"loss": 1.5592,
"step": 16650
},
{
"epoch": 1.75,
"grad_norm": 8.75,
"learning_rate": 9.312206572769954e-06,
"loss": 1.2404,
"step": 16660
},
{
"epoch": 1.7510504201680672,
"grad_norm": 13.0,
"learning_rate": 9.304381846635369e-06,
"loss": 1.0363,
"step": 16670
},
{
"epoch": 1.7521008403361344,
"grad_norm": 12.25,
"learning_rate": 9.296557120500783e-06,
"loss": 1.1992,
"step": 16680
},
{
"epoch": 1.7531512605042017,
"grad_norm": 14.0,
"learning_rate": 9.288732394366198e-06,
"loss": 1.1861,
"step": 16690
},
{
"epoch": 1.754201680672269,
"grad_norm": 19.125,
"learning_rate": 9.280907668231612e-06,
"loss": 1.2102,
"step": 16700
},
{
"epoch": 1.7552521008403361,
"grad_norm": 17.75,
"learning_rate": 9.273082942097027e-06,
"loss": 1.7202,
"step": 16710
},
{
"epoch": 1.7563025210084033,
"grad_norm": 17.25,
"learning_rate": 9.265258215962442e-06,
"loss": 1.3753,
"step": 16720
},
{
"epoch": 1.7573529411764706,
"grad_norm": 17.625,
"learning_rate": 9.257433489827856e-06,
"loss": 1.3309,
"step": 16730
},
{
"epoch": 1.7584033613445378,
"grad_norm": 31.0,
"learning_rate": 9.249608763693271e-06,
"loss": 1.8675,
"step": 16740
},
{
"epoch": 1.759453781512605,
"grad_norm": 14.375,
"learning_rate": 9.241784037558686e-06,
"loss": 0.9695,
"step": 16750
},
{
"epoch": 1.7605042016806722,
"grad_norm": 9.3125,
"learning_rate": 9.2339593114241e-06,
"loss": 1.5124,
"step": 16760
},
{
"epoch": 1.7615546218487395,
"grad_norm": 25.75,
"learning_rate": 9.226134585289515e-06,
"loss": 1.2201,
"step": 16770
},
{
"epoch": 1.7626050420168067,
"grad_norm": 6.375,
"learning_rate": 9.21830985915493e-06,
"loss": 1.5277,
"step": 16780
},
{
"epoch": 1.763655462184874,
"grad_norm": 14.5,
"learning_rate": 9.210485133020344e-06,
"loss": 0.9598,
"step": 16790
},
{
"epoch": 1.7647058823529411,
"grad_norm": 13.375,
"learning_rate": 9.202660406885759e-06,
"loss": 1.4482,
"step": 16800
},
{
"epoch": 1.7657563025210083,
"grad_norm": 16.875,
"learning_rate": 9.194835680751174e-06,
"loss": 0.9516,
"step": 16810
},
{
"epoch": 1.7668067226890756,
"grad_norm": 16.375,
"learning_rate": 9.187010954616588e-06,
"loss": 1.4101,
"step": 16820
},
{
"epoch": 1.7678571428571428,
"grad_norm": 12.125,
"learning_rate": 9.179186228482005e-06,
"loss": 1.6945,
"step": 16830
},
{
"epoch": 1.76890756302521,
"grad_norm": 5.8125,
"learning_rate": 9.17136150234742e-06,
"loss": 1.4463,
"step": 16840
},
{
"epoch": 1.7699579831932772,
"grad_norm": 2.46875,
"learning_rate": 9.163536776212834e-06,
"loss": 0.9211,
"step": 16850
},
{
"epoch": 1.7710084033613445,
"grad_norm": 10.0,
"learning_rate": 9.155712050078249e-06,
"loss": 1.7824,
"step": 16860
},
{
"epoch": 1.7720588235294117,
"grad_norm": 13.1875,
"learning_rate": 9.147887323943663e-06,
"loss": 1.5375,
"step": 16870
},
{
"epoch": 1.773109243697479,
"grad_norm": 4.03125,
"learning_rate": 9.140062597809078e-06,
"loss": 1.0609,
"step": 16880
},
{
"epoch": 1.7741596638655461,
"grad_norm": 17.25,
"learning_rate": 9.132237871674493e-06,
"loss": 1.4122,
"step": 16890
},
{
"epoch": 1.7752100840336134,
"grad_norm": 13.0625,
"learning_rate": 9.124413145539907e-06,
"loss": 1.4912,
"step": 16900
},
{
"epoch": 1.7762605042016806,
"grad_norm": 14.625,
"learning_rate": 9.116588419405322e-06,
"loss": 1.3883,
"step": 16910
},
{
"epoch": 1.7773109243697478,
"grad_norm": 12.75,
"learning_rate": 9.108763693270737e-06,
"loss": 1.2688,
"step": 16920
},
{
"epoch": 1.778361344537815,
"grad_norm": 11.625,
"learning_rate": 9.100938967136151e-06,
"loss": 1.1332,
"step": 16930
},
{
"epoch": 1.7794117647058822,
"grad_norm": 5.4375,
"learning_rate": 9.093114241001566e-06,
"loss": 1.1757,
"step": 16940
},
{
"epoch": 1.7804621848739495,
"grad_norm": 15.25,
"learning_rate": 9.08528951486698e-06,
"loss": 1.7292,
"step": 16950
},
{
"epoch": 1.7815126050420167,
"grad_norm": 12.3125,
"learning_rate": 9.077464788732395e-06,
"loss": 1.2453,
"step": 16960
},
{
"epoch": 1.782563025210084,
"grad_norm": 11.5625,
"learning_rate": 9.06964006259781e-06,
"loss": 1.3372,
"step": 16970
},
{
"epoch": 1.7836134453781511,
"grad_norm": 17.875,
"learning_rate": 9.061815336463224e-06,
"loss": 1.1219,
"step": 16980
},
{
"epoch": 1.7846638655462184,
"grad_norm": 10.6875,
"learning_rate": 9.053990610328639e-06,
"loss": 1.6594,
"step": 16990
},
{
"epoch": 1.7857142857142856,
"grad_norm": 3.5625,
"learning_rate": 9.046165884194054e-06,
"loss": 0.88,
"step": 17000
},
{
"epoch": 1.7867647058823528,
"grad_norm": 12.25,
"learning_rate": 9.038341158059468e-06,
"loss": 0.9889,
"step": 17010
},
{
"epoch": 1.78781512605042,
"grad_norm": 13.0,
"learning_rate": 9.030516431924883e-06,
"loss": 1.3583,
"step": 17020
},
{
"epoch": 1.7888655462184873,
"grad_norm": 14.625,
"learning_rate": 9.022691705790298e-06,
"loss": 1.5652,
"step": 17030
},
{
"epoch": 1.7899159663865545,
"grad_norm": 16.25,
"learning_rate": 9.014866979655712e-06,
"loss": 1.448,
"step": 17040
},
{
"epoch": 1.7909663865546217,
"grad_norm": 21.25,
"learning_rate": 9.007042253521127e-06,
"loss": 1.4419,
"step": 17050
},
{
"epoch": 1.792016806722689,
"grad_norm": 13.5,
"learning_rate": 8.999217527386542e-06,
"loss": 1.3056,
"step": 17060
},
{
"epoch": 1.7930672268907561,
"grad_norm": 13.8125,
"learning_rate": 8.991392801251956e-06,
"loss": 1.2497,
"step": 17070
},
{
"epoch": 1.7941176470588234,
"grad_norm": 19.75,
"learning_rate": 8.983568075117371e-06,
"loss": 1.1989,
"step": 17080
},
{
"epoch": 1.7951680672268906,
"grad_norm": 25.25,
"learning_rate": 8.975743348982786e-06,
"loss": 1.2195,
"step": 17090
},
{
"epoch": 1.7962184873949578,
"grad_norm": 14.3125,
"learning_rate": 8.9679186228482e-06,
"loss": 1.4881,
"step": 17100
},
{
"epoch": 1.7972689075630253,
"grad_norm": 5.9375,
"learning_rate": 8.960093896713615e-06,
"loss": 0.9233,
"step": 17110
},
{
"epoch": 1.7983193277310925,
"grad_norm": 9.3125,
"learning_rate": 8.952269170579031e-06,
"loss": 1.3991,
"step": 17120
},
{
"epoch": 1.7993697478991597,
"grad_norm": 16.125,
"learning_rate": 8.944444444444446e-06,
"loss": 1.5303,
"step": 17130
},
{
"epoch": 1.800420168067227,
"grad_norm": 10.125,
"learning_rate": 8.93661971830986e-06,
"loss": 1.3232,
"step": 17140
},
{
"epoch": 1.8014705882352942,
"grad_norm": 12.375,
"learning_rate": 8.928794992175275e-06,
"loss": 1.0356,
"step": 17150
},
{
"epoch": 1.8025210084033614,
"grad_norm": 12.625,
"learning_rate": 8.92097026604069e-06,
"loss": 1.1945,
"step": 17160
},
{
"epoch": 1.8035714285714286,
"grad_norm": 15.25,
"learning_rate": 8.913145539906105e-06,
"loss": 1.4963,
"step": 17170
},
{
"epoch": 1.8046218487394958,
"grad_norm": 11.9375,
"learning_rate": 8.90532081377152e-06,
"loss": 0.9988,
"step": 17180
},
{
"epoch": 1.805672268907563,
"grad_norm": 17.75,
"learning_rate": 8.897496087636934e-06,
"loss": 1.3638,
"step": 17190
},
{
"epoch": 1.8067226890756303,
"grad_norm": 4.40625,
"learning_rate": 8.889671361502349e-06,
"loss": 1.2046,
"step": 17200
},
{
"epoch": 1.8077731092436975,
"grad_norm": 20.625,
"learning_rate": 8.881846635367763e-06,
"loss": 1.053,
"step": 17210
},
{
"epoch": 1.8088235294117647,
"grad_norm": 11.6875,
"learning_rate": 8.874021909233178e-06,
"loss": 1.6096,
"step": 17220
},
{
"epoch": 1.809873949579832,
"grad_norm": 9.875,
"learning_rate": 8.866197183098592e-06,
"loss": 1.5234,
"step": 17230
},
{
"epoch": 1.8109243697478992,
"grad_norm": 13.625,
"learning_rate": 8.858372456964007e-06,
"loss": 1.3558,
"step": 17240
},
{
"epoch": 1.8119747899159664,
"grad_norm": 14.625,
"learning_rate": 8.850547730829422e-06,
"loss": 1.3757,
"step": 17250
},
{
"epoch": 1.8130252100840336,
"grad_norm": 11.9375,
"learning_rate": 8.842723004694836e-06,
"loss": 1.2969,
"step": 17260
},
{
"epoch": 1.8140756302521008,
"grad_norm": 33.25,
"learning_rate": 8.834898278560251e-06,
"loss": 1.258,
"step": 17270
},
{
"epoch": 1.815126050420168,
"grad_norm": 10.9375,
"learning_rate": 8.827073552425666e-06,
"loss": 1.0944,
"step": 17280
},
{
"epoch": 1.8161764705882353,
"grad_norm": 12.3125,
"learning_rate": 8.81924882629108e-06,
"loss": 1.3661,
"step": 17290
},
{
"epoch": 1.8172268907563025,
"grad_norm": 37.75,
"learning_rate": 8.811424100156495e-06,
"loss": 1.1805,
"step": 17300
},
{
"epoch": 1.8182773109243697,
"grad_norm": 16.0,
"learning_rate": 8.80359937402191e-06,
"loss": 1.4742,
"step": 17310
},
{
"epoch": 1.819327731092437,
"grad_norm": 5.0625,
"learning_rate": 8.795774647887324e-06,
"loss": 1.4898,
"step": 17320
},
{
"epoch": 1.8203781512605042,
"grad_norm": 5.3125,
"learning_rate": 8.787949921752739e-06,
"loss": 1.2318,
"step": 17330
},
{
"epoch": 1.8214285714285714,
"grad_norm": 14.875,
"learning_rate": 8.780125195618154e-06,
"loss": 1.2514,
"step": 17340
},
{
"epoch": 1.8224789915966386,
"grad_norm": 3.25,
"learning_rate": 8.772300469483568e-06,
"loss": 1.0916,
"step": 17350
},
{
"epoch": 1.8235294117647058,
"grad_norm": 8.8125,
"learning_rate": 8.764475743348983e-06,
"loss": 1.5622,
"step": 17360
},
{
"epoch": 1.824579831932773,
"grad_norm": 11.9375,
"learning_rate": 8.756651017214398e-06,
"loss": 1.338,
"step": 17370
},
{
"epoch": 1.8256302521008403,
"grad_norm": 17.625,
"learning_rate": 8.748826291079812e-06,
"loss": 1.2941,
"step": 17380
},
{
"epoch": 1.8266806722689075,
"grad_norm": 17.875,
"learning_rate": 8.741001564945227e-06,
"loss": 1.303,
"step": 17390
},
{
"epoch": 1.8277310924369747,
"grad_norm": 14.3125,
"learning_rate": 8.733176838810642e-06,
"loss": 1.5946,
"step": 17400
},
{
"epoch": 1.8287815126050422,
"grad_norm": 17.375,
"learning_rate": 8.725352112676056e-06,
"loss": 1.3532,
"step": 17410
},
{
"epoch": 1.8298319327731094,
"grad_norm": 16.25,
"learning_rate": 8.717527386541473e-06,
"loss": 0.8938,
"step": 17420
},
{
"epoch": 1.8308823529411766,
"grad_norm": 6.21875,
"learning_rate": 8.709702660406887e-06,
"loss": 0.9492,
"step": 17430
},
{
"epoch": 1.8319327731092439,
"grad_norm": 8.8125,
"learning_rate": 8.701877934272302e-06,
"loss": 1.4988,
"step": 17440
},
{
"epoch": 1.832983193277311,
"grad_norm": 15.6875,
"learning_rate": 8.694053208137717e-06,
"loss": 1.0669,
"step": 17450
},
{
"epoch": 1.8340336134453783,
"grad_norm": 18.0,
"learning_rate": 8.686228482003131e-06,
"loss": 1.4973,
"step": 17460
},
{
"epoch": 1.8350840336134455,
"grad_norm": 9.9375,
"learning_rate": 8.678403755868546e-06,
"loss": 0.8783,
"step": 17470
},
{
"epoch": 1.8361344537815127,
"grad_norm": 14.375,
"learning_rate": 8.67057902973396e-06,
"loss": 1.4758,
"step": 17480
},
{
"epoch": 1.83718487394958,
"grad_norm": 18.875,
"learning_rate": 8.662754303599375e-06,
"loss": 1.4096,
"step": 17490
},
{
"epoch": 1.8382352941176472,
"grad_norm": 18.5,
"learning_rate": 8.65492957746479e-06,
"loss": 1.1369,
"step": 17500
},
{
"epoch": 1.8392857142857144,
"grad_norm": 4.84375,
"learning_rate": 8.647104851330203e-06,
"loss": 1.0654,
"step": 17510
},
{
"epoch": 1.8403361344537816,
"grad_norm": 20.375,
"learning_rate": 8.639280125195619e-06,
"loss": 1.3115,
"step": 17520
},
{
"epoch": 1.8413865546218489,
"grad_norm": 27.5,
"learning_rate": 8.631455399061034e-06,
"loss": 1.3379,
"step": 17530
},
{
"epoch": 1.842436974789916,
"grad_norm": 11.6875,
"learning_rate": 8.623630672926448e-06,
"loss": 1.1143,
"step": 17540
},
{
"epoch": 1.8434873949579833,
"grad_norm": 10.9375,
"learning_rate": 8.615805946791863e-06,
"loss": 1.3278,
"step": 17550
},
{
"epoch": 1.8445378151260505,
"grad_norm": 9.1875,
"learning_rate": 8.607981220657278e-06,
"loss": 1.3111,
"step": 17560
},
{
"epoch": 1.8455882352941178,
"grad_norm": 23.5,
"learning_rate": 8.600156494522692e-06,
"loss": 1.5519,
"step": 17570
},
{
"epoch": 1.846638655462185,
"grad_norm": 15.875,
"learning_rate": 8.592331768388107e-06,
"loss": 1.4849,
"step": 17580
},
{
"epoch": 1.8476890756302522,
"grad_norm": 8.0,
"learning_rate": 8.584507042253522e-06,
"loss": 0.8678,
"step": 17590
},
{
"epoch": 1.8487394957983194,
"grad_norm": 14.1875,
"learning_rate": 8.576682316118936e-06,
"loss": 1.3236,
"step": 17600
},
{
"epoch": 1.8497899159663866,
"grad_norm": 12.8125,
"learning_rate": 8.568857589984351e-06,
"loss": 1.2436,
"step": 17610
},
{
"epoch": 1.8508403361344539,
"grad_norm": 16.625,
"learning_rate": 8.561032863849766e-06,
"loss": 1.1921,
"step": 17620
},
{
"epoch": 1.851890756302521,
"grad_norm": 5.625,
"learning_rate": 8.55320813771518e-06,
"loss": 1.3603,
"step": 17630
},
{
"epoch": 1.8529411764705883,
"grad_norm": 10.6875,
"learning_rate": 8.545383411580595e-06,
"loss": 1.0873,
"step": 17640
},
{
"epoch": 1.8539915966386555,
"grad_norm": 16.875,
"learning_rate": 8.53755868544601e-06,
"loss": 1.4378,
"step": 17650
},
{
"epoch": 1.8550420168067228,
"grad_norm": 9.9375,
"learning_rate": 8.529733959311424e-06,
"loss": 1.4792,
"step": 17660
},
{
"epoch": 1.85609243697479,
"grad_norm": 11.75,
"learning_rate": 8.521909233176839e-06,
"loss": 1.8806,
"step": 17670
},
{
"epoch": 1.8571428571428572,
"grad_norm": 12.5625,
"learning_rate": 8.514084507042254e-06,
"loss": 1.3804,
"step": 17680
},
{
"epoch": 1.8581932773109244,
"grad_norm": 16.75,
"learning_rate": 8.506259780907668e-06,
"loss": 1.1978,
"step": 17690
},
{
"epoch": 1.8592436974789917,
"grad_norm": 12.0,
"learning_rate": 8.498435054773083e-06,
"loss": 0.841,
"step": 17700
},
{
"epoch": 1.8602941176470589,
"grad_norm": 5.375,
"learning_rate": 8.4906103286385e-06,
"loss": 1.0246,
"step": 17710
},
{
"epoch": 1.861344537815126,
"grad_norm": 16.75,
"learning_rate": 8.482785602503914e-06,
"loss": 1.5822,
"step": 17720
},
{
"epoch": 1.8623949579831933,
"grad_norm": 16.5,
"learning_rate": 8.474960876369329e-06,
"loss": 1.3041,
"step": 17730
},
{
"epoch": 1.8634453781512605,
"grad_norm": 13.375,
"learning_rate": 8.467136150234743e-06,
"loss": 1.4525,
"step": 17740
},
{
"epoch": 1.8644957983193278,
"grad_norm": 12.625,
"learning_rate": 8.459311424100158e-06,
"loss": 1.4399,
"step": 17750
},
{
"epoch": 1.865546218487395,
"grad_norm": 16.25,
"learning_rate": 8.451486697965572e-06,
"loss": 1.3268,
"step": 17760
},
{
"epoch": 1.8665966386554622,
"grad_norm": 14.6875,
"learning_rate": 8.443661971830987e-06,
"loss": 1.6509,
"step": 17770
},
{
"epoch": 1.8676470588235294,
"grad_norm": 17.625,
"learning_rate": 8.435837245696402e-06,
"loss": 1.6677,
"step": 17780
},
{
"epoch": 1.8686974789915967,
"grad_norm": 14.25,
"learning_rate": 8.428012519561816e-06,
"loss": 1.2302,
"step": 17790
},
{
"epoch": 1.8697478991596639,
"grad_norm": 4.46875,
"learning_rate": 8.420187793427231e-06,
"loss": 1.0689,
"step": 17800
},
{
"epoch": 1.870798319327731,
"grad_norm": 5.09375,
"learning_rate": 8.412363067292646e-06,
"loss": 1.1844,
"step": 17810
},
{
"epoch": 1.8718487394957983,
"grad_norm": 13.25,
"learning_rate": 8.40453834115806e-06,
"loss": 1.5129,
"step": 17820
},
{
"epoch": 1.8728991596638656,
"grad_norm": 15.125,
"learning_rate": 8.396713615023475e-06,
"loss": 1.363,
"step": 17830
},
{
"epoch": 1.8739495798319328,
"grad_norm": 16.875,
"learning_rate": 8.38888888888889e-06,
"loss": 1.2641,
"step": 17840
},
{
"epoch": 1.875,
"grad_norm": 12.5,
"learning_rate": 8.381064162754304e-06,
"loss": 1.599,
"step": 17850
},
{
"epoch": 1.8760504201680672,
"grad_norm": 4.25,
"learning_rate": 8.373239436619719e-06,
"loss": 1.3407,
"step": 17860
},
{
"epoch": 1.8771008403361344,
"grad_norm": 11.5,
"learning_rate": 8.365414710485134e-06,
"loss": 0.8743,
"step": 17870
},
{
"epoch": 1.8781512605042017,
"grad_norm": 12.4375,
"learning_rate": 8.357589984350548e-06,
"loss": 1.4704,
"step": 17880
},
{
"epoch": 1.879201680672269,
"grad_norm": 8.5,
"learning_rate": 8.349765258215963e-06,
"loss": 1.2385,
"step": 17890
},
{
"epoch": 1.8802521008403361,
"grad_norm": 13.0625,
"learning_rate": 8.341940532081378e-06,
"loss": 0.778,
"step": 17900
},
{
"epoch": 1.8813025210084033,
"grad_norm": 11.3125,
"learning_rate": 8.334115805946792e-06,
"loss": 1.3938,
"step": 17910
},
{
"epoch": 1.8823529411764706,
"grad_norm": 15.4375,
"learning_rate": 8.326291079812207e-06,
"loss": 1.684,
"step": 17920
},
{
"epoch": 1.8834033613445378,
"grad_norm": 18.25,
"learning_rate": 8.318466353677622e-06,
"loss": 1.6561,
"step": 17930
},
{
"epoch": 1.884453781512605,
"grad_norm": 17.0,
"learning_rate": 8.310641627543036e-06,
"loss": 1.5292,
"step": 17940
},
{
"epoch": 1.8855042016806722,
"grad_norm": 10.1875,
"learning_rate": 8.302816901408451e-06,
"loss": 1.2049,
"step": 17950
},
{
"epoch": 1.8865546218487395,
"grad_norm": 7.375,
"learning_rate": 8.294992175273866e-06,
"loss": 1.2616,
"step": 17960
},
{
"epoch": 1.8876050420168067,
"grad_norm": 11.0,
"learning_rate": 8.28716744913928e-06,
"loss": 0.9736,
"step": 17970
},
{
"epoch": 1.888655462184874,
"grad_norm": 18.0,
"learning_rate": 8.279342723004695e-06,
"loss": 1.194,
"step": 17980
},
{
"epoch": 1.8897058823529411,
"grad_norm": 16.125,
"learning_rate": 8.27151799687011e-06,
"loss": 1.3858,
"step": 17990
},
{
"epoch": 1.8907563025210083,
"grad_norm": 13.6875,
"learning_rate": 8.263693270735524e-06,
"loss": 0.965,
"step": 18000
},
{
"epoch": 1.8918067226890756,
"grad_norm": 11.75,
"learning_rate": 8.25586854460094e-06,
"loss": 1.2845,
"step": 18010
},
{
"epoch": 1.8928571428571428,
"grad_norm": 13.5,
"learning_rate": 8.248043818466355e-06,
"loss": 1.0943,
"step": 18020
},
{
"epoch": 1.89390756302521,
"grad_norm": 16.375,
"learning_rate": 8.24021909233177e-06,
"loss": 1.4654,
"step": 18030
},
{
"epoch": 1.8949579831932772,
"grad_norm": 15.3125,
"learning_rate": 8.232394366197184e-06,
"loss": 1.6451,
"step": 18040
},
{
"epoch": 1.8960084033613445,
"grad_norm": 11.8125,
"learning_rate": 8.224569640062599e-06,
"loss": 1.5634,
"step": 18050
},
{
"epoch": 1.8970588235294117,
"grad_norm": 6.625,
"learning_rate": 8.216744913928014e-06,
"loss": 1.3746,
"step": 18060
},
{
"epoch": 1.898109243697479,
"grad_norm": 5.40625,
"learning_rate": 8.208920187793428e-06,
"loss": 0.9587,
"step": 18070
},
{
"epoch": 1.8991596638655461,
"grad_norm": 25.875,
"learning_rate": 8.201095461658843e-06,
"loss": 1.6194,
"step": 18080
},
{
"epoch": 1.9002100840336134,
"grad_norm": 3.28125,
"learning_rate": 8.193270735524258e-06,
"loss": 1.1389,
"step": 18090
},
{
"epoch": 1.9012605042016806,
"grad_norm": 4.4375,
"learning_rate": 8.18544600938967e-06,
"loss": 0.8685,
"step": 18100
},
{
"epoch": 1.9023109243697478,
"grad_norm": 18.875,
"learning_rate": 8.177621283255087e-06,
"loss": 1.51,
"step": 18110
},
{
"epoch": 1.903361344537815,
"grad_norm": 24.125,
"learning_rate": 8.169796557120502e-06,
"loss": 1.5197,
"step": 18120
},
{
"epoch": 1.9044117647058822,
"grad_norm": 12.3125,
"learning_rate": 8.161971830985916e-06,
"loss": 1.4569,
"step": 18130
},
{
"epoch": 1.9054621848739495,
"grad_norm": 4.65625,
"learning_rate": 8.154147104851331e-06,
"loss": 1.1837,
"step": 18140
},
{
"epoch": 1.9065126050420167,
"grad_norm": 3.765625,
"learning_rate": 8.146322378716746e-06,
"loss": 1.6231,
"step": 18150
},
{
"epoch": 1.907563025210084,
"grad_norm": 10.3125,
"learning_rate": 8.13849765258216e-06,
"loss": 1.3244,
"step": 18160
},
{
"epoch": 1.9086134453781511,
"grad_norm": 12.4375,
"learning_rate": 8.130672926447575e-06,
"loss": 1.1799,
"step": 18170
},
{
"epoch": 1.9096638655462184,
"grad_norm": 13.25,
"learning_rate": 8.12284820031299e-06,
"loss": 1.0951,
"step": 18180
},
{
"epoch": 1.9107142857142856,
"grad_norm": 4.96875,
"learning_rate": 8.115023474178404e-06,
"loss": 1.2038,
"step": 18190
},
{
"epoch": 1.9117647058823528,
"grad_norm": 14.5625,
"learning_rate": 8.107198748043819e-06,
"loss": 1.514,
"step": 18200
},
{
"epoch": 1.91281512605042,
"grad_norm": 15.25,
"learning_rate": 8.099374021909234e-06,
"loss": 1.1104,
"step": 18210
},
{
"epoch": 1.9138655462184873,
"grad_norm": 15.9375,
"learning_rate": 8.091549295774648e-06,
"loss": 1.7082,
"step": 18220
},
{
"epoch": 1.9149159663865545,
"grad_norm": 13.375,
"learning_rate": 8.083724569640063e-06,
"loss": 0.8448,
"step": 18230
},
{
"epoch": 1.9159663865546217,
"grad_norm": 3.328125,
"learning_rate": 8.075899843505478e-06,
"loss": 1.286,
"step": 18240
},
{
"epoch": 1.917016806722689,
"grad_norm": 11.125,
"learning_rate": 8.068075117370892e-06,
"loss": 1.6202,
"step": 18250
},
{
"epoch": 1.9180672268907561,
"grad_norm": 5.0,
"learning_rate": 8.060250391236307e-06,
"loss": 1.2555,
"step": 18260
},
{
"epoch": 1.9191176470588234,
"grad_norm": 11.5,
"learning_rate": 8.052425665101721e-06,
"loss": 1.323,
"step": 18270
},
{
"epoch": 1.9201680672268906,
"grad_norm": 11.5625,
"learning_rate": 8.044600938967136e-06,
"loss": 1.8193,
"step": 18280
},
{
"epoch": 1.9212184873949578,
"grad_norm": 12.0625,
"learning_rate": 8.03677621283255e-06,
"loss": 1.487,
"step": 18290
},
{
"epoch": 1.9222689075630253,
"grad_norm": 12.25,
"learning_rate": 8.028951486697967e-06,
"loss": 1.0669,
"step": 18300
},
{
"epoch": 1.9233193277310925,
"grad_norm": 16.25,
"learning_rate": 8.021126760563382e-06,
"loss": 1.2489,
"step": 18310
},
{
"epoch": 1.9243697478991597,
"grad_norm": 15.75,
"learning_rate": 8.013302034428796e-06,
"loss": 1.2696,
"step": 18320
},
{
"epoch": 1.925420168067227,
"grad_norm": 17.625,
"learning_rate": 8.005477308294211e-06,
"loss": 1.5837,
"step": 18330
},
{
"epoch": 1.9264705882352942,
"grad_norm": 4.9375,
"learning_rate": 7.997652582159626e-06,
"loss": 1.2175,
"step": 18340
},
{
"epoch": 1.9275210084033614,
"grad_norm": 19.75,
"learning_rate": 7.98982785602504e-06,
"loss": 1.5983,
"step": 18350
},
{
"epoch": 1.9285714285714286,
"grad_norm": 13.125,
"learning_rate": 7.982003129890455e-06,
"loss": 1.6932,
"step": 18360
},
{
"epoch": 1.9296218487394958,
"grad_norm": 12.875,
"learning_rate": 7.97417840375587e-06,
"loss": 1.0066,
"step": 18370
},
{
"epoch": 1.930672268907563,
"grad_norm": 9.25,
"learning_rate": 7.966353677621284e-06,
"loss": 1.3433,
"step": 18380
},
{
"epoch": 1.9317226890756303,
"grad_norm": 15.5,
"learning_rate": 7.958528951486697e-06,
"loss": 1.2296,
"step": 18390
},
{
"epoch": 1.9327731092436975,
"grad_norm": 11.375,
"learning_rate": 7.950704225352114e-06,
"loss": 0.9645,
"step": 18400
},
{
"epoch": 1.9338235294117647,
"grad_norm": 19.125,
"learning_rate": 7.942879499217528e-06,
"loss": 1.1675,
"step": 18410
},
{
"epoch": 1.934873949579832,
"grad_norm": 16.25,
"learning_rate": 7.935054773082943e-06,
"loss": 1.2545,
"step": 18420
},
{
"epoch": 1.9359243697478992,
"grad_norm": 6.40625,
"learning_rate": 7.927230046948358e-06,
"loss": 1.3921,
"step": 18430
},
{
"epoch": 1.9369747899159664,
"grad_norm": 4.5625,
"learning_rate": 7.919405320813772e-06,
"loss": 1.333,
"step": 18440
},
{
"epoch": 1.9380252100840336,
"grad_norm": 13.625,
"learning_rate": 7.911580594679187e-06,
"loss": 1.4191,
"step": 18450
},
{
"epoch": 1.9390756302521008,
"grad_norm": 13.6875,
"learning_rate": 7.903755868544602e-06,
"loss": 0.9574,
"step": 18460
},
{
"epoch": 1.940126050420168,
"grad_norm": 13.8125,
"learning_rate": 7.895931142410016e-06,
"loss": 1.4445,
"step": 18470
},
{
"epoch": 1.9411764705882353,
"grad_norm": 8.8125,
"learning_rate": 7.888106416275431e-06,
"loss": 1.2046,
"step": 18480
},
{
"epoch": 1.9422268907563025,
"grad_norm": 13.375,
"learning_rate": 7.880281690140846e-06,
"loss": 0.6966,
"step": 18490
},
{
"epoch": 1.9432773109243697,
"grad_norm": 17.875,
"learning_rate": 7.87245696400626e-06,
"loss": 1.593,
"step": 18500
},
{
"epoch": 1.944327731092437,
"grad_norm": 14.125,
"learning_rate": 7.864632237871675e-06,
"loss": 1.5295,
"step": 18510
},
{
"epoch": 1.9453781512605042,
"grad_norm": 17.375,
"learning_rate": 7.85680751173709e-06,
"loss": 1.0856,
"step": 18520
},
{
"epoch": 1.9464285714285714,
"grad_norm": 11.75,
"learning_rate": 7.848982785602504e-06,
"loss": 1.4568,
"step": 18530
},
{
"epoch": 1.9474789915966386,
"grad_norm": 12.4375,
"learning_rate": 7.841158059467919e-06,
"loss": 1.2177,
"step": 18540
},
{
"epoch": 1.9485294117647058,
"grad_norm": 7.625,
"learning_rate": 7.833333333333333e-06,
"loss": 1.1167,
"step": 18550
},
{
"epoch": 1.949579831932773,
"grad_norm": 11.375,
"learning_rate": 7.825508607198748e-06,
"loss": 0.9677,
"step": 18560
},
{
"epoch": 1.9506302521008403,
"grad_norm": 15.125,
"learning_rate": 7.817683881064163e-06,
"loss": 1.2355,
"step": 18570
},
{
"epoch": 1.9516806722689075,
"grad_norm": 15.625,
"learning_rate": 7.809859154929577e-06,
"loss": 1.4345,
"step": 18580
},
{
"epoch": 1.9527310924369747,
"grad_norm": 18.875,
"learning_rate": 7.802034428794992e-06,
"loss": 1.2863,
"step": 18590
},
{
"epoch": 1.9537815126050422,
"grad_norm": 13.0,
"learning_rate": 7.794209702660408e-06,
"loss": 1.1896,
"step": 18600
},
{
"epoch": 1.9548319327731094,
"grad_norm": 6.15625,
"learning_rate": 7.786384976525823e-06,
"loss": 1.1191,
"step": 18610
},
{
"epoch": 1.9558823529411766,
"grad_norm": 7.6875,
"learning_rate": 7.778560250391238e-06,
"loss": 0.9882,
"step": 18620
},
{
"epoch": 1.9569327731092439,
"grad_norm": 14.9375,
"learning_rate": 7.770735524256652e-06,
"loss": 1.2986,
"step": 18630
},
{
"epoch": 1.957983193277311,
"grad_norm": 4.78125,
"learning_rate": 7.762910798122067e-06,
"loss": 1.1171,
"step": 18640
},
{
"epoch": 1.9590336134453783,
"grad_norm": 16.375,
"learning_rate": 7.755086071987482e-06,
"loss": 1.4327,
"step": 18650
},
{
"epoch": 1.9600840336134455,
"grad_norm": 19.125,
"learning_rate": 7.747261345852896e-06,
"loss": 1.1288,
"step": 18660
},
{
"epoch": 1.9611344537815127,
"grad_norm": 9.125,
"learning_rate": 7.739436619718311e-06,
"loss": 0.9525,
"step": 18670
},
{
"epoch": 1.96218487394958,
"grad_norm": 14.625,
"learning_rate": 7.731611893583724e-06,
"loss": 1.2451,
"step": 18680
},
{
"epoch": 1.9632352941176472,
"grad_norm": 70.0,
"learning_rate": 7.723787167449139e-06,
"loss": 1.5298,
"step": 18690
},
{
"epoch": 1.9642857142857144,
"grad_norm": 15.5,
"learning_rate": 7.715962441314555e-06,
"loss": 0.9037,
"step": 18700
},
{
"epoch": 1.9653361344537816,
"grad_norm": 17.75,
"learning_rate": 7.70813771517997e-06,
"loss": 1.4113,
"step": 18710
},
{
"epoch": 1.9663865546218489,
"grad_norm": 9.3125,
"learning_rate": 7.700312989045384e-06,
"loss": 1.1319,
"step": 18720
},
{
"epoch": 1.967436974789916,
"grad_norm": 18.625,
"learning_rate": 7.692488262910799e-06,
"loss": 0.897,
"step": 18730
},
{
"epoch": 1.9684873949579833,
"grad_norm": 18.5,
"learning_rate": 7.684663536776214e-06,
"loss": 1.8907,
"step": 18740
},
{
"epoch": 1.9695378151260505,
"grad_norm": 15.9375,
"learning_rate": 7.676838810641628e-06,
"loss": 1.0922,
"step": 18750
},
{
"epoch": 1.9705882352941178,
"grad_norm": 11.625,
"learning_rate": 7.669014084507043e-06,
"loss": 1.2242,
"step": 18760
},
{
"epoch": 1.971638655462185,
"grad_norm": 8.875,
"learning_rate": 7.661189358372457e-06,
"loss": 1.3724,
"step": 18770
},
{
"epoch": 1.9726890756302522,
"grad_norm": 16.0,
"learning_rate": 7.653364632237872e-06,
"loss": 1.4016,
"step": 18780
},
{
"epoch": 1.9737394957983194,
"grad_norm": 13.4375,
"learning_rate": 7.645539906103287e-06,
"loss": 1.2584,
"step": 18790
},
{
"epoch": 1.9747899159663866,
"grad_norm": 12.75,
"learning_rate": 7.637715179968701e-06,
"loss": 1.4473,
"step": 18800
},
{
"epoch": 1.9758403361344539,
"grad_norm": 16.625,
"learning_rate": 7.629890453834116e-06,
"loss": 1.5624,
"step": 18810
},
{
"epoch": 1.976890756302521,
"grad_norm": 15.5625,
"learning_rate": 7.622065727699532e-06,
"loss": 1.3079,
"step": 18820
},
{
"epoch": 1.9779411764705883,
"grad_norm": 15.125,
"learning_rate": 7.614241001564946e-06,
"loss": 1.3884,
"step": 18830
},
{
"epoch": 1.9789915966386555,
"grad_norm": 48.0,
"learning_rate": 7.606416275430361e-06,
"loss": 1.2941,
"step": 18840
},
{
"epoch": 1.9800420168067228,
"grad_norm": 4.46875,
"learning_rate": 7.598591549295775e-06,
"loss": 1.0213,
"step": 18850
},
{
"epoch": 1.98109243697479,
"grad_norm": 17.5,
"learning_rate": 7.590766823161189e-06,
"loss": 1.8067,
"step": 18860
},
{
"epoch": 1.9821428571428572,
"grad_norm": 12.3125,
"learning_rate": 7.582942097026604e-06,
"loss": 1.1539,
"step": 18870
},
{
"epoch": 1.9831932773109244,
"grad_norm": 3.765625,
"learning_rate": 7.575117370892019e-06,
"loss": 1.2292,
"step": 18880
},
{
"epoch": 1.9842436974789917,
"grad_norm": 16.5,
"learning_rate": 7.567292644757435e-06,
"loss": 1.4515,
"step": 18890
},
{
"epoch": 1.9852941176470589,
"grad_norm": 19.375,
"learning_rate": 7.559467918622849e-06,
"loss": 1.7594,
"step": 18900
},
{
"epoch": 1.986344537815126,
"grad_norm": 15.3125,
"learning_rate": 7.5516431924882635e-06,
"loss": 1.6458,
"step": 18910
},
{
"epoch": 1.9873949579831933,
"grad_norm": 12.375,
"learning_rate": 7.543818466353678e-06,
"loss": 1.7079,
"step": 18920
},
{
"epoch": 1.9884453781512605,
"grad_norm": 11.9375,
"learning_rate": 7.535993740219093e-06,
"loss": 1.4056,
"step": 18930
},
{
"epoch": 1.9894957983193278,
"grad_norm": 13.9375,
"learning_rate": 7.5281690140845074e-06,
"loss": 1.542,
"step": 18940
},
{
"epoch": 1.990546218487395,
"grad_norm": 14.375,
"learning_rate": 7.520344287949922e-06,
"loss": 1.4334,
"step": 18950
},
{
"epoch": 1.9915966386554622,
"grad_norm": 12.125,
"learning_rate": 7.512519561815337e-06,
"loss": 1.033,
"step": 18960
},
{
"epoch": 1.9926470588235294,
"grad_norm": 11.8125,
"learning_rate": 7.504694835680751e-06,
"loss": 1.5305,
"step": 18970
},
{
"epoch": 1.9936974789915967,
"grad_norm": 20.875,
"learning_rate": 7.496870109546166e-06,
"loss": 1.5138,
"step": 18980
},
{
"epoch": 1.9947478991596639,
"grad_norm": 5.71875,
"learning_rate": 7.4890453834115816e-06,
"loss": 1.1856,
"step": 18990
},
{
"epoch": 1.995798319327731,
"grad_norm": 12.9375,
"learning_rate": 7.481220657276996e-06,
"loss": 1.3632,
"step": 19000
},
{
"epoch": 1.9968487394957983,
"grad_norm": 18.0,
"learning_rate": 7.473395931142411e-06,
"loss": 1.6468,
"step": 19010
},
{
"epoch": 1.9978991596638656,
"grad_norm": 15.6875,
"learning_rate": 7.4655712050078255e-06,
"loss": 1.5305,
"step": 19020
},
{
"epoch": 1.9989495798319328,
"grad_norm": 38.0,
"learning_rate": 7.45774647887324e-06,
"loss": 1.3166,
"step": 19030
},
{
"epoch": 2.0,
"grad_norm": 15.8125,
"learning_rate": 7.449921752738655e-06,
"loss": 1.1849,
"step": 19040
},
{
"epoch": 2.0010504201680672,
"grad_norm": 11.5625,
"learning_rate": 7.4420970266040695e-06,
"loss": 1.2535,
"step": 19050
},
{
"epoch": 2.0021008403361344,
"grad_norm": 18.0,
"learning_rate": 7.434272300469484e-06,
"loss": 0.9719,
"step": 19060
},
{
"epoch": 2.0031512605042017,
"grad_norm": 11.25,
"learning_rate": 7.426447574334899e-06,
"loss": 1.3149,
"step": 19070
},
{
"epoch": 2.004201680672269,
"grad_norm": 8.5,
"learning_rate": 7.4186228482003134e-06,
"loss": 1.257,
"step": 19080
},
{
"epoch": 2.005252100840336,
"grad_norm": 13.5625,
"learning_rate": 7.410798122065729e-06,
"loss": 1.7853,
"step": 19090
},
{
"epoch": 2.0063025210084033,
"grad_norm": 15.125,
"learning_rate": 7.402973395931144e-06,
"loss": 1.6606,
"step": 19100
},
{
"epoch": 2.0073529411764706,
"grad_norm": 20.875,
"learning_rate": 7.395148669796558e-06,
"loss": 1.1459,
"step": 19110
},
{
"epoch": 2.008403361344538,
"grad_norm": 15.1875,
"learning_rate": 7.387323943661973e-06,
"loss": 1.5116,
"step": 19120
},
{
"epoch": 2.009453781512605,
"grad_norm": 14.375,
"learning_rate": 7.3794992175273875e-06,
"loss": 1.8419,
"step": 19130
},
{
"epoch": 2.0105042016806722,
"grad_norm": 15.4375,
"learning_rate": 7.371674491392802e-06,
"loss": 1.3708,
"step": 19140
},
{
"epoch": 2.0115546218487395,
"grad_norm": 13.75,
"learning_rate": 7.363849765258216e-06,
"loss": 1.3603,
"step": 19150
},
{
"epoch": 2.0126050420168067,
"grad_norm": 13.4375,
"learning_rate": 7.356025039123631e-06,
"loss": 1.3889,
"step": 19160
},
{
"epoch": 2.013655462184874,
"grad_norm": 12.625,
"learning_rate": 7.348200312989045e-06,
"loss": 1.4465,
"step": 19170
},
{
"epoch": 2.014705882352941,
"grad_norm": 15.125,
"learning_rate": 7.34037558685446e-06,
"loss": 1.51,
"step": 19180
},
{
"epoch": 2.0157563025210083,
"grad_norm": 4.15625,
"learning_rate": 7.3325508607198755e-06,
"loss": 1.547,
"step": 19190
},
{
"epoch": 2.0168067226890756,
"grad_norm": 14.5625,
"learning_rate": 7.32472613458529e-06,
"loss": 0.7578,
"step": 19200
},
{
"epoch": 2.017857142857143,
"grad_norm": 12.1875,
"learning_rate": 7.316901408450705e-06,
"loss": 1.379,
"step": 19210
},
{
"epoch": 2.01890756302521,
"grad_norm": 11.75,
"learning_rate": 7.309076682316119e-06,
"loss": 1.693,
"step": 19220
},
{
"epoch": 2.0199579831932772,
"grad_norm": 6.59375,
"learning_rate": 7.301251956181534e-06,
"loss": 0.9865,
"step": 19230
},
{
"epoch": 2.0210084033613445,
"grad_norm": 12.125,
"learning_rate": 7.293427230046949e-06,
"loss": 1.4209,
"step": 19240
},
{
"epoch": 2.0220588235294117,
"grad_norm": 9.8125,
"learning_rate": 7.285602503912363e-06,
"loss": 1.7937,
"step": 19250
},
{
"epoch": 2.023109243697479,
"grad_norm": 28.125,
"learning_rate": 7.277777777777778e-06,
"loss": 1.5097,
"step": 19260
},
{
"epoch": 2.024159663865546,
"grad_norm": 14.1875,
"learning_rate": 7.269953051643193e-06,
"loss": 1.4357,
"step": 19270
},
{
"epoch": 2.0252100840336134,
"grad_norm": 15.625,
"learning_rate": 7.262128325508607e-06,
"loss": 1.549,
"step": 19280
},
{
"epoch": 2.0262605042016806,
"grad_norm": 8.3125,
"learning_rate": 7.254303599374023e-06,
"loss": 1.2916,
"step": 19290
},
{
"epoch": 2.027310924369748,
"grad_norm": 15.125,
"learning_rate": 7.2464788732394375e-06,
"loss": 1.5242,
"step": 19300
},
{
"epoch": 2.028361344537815,
"grad_norm": 12.8125,
"learning_rate": 7.238654147104852e-06,
"loss": 1.2154,
"step": 19310
},
{
"epoch": 2.0294117647058822,
"grad_norm": 14.75,
"learning_rate": 7.230829420970267e-06,
"loss": 1.9179,
"step": 19320
},
{
"epoch": 2.0304621848739495,
"grad_norm": 56.75,
"learning_rate": 7.2230046948356814e-06,
"loss": 1.5349,
"step": 19330
},
{
"epoch": 2.0315126050420167,
"grad_norm": 9.25,
"learning_rate": 7.215179968701096e-06,
"loss": 1.3553,
"step": 19340
},
{
"epoch": 2.032563025210084,
"grad_norm": 11.9375,
"learning_rate": 7.207355242566511e-06,
"loss": 1.292,
"step": 19350
},
{
"epoch": 2.033613445378151,
"grad_norm": 12.875,
"learning_rate": 7.199530516431925e-06,
"loss": 1.8825,
"step": 19360
},
{
"epoch": 2.0346638655462184,
"grad_norm": 11.4375,
"learning_rate": 7.19170579029734e-06,
"loss": 1.4219,
"step": 19370
},
{
"epoch": 2.0357142857142856,
"grad_norm": 11.625,
"learning_rate": 7.1838810641627556e-06,
"loss": 1.3718,
"step": 19380
},
{
"epoch": 2.036764705882353,
"grad_norm": 23.25,
"learning_rate": 7.17605633802817e-06,
"loss": 1.4568,
"step": 19390
},
{
"epoch": 2.03781512605042,
"grad_norm": 13.0,
"learning_rate": 7.168231611893585e-06,
"loss": 1.4933,
"step": 19400
},
{
"epoch": 2.0388655462184873,
"grad_norm": 4.6875,
"learning_rate": 7.1604068857589995e-06,
"loss": 1.1494,
"step": 19410
},
{
"epoch": 2.0399159663865545,
"grad_norm": 14.0625,
"learning_rate": 7.152582159624414e-06,
"loss": 1.1781,
"step": 19420
},
{
"epoch": 2.0409663865546217,
"grad_norm": 13.625,
"learning_rate": 7.144757433489829e-06,
"loss": 1.5522,
"step": 19430
},
{
"epoch": 2.042016806722689,
"grad_norm": 15.8125,
"learning_rate": 7.136932707355243e-06,
"loss": 1.2522,
"step": 19440
},
{
"epoch": 2.043067226890756,
"grad_norm": 14.0625,
"learning_rate": 7.129107981220657e-06,
"loss": 1.0577,
"step": 19450
},
{
"epoch": 2.0441176470588234,
"grad_norm": 15.9375,
"learning_rate": 7.121283255086072e-06,
"loss": 1.3597,
"step": 19460
},
{
"epoch": 2.0451680672268906,
"grad_norm": 19.125,
"learning_rate": 7.113458528951487e-06,
"loss": 1.5251,
"step": 19470
},
{
"epoch": 2.046218487394958,
"grad_norm": 12.375,
"learning_rate": 7.105633802816903e-06,
"loss": 1.4767,
"step": 19480
},
{
"epoch": 2.047268907563025,
"grad_norm": 11.4375,
"learning_rate": 7.097809076682317e-06,
"loss": 1.2745,
"step": 19490
},
{
"epoch": 2.0483193277310923,
"grad_norm": 7.125,
"learning_rate": 7.089984350547731e-06,
"loss": 1.2556,
"step": 19500
},
{
"epoch": 2.0493697478991595,
"grad_norm": 17.625,
"learning_rate": 7.082159624413146e-06,
"loss": 1.4497,
"step": 19510
},
{
"epoch": 2.0504201680672267,
"grad_norm": 8.375,
"learning_rate": 7.074334898278561e-06,
"loss": 1.151,
"step": 19520
},
{
"epoch": 2.051470588235294,
"grad_norm": 16.5,
"learning_rate": 7.066510172143975e-06,
"loss": 1.7034,
"step": 19530
},
{
"epoch": 2.052521008403361,
"grad_norm": 11.5625,
"learning_rate": 7.05868544600939e-06,
"loss": 0.9299,
"step": 19540
},
{
"epoch": 2.0535714285714284,
"grad_norm": 15.625,
"learning_rate": 7.050860719874805e-06,
"loss": 1.501,
"step": 19550
},
{
"epoch": 2.0546218487394956,
"grad_norm": 18.5,
"learning_rate": 7.043035993740219e-06,
"loss": 1.1872,
"step": 19560
},
{
"epoch": 2.055672268907563,
"grad_norm": 4.8125,
"learning_rate": 7.035211267605634e-06,
"loss": 0.9891,
"step": 19570
},
{
"epoch": 2.05672268907563,
"grad_norm": 12.75,
"learning_rate": 7.0273865414710495e-06,
"loss": 1.0196,
"step": 19580
},
{
"epoch": 2.0577731092436973,
"grad_norm": 19.0,
"learning_rate": 7.019561815336464e-06,
"loss": 1.3953,
"step": 19590
},
{
"epoch": 2.0588235294117645,
"grad_norm": 11.875,
"learning_rate": 7.011737089201879e-06,
"loss": 1.0446,
"step": 19600
},
{
"epoch": 2.0598739495798317,
"grad_norm": 14.3125,
"learning_rate": 7.003912363067293e-06,
"loss": 1.1449,
"step": 19610
},
{
"epoch": 2.060924369747899,
"grad_norm": 11.3125,
"learning_rate": 6.996087636932708e-06,
"loss": 1.6037,
"step": 19620
},
{
"epoch": 2.0619747899159666,
"grad_norm": 4.0,
"learning_rate": 6.988262910798123e-06,
"loss": 1.2387,
"step": 19630
},
{
"epoch": 2.0630252100840334,
"grad_norm": 4.5625,
"learning_rate": 6.980438184663537e-06,
"loss": 0.8792,
"step": 19640
},
{
"epoch": 2.064075630252101,
"grad_norm": 9.0625,
"learning_rate": 6.972613458528952e-06,
"loss": 1.1865,
"step": 19650
},
{
"epoch": 2.0651260504201683,
"grad_norm": 11.0625,
"learning_rate": 6.964788732394367e-06,
"loss": 1.3791,
"step": 19660
},
{
"epoch": 2.0661764705882355,
"grad_norm": 15.3125,
"learning_rate": 6.956964006259781e-06,
"loss": 1.3943,
"step": 19670
},
{
"epoch": 2.0672268907563027,
"grad_norm": 22.875,
"learning_rate": 6.949139280125197e-06,
"loss": 1.3977,
"step": 19680
},
{
"epoch": 2.06827731092437,
"grad_norm": 14.6875,
"learning_rate": 6.9413145539906115e-06,
"loss": 1.6686,
"step": 19690
},
{
"epoch": 2.069327731092437,
"grad_norm": 16.25,
"learning_rate": 6.933489827856026e-06,
"loss": 1.8928,
"step": 19700
},
{
"epoch": 2.0703781512605044,
"grad_norm": 13.1875,
"learning_rate": 6.925665101721441e-06,
"loss": 1.2138,
"step": 19710
},
{
"epoch": 2.0714285714285716,
"grad_norm": 17.5,
"learning_rate": 6.9178403755868554e-06,
"loss": 1.5655,
"step": 19720
},
{
"epoch": 2.072478991596639,
"grad_norm": 8.875,
"learning_rate": 6.910015649452269e-06,
"loss": 1.2248,
"step": 19730
},
{
"epoch": 2.073529411764706,
"grad_norm": 14.0625,
"learning_rate": 6.902190923317684e-06,
"loss": 1.3255,
"step": 19740
},
{
"epoch": 2.0745798319327733,
"grad_norm": 4.375,
"learning_rate": 6.8943661971830986e-06,
"loss": 1.1353,
"step": 19750
},
{
"epoch": 2.0756302521008405,
"grad_norm": 21.25,
"learning_rate": 6.886541471048513e-06,
"loss": 1.2565,
"step": 19760
},
{
"epoch": 2.0766806722689077,
"grad_norm": 13.0625,
"learning_rate": 6.878716744913928e-06,
"loss": 1.0822,
"step": 19770
},
{
"epoch": 2.077731092436975,
"grad_norm": 18.25,
"learning_rate": 6.870892018779343e-06,
"loss": 1.4547,
"step": 19780
},
{
"epoch": 2.078781512605042,
"grad_norm": 17.125,
"learning_rate": 6.863067292644758e-06,
"loss": 1.5542,
"step": 19790
},
{
"epoch": 2.0798319327731094,
"grad_norm": 12.0,
"learning_rate": 6.855242566510173e-06,
"loss": 1.2621,
"step": 19800
},
{
"epoch": 2.0808823529411766,
"grad_norm": 13.6875,
"learning_rate": 6.847417840375587e-06,
"loss": 1.1667,
"step": 19810
},
{
"epoch": 2.081932773109244,
"grad_norm": 14.8125,
"learning_rate": 6.839593114241002e-06,
"loss": 1.8507,
"step": 19820
},
{
"epoch": 2.082983193277311,
"grad_norm": 11.9375,
"learning_rate": 6.831768388106417e-06,
"loss": 1.0498,
"step": 19830
},
{
"epoch": 2.0840336134453783,
"grad_norm": 13.875,
"learning_rate": 6.823943661971831e-06,
"loss": 1.2932,
"step": 19840
},
{
"epoch": 2.0850840336134455,
"grad_norm": 12.9375,
"learning_rate": 6.816118935837246e-06,
"loss": 1.2906,
"step": 19850
},
{
"epoch": 2.0861344537815127,
"grad_norm": 12.1875,
"learning_rate": 6.808294209702661e-06,
"loss": 1.0509,
"step": 19860
},
{
"epoch": 2.08718487394958,
"grad_norm": 12.8125,
"learning_rate": 6.800469483568075e-06,
"loss": 1.0825,
"step": 19870
},
{
"epoch": 2.088235294117647,
"grad_norm": 19.25,
"learning_rate": 6.792644757433491e-06,
"loss": 1.3501,
"step": 19880
},
{
"epoch": 2.0892857142857144,
"grad_norm": 13.0625,
"learning_rate": 6.784820031298905e-06,
"loss": 1.639,
"step": 19890
},
{
"epoch": 2.0903361344537816,
"grad_norm": 26.0,
"learning_rate": 6.77699530516432e-06,
"loss": 1.3657,
"step": 19900
},
{
"epoch": 2.091386554621849,
"grad_norm": 19.375,
"learning_rate": 6.769170579029735e-06,
"loss": 1.62,
"step": 19910
},
{
"epoch": 2.092436974789916,
"grad_norm": 5.46875,
"learning_rate": 6.761345852895149e-06,
"loss": 1.158,
"step": 19920
},
{
"epoch": 2.0934873949579833,
"grad_norm": 4.78125,
"learning_rate": 6.753521126760564e-06,
"loss": 0.9588,
"step": 19930
},
{
"epoch": 2.0945378151260505,
"grad_norm": 15.75,
"learning_rate": 6.745696400625979e-06,
"loss": 1.3956,
"step": 19940
},
{
"epoch": 2.0955882352941178,
"grad_norm": 4.65625,
"learning_rate": 6.737871674491393e-06,
"loss": 1.192,
"step": 19950
},
{
"epoch": 2.096638655462185,
"grad_norm": 13.0625,
"learning_rate": 6.730046948356808e-06,
"loss": 1.3621,
"step": 19960
},
{
"epoch": 2.097689075630252,
"grad_norm": 16.0,
"learning_rate": 6.7222222222222235e-06,
"loss": 1.1402,
"step": 19970
},
{
"epoch": 2.0987394957983194,
"grad_norm": 12.0625,
"learning_rate": 6.714397496087638e-06,
"loss": 1.1546,
"step": 19980
},
{
"epoch": 2.0997899159663866,
"grad_norm": 4.84375,
"learning_rate": 6.706572769953053e-06,
"loss": 1.1781,
"step": 19990
},
{
"epoch": 2.100840336134454,
"grad_norm": 11.625,
"learning_rate": 6.698748043818467e-06,
"loss": 1.2507,
"step": 20000
},
{
"epoch": 2.101890756302521,
"grad_norm": 13.4375,
"learning_rate": 6.690923317683882e-06,
"loss": 1.3831,
"step": 20010
},
{
"epoch": 2.1029411764705883,
"grad_norm": 8.5,
"learning_rate": 6.683098591549296e-06,
"loss": 0.8563,
"step": 20020
},
{
"epoch": 2.1039915966386555,
"grad_norm": 15.6875,
"learning_rate": 6.6752738654147105e-06,
"loss": 1.4442,
"step": 20030
},
{
"epoch": 2.1050420168067228,
"grad_norm": 12.875,
"learning_rate": 6.667449139280125e-06,
"loss": 1.5468,
"step": 20040
},
{
"epoch": 2.10609243697479,
"grad_norm": 9.4375,
"learning_rate": 6.65962441314554e-06,
"loss": 1.151,
"step": 20050
},
{
"epoch": 2.107142857142857,
"grad_norm": 9.0625,
"learning_rate": 6.6517996870109545e-06,
"loss": 1.4075,
"step": 20060
},
{
"epoch": 2.1081932773109244,
"grad_norm": 20.25,
"learning_rate": 6.64397496087637e-06,
"loss": 1.3844,
"step": 20070
},
{
"epoch": 2.1092436974789917,
"grad_norm": 4.84375,
"learning_rate": 6.636150234741785e-06,
"loss": 1.6283,
"step": 20080
},
{
"epoch": 2.110294117647059,
"grad_norm": 13.1875,
"learning_rate": 6.628325508607199e-06,
"loss": 1.0903,
"step": 20090
},
{
"epoch": 2.111344537815126,
"grad_norm": 12.0,
"learning_rate": 6.620500782472614e-06,
"loss": 1.4893,
"step": 20100
},
{
"epoch": 2.1123949579831933,
"grad_norm": 12.8125,
"learning_rate": 6.612676056338029e-06,
"loss": 1.5213,
"step": 20110
},
{
"epoch": 2.1134453781512605,
"grad_norm": 13.9375,
"learning_rate": 6.604851330203443e-06,
"loss": 1.2929,
"step": 20120
},
{
"epoch": 2.1144957983193278,
"grad_norm": 11.4375,
"learning_rate": 6.597026604068858e-06,
"loss": 1.2527,
"step": 20130
},
{
"epoch": 2.115546218487395,
"grad_norm": 20.625,
"learning_rate": 6.5892018779342726e-06,
"loss": 1.4411,
"step": 20140
},
{
"epoch": 2.116596638655462,
"grad_norm": 4.09375,
"learning_rate": 6.581377151799687e-06,
"loss": 1.4455,
"step": 20150
},
{
"epoch": 2.1176470588235294,
"grad_norm": 18.875,
"learning_rate": 6.573552425665102e-06,
"loss": 1.6437,
"step": 20160
},
{
"epoch": 2.1186974789915967,
"grad_norm": 12.8125,
"learning_rate": 6.565727699530517e-06,
"loss": 1.1723,
"step": 20170
},
{
"epoch": 2.119747899159664,
"grad_norm": 13.5,
"learning_rate": 6.557902973395932e-06,
"loss": 1.3895,
"step": 20180
},
{
"epoch": 2.120798319327731,
"grad_norm": 3.9375,
"learning_rate": 6.550078247261347e-06,
"loss": 1.1102,
"step": 20190
},
{
"epoch": 2.1218487394957983,
"grad_norm": 16.75,
"learning_rate": 6.542253521126761e-06,
"loss": 1.5825,
"step": 20200
},
{
"epoch": 2.1228991596638656,
"grad_norm": 10.875,
"learning_rate": 6.534428794992176e-06,
"loss": 1.0678,
"step": 20210
},
{
"epoch": 2.1239495798319328,
"grad_norm": 10.25,
"learning_rate": 6.526604068857591e-06,
"loss": 1.5383,
"step": 20220
},
{
"epoch": 2.125,
"grad_norm": 18.5,
"learning_rate": 6.518779342723005e-06,
"loss": 0.979,
"step": 20230
},
{
"epoch": 2.1260504201680672,
"grad_norm": 11.4375,
"learning_rate": 6.51095461658842e-06,
"loss": 1.2101,
"step": 20240
},
{
"epoch": 2.1271008403361344,
"grad_norm": 5.4375,
"learning_rate": 6.503129890453835e-06,
"loss": 1.0491,
"step": 20250
},
{
"epoch": 2.1281512605042017,
"grad_norm": 9.375,
"learning_rate": 6.495305164319249e-06,
"loss": 0.8727,
"step": 20260
},
{
"epoch": 2.129201680672269,
"grad_norm": 24.25,
"learning_rate": 6.487480438184665e-06,
"loss": 1.3615,
"step": 20270
},
{
"epoch": 2.130252100840336,
"grad_norm": 3.390625,
"learning_rate": 6.479655712050079e-06,
"loss": 0.8916,
"step": 20280
},
{
"epoch": 2.1313025210084033,
"grad_norm": 10.0625,
"learning_rate": 6.471830985915494e-06,
"loss": 1.0757,
"step": 20290
},
{
"epoch": 2.1323529411764706,
"grad_norm": 13.5,
"learning_rate": 6.464006259780909e-06,
"loss": 1.4941,
"step": 20300
},
{
"epoch": 2.133403361344538,
"grad_norm": 4.5,
"learning_rate": 6.456181533646323e-06,
"loss": 1.1808,
"step": 20310
},
{
"epoch": 2.134453781512605,
"grad_norm": 13.6875,
"learning_rate": 6.448356807511737e-06,
"loss": 1.61,
"step": 20320
},
{
"epoch": 2.1355042016806722,
"grad_norm": 14.4375,
"learning_rate": 6.440532081377152e-06,
"loss": 1.0927,
"step": 20330
},
{
"epoch": 2.1365546218487395,
"grad_norm": 12.4375,
"learning_rate": 6.4327073552425665e-06,
"loss": 0.9832,
"step": 20340
},
{
"epoch": 2.1376050420168067,
"grad_norm": 12.1875,
"learning_rate": 6.424882629107981e-06,
"loss": 1.8432,
"step": 20350
},
{
"epoch": 2.138655462184874,
"grad_norm": 13.8125,
"learning_rate": 6.417057902973396e-06,
"loss": 1.3069,
"step": 20360
},
{
"epoch": 2.139705882352941,
"grad_norm": 139.0,
"learning_rate": 6.409233176838811e-06,
"loss": 1.438,
"step": 20370
},
{
"epoch": 2.1407563025210083,
"grad_norm": 14.0,
"learning_rate": 6.401408450704226e-06,
"loss": 1.1414,
"step": 20380
},
{
"epoch": 2.1418067226890756,
"grad_norm": 8.875,
"learning_rate": 6.3935837245696406e-06,
"loss": 1.1154,
"step": 20390
},
{
"epoch": 2.142857142857143,
"grad_norm": 12.3125,
"learning_rate": 6.385758998435055e-06,
"loss": 1.225,
"step": 20400
},
{
"epoch": 2.14390756302521,
"grad_norm": 12.75,
"learning_rate": 6.37793427230047e-06,
"loss": 0.8264,
"step": 20410
},
{
"epoch": 2.1449579831932772,
"grad_norm": 41.75,
"learning_rate": 6.3701095461658845e-06,
"loss": 1.3104,
"step": 20420
},
{
"epoch": 2.1460084033613445,
"grad_norm": 22.75,
"learning_rate": 6.362284820031299e-06,
"loss": 1.6172,
"step": 20430
},
{
"epoch": 2.1470588235294117,
"grad_norm": 16.875,
"learning_rate": 6.354460093896714e-06,
"loss": 1.4141,
"step": 20440
},
{
"epoch": 2.148109243697479,
"grad_norm": 17.0,
"learning_rate": 6.3466353677621285e-06,
"loss": 1.0977,
"step": 20450
},
{
"epoch": 2.149159663865546,
"grad_norm": 11.375,
"learning_rate": 6.338810641627543e-06,
"loss": 1.2796,
"step": 20460
},
{
"epoch": 2.1502100840336134,
"grad_norm": 15.875,
"learning_rate": 6.330985915492959e-06,
"loss": 1.1421,
"step": 20470
},
{
"epoch": 2.1512605042016806,
"grad_norm": 11.25,
"learning_rate": 6.323161189358373e-06,
"loss": 1.1076,
"step": 20480
},
{
"epoch": 2.152310924369748,
"grad_norm": 4.4375,
"learning_rate": 6.315336463223788e-06,
"loss": 1.0524,
"step": 20490
},
{
"epoch": 2.153361344537815,
"grad_norm": 15.1875,
"learning_rate": 6.307511737089203e-06,
"loss": 1.2327,
"step": 20500
},
{
"epoch": 2.1544117647058822,
"grad_norm": 16.625,
"learning_rate": 6.299687010954617e-06,
"loss": 1.4369,
"step": 20510
},
{
"epoch": 2.1554621848739495,
"grad_norm": 12.6875,
"learning_rate": 6.291862284820032e-06,
"loss": 1.1132,
"step": 20520
},
{
"epoch": 2.1565126050420167,
"grad_norm": 17.0,
"learning_rate": 6.2840375586854466e-06,
"loss": 1.4176,
"step": 20530
},
{
"epoch": 2.157563025210084,
"grad_norm": 11.25,
"learning_rate": 6.276212832550861e-06,
"loss": 1.5324,
"step": 20540
},
{
"epoch": 2.158613445378151,
"grad_norm": 9.6875,
"learning_rate": 6.268388106416276e-06,
"loss": 1.7249,
"step": 20550
},
{
"epoch": 2.1596638655462184,
"grad_norm": 12.125,
"learning_rate": 6.260563380281691e-06,
"loss": 1.595,
"step": 20560
},
{
"epoch": 2.1607142857142856,
"grad_norm": 8.9375,
"learning_rate": 6.252738654147106e-06,
"loss": 1.4942,
"step": 20570
},
{
"epoch": 2.161764705882353,
"grad_norm": 14.3125,
"learning_rate": 6.244913928012521e-06,
"loss": 1.3773,
"step": 20580
},
{
"epoch": 2.16281512605042,
"grad_norm": 28.125,
"learning_rate": 6.237089201877935e-06,
"loss": 1.6441,
"step": 20590
},
{
"epoch": 2.1638655462184873,
"grad_norm": 16.75,
"learning_rate": 6.22926447574335e-06,
"loss": 1.139,
"step": 20600
},
{
"epoch": 2.1649159663865545,
"grad_norm": 12.5,
"learning_rate": 6.221439749608764e-06,
"loss": 1.5922,
"step": 20610
},
{
"epoch": 2.1659663865546217,
"grad_norm": 2.984375,
"learning_rate": 6.2136150234741784e-06,
"loss": 0.9379,
"step": 20620
},
{
"epoch": 2.167016806722689,
"grad_norm": 10.625,
"learning_rate": 6.205790297339593e-06,
"loss": 1.0389,
"step": 20630
},
{
"epoch": 2.168067226890756,
"grad_norm": 28.0,
"learning_rate": 6.197965571205008e-06,
"loss": 1.2278,
"step": 20640
},
{
"epoch": 2.1691176470588234,
"grad_norm": 16.0,
"learning_rate": 6.190140845070422e-06,
"loss": 1.1912,
"step": 20650
},
{
"epoch": 2.1701680672268906,
"grad_norm": 14.75,
"learning_rate": 6.182316118935838e-06,
"loss": 1.3608,
"step": 20660
},
{
"epoch": 2.171218487394958,
"grad_norm": 21.125,
"learning_rate": 6.1744913928012525e-06,
"loss": 1.0686,
"step": 20670
},
{
"epoch": 2.172268907563025,
"grad_norm": 10.125,
"learning_rate": 6.166666666666667e-06,
"loss": 1.2551,
"step": 20680
},
{
"epoch": 2.1733193277310923,
"grad_norm": 14.4375,
"learning_rate": 6.158841940532082e-06,
"loss": 1.511,
"step": 20690
},
{
"epoch": 2.1743697478991595,
"grad_norm": 11.5625,
"learning_rate": 6.1510172143974965e-06,
"loss": 1.4704,
"step": 20700
},
{
"epoch": 2.1754201680672267,
"grad_norm": 12.9375,
"learning_rate": 6.143192488262911e-06,
"loss": 1.6788,
"step": 20710
},
{
"epoch": 2.176470588235294,
"grad_norm": 22.25,
"learning_rate": 6.135367762128326e-06,
"loss": 1.4955,
"step": 20720
},
{
"epoch": 2.177521008403361,
"grad_norm": 16.75,
"learning_rate": 6.1275430359937405e-06,
"loss": 1.3728,
"step": 20730
},
{
"epoch": 2.1785714285714284,
"grad_norm": 13.25,
"learning_rate": 6.119718309859155e-06,
"loss": 1.213,
"step": 20740
},
{
"epoch": 2.1796218487394956,
"grad_norm": 14.625,
"learning_rate": 6.11189358372457e-06,
"loss": 1.6045,
"step": 20750
},
{
"epoch": 2.180672268907563,
"grad_norm": 13.75,
"learning_rate": 6.104068857589985e-06,
"loss": 1.4784,
"step": 20760
},
{
"epoch": 2.18172268907563,
"grad_norm": 11.0,
"learning_rate": 6.0962441314554e-06,
"loss": 0.9013,
"step": 20770
},
{
"epoch": 2.1827731092436973,
"grad_norm": 20.0,
"learning_rate": 6.0884194053208146e-06,
"loss": 1.4597,
"step": 20780
},
{
"epoch": 2.1838235294117645,
"grad_norm": 75.0,
"learning_rate": 6.080594679186229e-06,
"loss": 1.5331,
"step": 20790
},
{
"epoch": 2.184873949579832,
"grad_norm": 11.5625,
"learning_rate": 6.072769953051644e-06,
"loss": 1.0339,
"step": 20800
},
{
"epoch": 2.185924369747899,
"grad_norm": 12.375,
"learning_rate": 6.0649452269170585e-06,
"loss": 0.9868,
"step": 20810
},
{
"epoch": 2.1869747899159666,
"grad_norm": 14.1875,
"learning_rate": 6.057120500782473e-06,
"loss": 0.9225,
"step": 20820
},
{
"epoch": 2.1880252100840334,
"grad_norm": 13.5,
"learning_rate": 6.049295774647888e-06,
"loss": 1.4781,
"step": 20830
},
{
"epoch": 2.189075630252101,
"grad_norm": 13.625,
"learning_rate": 6.0414710485133025e-06,
"loss": 1.2404,
"step": 20840
},
{
"epoch": 2.190126050420168,
"grad_norm": 13.6875,
"learning_rate": 6.033646322378717e-06,
"loss": 1.6465,
"step": 20850
},
{
"epoch": 2.1911764705882355,
"grad_norm": 14.1875,
"learning_rate": 6.025821596244133e-06,
"loss": 1.2728,
"step": 20860
},
{
"epoch": 2.1922268907563027,
"grad_norm": 3.90625,
"learning_rate": 6.017996870109547e-06,
"loss": 1.2817,
"step": 20870
},
{
"epoch": 2.19327731092437,
"grad_norm": 6.03125,
"learning_rate": 6.010172143974962e-06,
"loss": 1.1268,
"step": 20880
},
{
"epoch": 2.194327731092437,
"grad_norm": 6.40625,
"learning_rate": 6.002347417840377e-06,
"loss": 1.4204,
"step": 20890
},
{
"epoch": 2.1953781512605044,
"grad_norm": 16.0,
"learning_rate": 5.99452269170579e-06,
"loss": 1.413,
"step": 20900
},
{
"epoch": 2.1964285714285716,
"grad_norm": 19.625,
"learning_rate": 5.986697965571205e-06,
"loss": 1.2378,
"step": 20910
},
{
"epoch": 2.197478991596639,
"grad_norm": 12.0625,
"learning_rate": 5.97887323943662e-06,
"loss": 1.3511,
"step": 20920
},
{
"epoch": 2.198529411764706,
"grad_norm": 11.3125,
"learning_rate": 5.971048513302034e-06,
"loss": 1.9652,
"step": 20930
},
{
"epoch": 2.1995798319327733,
"grad_norm": 17.875,
"learning_rate": 5.963223787167449e-06,
"loss": 1.3321,
"step": 20940
},
{
"epoch": 2.2006302521008405,
"grad_norm": 11.625,
"learning_rate": 5.955399061032864e-06,
"loss": 0.9774,
"step": 20950
},
{
"epoch": 2.2016806722689077,
"grad_norm": 79.0,
"learning_rate": 5.947574334898279e-06,
"loss": 1.2817,
"step": 20960
},
{
"epoch": 2.202731092436975,
"grad_norm": 24.125,
"learning_rate": 5.939749608763694e-06,
"loss": 1.328,
"step": 20970
},
{
"epoch": 2.203781512605042,
"grad_norm": 4.15625,
"learning_rate": 5.9319248826291085e-06,
"loss": 1.2104,
"step": 20980
},
{
"epoch": 2.2048319327731094,
"grad_norm": 12.6875,
"learning_rate": 5.924100156494523e-06,
"loss": 1.7662,
"step": 20990
},
{
"epoch": 2.2058823529411766,
"grad_norm": 25.875,
"learning_rate": 5.916275430359938e-06,
"loss": 1.0362,
"step": 21000
},
{
"epoch": 2.206932773109244,
"grad_norm": 16.75,
"learning_rate": 5.9084507042253524e-06,
"loss": 1.5403,
"step": 21010
},
{
"epoch": 2.207983193277311,
"grad_norm": 17.0,
"learning_rate": 5.900625978090767e-06,
"loss": 1.559,
"step": 21020
},
{
"epoch": 2.2090336134453783,
"grad_norm": 11.625,
"learning_rate": 5.892801251956182e-06,
"loss": 1.4248,
"step": 21030
},
{
"epoch": 2.2100840336134455,
"grad_norm": 26.875,
"learning_rate": 5.884976525821596e-06,
"loss": 1.8425,
"step": 21040
},
{
"epoch": 2.2111344537815127,
"grad_norm": 15.0625,
"learning_rate": 5.877151799687011e-06,
"loss": 1.2505,
"step": 21050
},
{
"epoch": 2.21218487394958,
"grad_norm": 13.0625,
"learning_rate": 5.8693270735524265e-06,
"loss": 1.2495,
"step": 21060
},
{
"epoch": 2.213235294117647,
"grad_norm": 12.1875,
"learning_rate": 5.861502347417841e-06,
"loss": 1.0802,
"step": 21070
},
{
"epoch": 2.2142857142857144,
"grad_norm": 66.5,
"learning_rate": 5.853677621283256e-06,
"loss": 1.3109,
"step": 21080
},
{
"epoch": 2.2153361344537816,
"grad_norm": 21.0,
"learning_rate": 5.8458528951486705e-06,
"loss": 1.2991,
"step": 21090
},
{
"epoch": 2.216386554621849,
"grad_norm": 12.9375,
"learning_rate": 5.838028169014085e-06,
"loss": 1.0582,
"step": 21100
},
{
"epoch": 2.217436974789916,
"grad_norm": 8.75,
"learning_rate": 5.8302034428795e-06,
"loss": 1.3465,
"step": 21110
},
{
"epoch": 2.2184873949579833,
"grad_norm": 12.0,
"learning_rate": 5.8223787167449145e-06,
"loss": 1.6646,
"step": 21120
},
{
"epoch": 2.2195378151260505,
"grad_norm": 12.4375,
"learning_rate": 5.814553990610329e-06,
"loss": 0.9414,
"step": 21130
},
{
"epoch": 2.2205882352941178,
"grad_norm": 4.25,
"learning_rate": 5.806729264475744e-06,
"loss": 1.1275,
"step": 21140
},
{
"epoch": 2.221638655462185,
"grad_norm": 17.5,
"learning_rate": 5.798904538341159e-06,
"loss": 1.2562,
"step": 21150
},
{
"epoch": 2.222689075630252,
"grad_norm": 28.125,
"learning_rate": 5.791079812206574e-06,
"loss": 1.1027,
"step": 21160
},
{
"epoch": 2.2237394957983194,
"grad_norm": 13.375,
"learning_rate": 5.783255086071989e-06,
"loss": 1.8837,
"step": 21170
},
{
"epoch": 2.2247899159663866,
"grad_norm": 13.3125,
"learning_rate": 5.775430359937403e-06,
"loss": 1.1867,
"step": 21180
},
{
"epoch": 2.225840336134454,
"grad_norm": 13.9375,
"learning_rate": 5.767605633802818e-06,
"loss": 1.3971,
"step": 21190
},
{
"epoch": 2.226890756302521,
"grad_norm": 22.5,
"learning_rate": 5.759780907668232e-06,
"loss": 1.2463,
"step": 21200
},
{
"epoch": 2.2279411764705883,
"grad_norm": 15.9375,
"learning_rate": 5.751956181533646e-06,
"loss": 1.6531,
"step": 21210
},
{
"epoch": 2.2289915966386555,
"grad_norm": 7.40625,
"learning_rate": 5.744131455399061e-06,
"loss": 1.3287,
"step": 21220
},
{
"epoch": 2.2300420168067228,
"grad_norm": 7.90625,
"learning_rate": 5.736306729264476e-06,
"loss": 0.915,
"step": 21230
},
{
"epoch": 2.23109243697479,
"grad_norm": 8.6875,
"learning_rate": 5.72848200312989e-06,
"loss": 1.3027,
"step": 21240
},
{
"epoch": 2.232142857142857,
"grad_norm": 22.0,
"learning_rate": 5.720657276995306e-06,
"loss": 1.6918,
"step": 21250
},
{
"epoch": 2.2331932773109244,
"grad_norm": 9.75,
"learning_rate": 5.7128325508607205e-06,
"loss": 0.9006,
"step": 21260
},
{
"epoch": 2.2342436974789917,
"grad_norm": 14.4375,
"learning_rate": 5.705007824726135e-06,
"loss": 1.3935,
"step": 21270
},
{
"epoch": 2.235294117647059,
"grad_norm": 12.6875,
"learning_rate": 5.69718309859155e-06,
"loss": 1.321,
"step": 21280
},
{
"epoch": 2.236344537815126,
"grad_norm": 20.625,
"learning_rate": 5.689358372456964e-06,
"loss": 0.9012,
"step": 21290
},
{
"epoch": 2.2373949579831933,
"grad_norm": 9.9375,
"learning_rate": 5.681533646322379e-06,
"loss": 0.8183,
"step": 21300
},
{
"epoch": 2.2384453781512605,
"grad_norm": 25.375,
"learning_rate": 5.673708920187794e-06,
"loss": 1.538,
"step": 21310
},
{
"epoch": 2.2394957983193278,
"grad_norm": 15.625,
"learning_rate": 5.665884194053208e-06,
"loss": 1.6829,
"step": 21320
},
{
"epoch": 2.240546218487395,
"grad_norm": 11.25,
"learning_rate": 5.658059467918623e-06,
"loss": 1.3246,
"step": 21330
},
{
"epoch": 2.241596638655462,
"grad_norm": 5.46875,
"learning_rate": 5.650234741784038e-06,
"loss": 0.9689,
"step": 21340
},
{
"epoch": 2.2426470588235294,
"grad_norm": 12.375,
"learning_rate": 5.642410015649453e-06,
"loss": 1.1202,
"step": 21350
},
{
"epoch": 2.2436974789915967,
"grad_norm": 11.6875,
"learning_rate": 5.634585289514868e-06,
"loss": 1.2305,
"step": 21360
},
{
"epoch": 2.244747899159664,
"grad_norm": 5.03125,
"learning_rate": 5.6267605633802825e-06,
"loss": 0.9779,
"step": 21370
},
{
"epoch": 2.245798319327731,
"grad_norm": 14.25,
"learning_rate": 5.618935837245697e-06,
"loss": 1.0515,
"step": 21380
},
{
"epoch": 2.2468487394957983,
"grad_norm": 16.5,
"learning_rate": 5.611111111111112e-06,
"loss": 1.0545,
"step": 21390
},
{
"epoch": 2.2478991596638656,
"grad_norm": 12.9375,
"learning_rate": 5.6032863849765264e-06,
"loss": 1.1076,
"step": 21400
},
{
"epoch": 2.2489495798319328,
"grad_norm": 15.4375,
"learning_rate": 5.595461658841941e-06,
"loss": 1.0142,
"step": 21410
},
{
"epoch": 2.25,
"grad_norm": 15.875,
"learning_rate": 5.587636932707356e-06,
"loss": 1.5,
"step": 21420
}
],
"logging_steps": 10,
"max_steps": 28560,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 7140,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}