{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1161,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008613264427217916,
"grad_norm": 599.9192504882812,
"learning_rate": 5.714285714285715e-07,
"loss": 6.0815,
"step": 1
},
{
"epoch": 0.0017226528854435831,
"grad_norm": 575.8373413085938,
"learning_rate": 1.142857142857143e-06,
"loss": 5.5586,
"step": 2
},
{
"epoch": 0.002583979328165375,
"grad_norm": 564.647216796875,
"learning_rate": 1.7142857142857145e-06,
"loss": 5.7547,
"step": 3
},
{
"epoch": 0.0034453057708871662,
"grad_norm": 407.28704833984375,
"learning_rate": 2.285714285714286e-06,
"loss": 5.438,
"step": 4
},
{
"epoch": 0.004306632213608958,
"grad_norm": 371.2016906738281,
"learning_rate": 2.8571428571428573e-06,
"loss": 4.8455,
"step": 5
},
{
"epoch": 0.00516795865633075,
"grad_norm": 371.3276672363281,
"learning_rate": 3.428571428571429e-06,
"loss": 3.7534,
"step": 6
},
{
"epoch": 0.006029285099052541,
"grad_norm": 1765.4417724609375,
"learning_rate": 4.000000000000001e-06,
"loss": 3.7887,
"step": 7
},
{
"epoch": 0.0068906115417743325,
"grad_norm": 293.5586242675781,
"learning_rate": 4.571428571428572e-06,
"loss": 3.0103,
"step": 8
},
{
"epoch": 0.007751937984496124,
"grad_norm": 490.7660827636719,
"learning_rate": 5.142857142857142e-06,
"loss": 2.7897,
"step": 9
},
{
"epoch": 0.008613264427217916,
"grad_norm": 211.81118774414062,
"learning_rate": 5.7142857142857145e-06,
"loss": 2.951,
"step": 10
},
{
"epoch": 0.009474590869939707,
"grad_norm": 83.9891128540039,
"learning_rate": 6.285714285714286e-06,
"loss": 2.6668,
"step": 11
},
{
"epoch": 0.0103359173126615,
"grad_norm": 296.01885986328125,
"learning_rate": 6.857142857142858e-06,
"loss": 2.4551,
"step": 12
},
{
"epoch": 0.01119724375538329,
"grad_norm": 44.15952682495117,
"learning_rate": 7.428571428571429e-06,
"loss": 2.3069,
"step": 13
},
{
"epoch": 0.012058570198105082,
"grad_norm": 28.404870986938477,
"learning_rate": 8.000000000000001e-06,
"loss": 2.1574,
"step": 14
},
{
"epoch": 0.012919896640826873,
"grad_norm": 18892.63671875,
"learning_rate": 8.571428571428571e-06,
"loss": 8.4092,
"step": 15
},
{
"epoch": 0.013781223083548665,
"grad_norm": 13.016464233398438,
"learning_rate": 9.142857142857144e-06,
"loss": 2.0747,
"step": 16
},
{
"epoch": 0.014642549526270457,
"grad_norm": 24.35378074645996,
"learning_rate": 9.714285714285715e-06,
"loss": 2.1555,
"step": 17
},
{
"epoch": 0.015503875968992248,
"grad_norm": 129.4594268798828,
"learning_rate": 1.0285714285714285e-05,
"loss": 2.2513,
"step": 18
},
{
"epoch": 0.01636520241171404,
"grad_norm": 312.46337890625,
"learning_rate": 1.0857142857142858e-05,
"loss": 2.4386,
"step": 19
},
{
"epoch": 0.017226528854435832,
"grad_norm": 1113.3402099609375,
"learning_rate": 1.1428571428571429e-05,
"loss": 2.6086,
"step": 20
},
{
"epoch": 0.01808785529715762,
"grad_norm": 758.8408203125,
"learning_rate": 1.2e-05,
"loss": 2.3477,
"step": 21
},
{
"epoch": 0.018949181739879414,
"grad_norm": 90.13806915283203,
"learning_rate": 1.2571428571428572e-05,
"loss": 2.0906,
"step": 22
},
{
"epoch": 0.019810508182601206,
"grad_norm": 44.2735710144043,
"learning_rate": 1.3142857142857145e-05,
"loss": 2.0562,
"step": 23
},
{
"epoch": 0.020671834625323,
"grad_norm": 42.16297149658203,
"learning_rate": 1.3714285714285716e-05,
"loss": 2.0478,
"step": 24
},
{
"epoch": 0.02153316106804479,
"grad_norm": 35.452392578125,
"learning_rate": 1.4285714285714287e-05,
"loss": 2.0479,
"step": 25
},
{
"epoch": 0.02239448751076658,
"grad_norm": 23.058853149414062,
"learning_rate": 1.4857142857142858e-05,
"loss": 1.9681,
"step": 26
},
{
"epoch": 0.023255813953488372,
"grad_norm": 17.171300888061523,
"learning_rate": 1.542857142857143e-05,
"loss": 1.917,
"step": 27
},
{
"epoch": 0.024117140396210164,
"grad_norm": 12.922857284545898,
"learning_rate": 1.6000000000000003e-05,
"loss": 1.8992,
"step": 28
},
{
"epoch": 0.024978466838931956,
"grad_norm": 10.135607719421387,
"learning_rate": 1.6571428571428574e-05,
"loss": 1.8757,
"step": 29
},
{
"epoch": 0.025839793281653745,
"grad_norm": 8.323143005371094,
"learning_rate": 1.7142857142857142e-05,
"loss": 1.8451,
"step": 30
},
{
"epoch": 0.026701119724375538,
"grad_norm": 5.9443793296813965,
"learning_rate": 1.7714285714285717e-05,
"loss": 1.7739,
"step": 31
},
{
"epoch": 0.02756244616709733,
"grad_norm": 4.917455196380615,
"learning_rate": 1.8285714285714288e-05,
"loss": 1.7803,
"step": 32
},
{
"epoch": 0.028423772609819122,
"grad_norm": 5.2614240646362305,
"learning_rate": 1.885714285714286e-05,
"loss": 1.7771,
"step": 33
},
{
"epoch": 0.029285099052540915,
"grad_norm": 4.4380316734313965,
"learning_rate": 1.942857142857143e-05,
"loss": 1.7563,
"step": 34
},
{
"epoch": 0.030146425495262703,
"grad_norm": 4.11315393447876,
"learning_rate": 2e-05,
"loss": 1.7493,
"step": 35
},
{
"epoch": 0.031007751937984496,
"grad_norm": 3.8529300689697266,
"learning_rate": 1.9999961078220587e-05,
"loss": 1.7646,
"step": 36
},
{
"epoch": 0.03186907838070629,
"grad_norm": 3.429624080657959,
"learning_rate": 1.9999844313185335e-05,
"loss": 1.7406,
"step": 37
},
{
"epoch": 0.03273040482342808,
"grad_norm": 3.103177785873413,
"learning_rate": 1.9999649705803178e-05,
"loss": 1.7295,
"step": 38
},
{
"epoch": 0.03359173126614987,
"grad_norm": 2.6700687408447266,
"learning_rate": 1.9999377257589012e-05,
"loss": 1.6595,
"step": 39
},
{
"epoch": 0.034453057708871665,
"grad_norm": 2.35770583152771,
"learning_rate": 1.999902697066367e-05,
"loss": 1.6766,
"step": 40
},
{
"epoch": 0.03531438415159346,
"grad_norm": 2.01990008354187,
"learning_rate": 1.9998598847753918e-05,
"loss": 1.6472,
"step": 41
},
{
"epoch": 0.03617571059431524,
"grad_norm": 1.8652081489562988,
"learning_rate": 1.9998092892192403e-05,
"loss": 1.6448,
"step": 42
},
{
"epoch": 0.037037037037037035,
"grad_norm": 1.9675177335739136,
"learning_rate": 1.999750910791767e-05,
"loss": 1.6573,
"step": 43
},
{
"epoch": 0.03789836347975883,
"grad_norm": 2.176922082901001,
"learning_rate": 1.9996847499474102e-05,
"loss": 1.6364,
"step": 44
},
{
"epoch": 0.03875968992248062,
"grad_norm": 2.259882688522339,
"learning_rate": 1.99961080720119e-05,
"loss": 1.6164,
"step": 45
},
{
"epoch": 0.03962101636520241,
"grad_norm": 2.2850229740142822,
"learning_rate": 1.9995290831287032e-05,
"loss": 1.5988,
"step": 46
},
{
"epoch": 0.040482342807924204,
"grad_norm": 2.1443259716033936,
"learning_rate": 1.9994395783661177e-05,
"loss": 1.5978,
"step": 47
},
{
"epoch": 0.041343669250646,
"grad_norm": 2.1305596828460693,
"learning_rate": 1.9993422936101715e-05,
"loss": 1.6235,
"step": 48
},
{
"epoch": 0.04220499569336779,
"grad_norm": 2.253634214401245,
"learning_rate": 1.9992372296181637e-05,
"loss": 1.6344,
"step": 49
},
{
"epoch": 0.04306632213608958,
"grad_norm": 1.944365382194519,
"learning_rate": 1.9991243872079495e-05,
"loss": 1.5638,
"step": 50
},
{
"epoch": 0.04392764857881137,
"grad_norm": 1.8713358640670776,
"learning_rate": 1.9990037672579347e-05,
"loss": 1.59,
"step": 51
},
{
"epoch": 0.04478897502153316,
"grad_norm": 1.4943434000015259,
"learning_rate": 1.9988753707070675e-05,
"loss": 1.6001,
"step": 52
},
{
"epoch": 0.04565030146425495,
"grad_norm": 1.5794744491577148,
"learning_rate": 1.9987391985548326e-05,
"loss": 1.5618,
"step": 53
},
{
"epoch": 0.046511627906976744,
"grad_norm": 1.5509154796600342,
"learning_rate": 1.998595251861243e-05,
"loss": 1.5514,
"step": 54
},
{
"epoch": 0.047372954349698536,
"grad_norm": 1.7027164697647095,
"learning_rate": 1.9984435317468298e-05,
"loss": 1.5591,
"step": 55
},
{
"epoch": 0.04823428079242033,
"grad_norm": 1.851098656654358,
"learning_rate": 1.9982840393926374e-05,
"loss": 1.6171,
"step": 56
},
{
"epoch": 0.04909560723514212,
"grad_norm": 1.8336509466171265,
"learning_rate": 1.9981167760402104e-05,
"loss": 1.6013,
"step": 57
},
{
"epoch": 0.04995693367786391,
"grad_norm": 1.5762431621551514,
"learning_rate": 1.997941742991587e-05,
"loss": 1.5275,
"step": 58
},
{
"epoch": 0.050818260120585705,
"grad_norm": 1.5044102668762207,
"learning_rate": 1.997758941609286e-05,
"loss": 1.5214,
"step": 59
},
{
"epoch": 0.05167958656330749,
"grad_norm": 1.93209707736969,
"learning_rate": 1.9975683733162987e-05,
"loss": 1.5819,
"step": 60
},
{
"epoch": 0.05254091300602928,
"grad_norm": 1.545015811920166,
"learning_rate": 1.9973700395960765e-05,
"loss": 1.5227,
"step": 61
},
{
"epoch": 0.053402239448751075,
"grad_norm": 1.2270804643630981,
"learning_rate": 1.9971639419925197e-05,
"loss": 1.5119,
"step": 62
},
{
"epoch": 0.05426356589147287,
"grad_norm": 1.7906986474990845,
"learning_rate": 1.9969500821099654e-05,
"loss": 1.5528,
"step": 63
},
{
"epoch": 0.05512489233419466,
"grad_norm": 1.5916507244110107,
"learning_rate": 1.996728461613175e-05,
"loss": 1.5367,
"step": 64
},
{
"epoch": 0.05598621877691645,
"grad_norm": 1.413952350616455,
"learning_rate": 1.996499082227321e-05,
"loss": 1.5247,
"step": 65
},
{
"epoch": 0.056847545219638244,
"grad_norm": 1.3606282472610474,
"learning_rate": 1.996261945737975e-05,
"loss": 1.459,
"step": 66
},
{
"epoch": 0.05770887166236004,
"grad_norm": 1.327688455581665,
"learning_rate": 1.996017053991091e-05,
"loss": 1.5062,
"step": 67
},
{
"epoch": 0.05857019810508183,
"grad_norm": 1.2740212678909302,
"learning_rate": 1.995764408892994e-05,
"loss": 1.478,
"step": 68
},
{
"epoch": 0.059431524547803614,
"grad_norm": 1.1678736209869385,
"learning_rate": 1.995504012410363e-05,
"loss": 1.4755,
"step": 69
},
{
"epoch": 0.06029285099052541,
"grad_norm": 1.574155569076538,
"learning_rate": 1.995235866570217e-05,
"loss": 1.5191,
"step": 70
},
{
"epoch": 0.0611541774332472,
"grad_norm": 1.2211532592773438,
"learning_rate": 1.9949599734598993e-05,
"loss": 1.4561,
"step": 71
},
{
"epoch": 0.06201550387596899,
"grad_norm": 1.313081979751587,
"learning_rate": 1.994676335227059e-05,
"loss": 1.502,
"step": 72
},
{
"epoch": 0.06287683031869079,
"grad_norm": 1.2082189321517944,
"learning_rate": 1.9943849540796375e-05,
"loss": 1.4598,
"step": 73
},
{
"epoch": 0.06373815676141258,
"grad_norm": 1.2929805517196655,
"learning_rate": 1.9940858322858493e-05,
"loss": 1.4993,
"step": 74
},
{
"epoch": 0.06459948320413436,
"grad_norm": 1.199054479598999,
"learning_rate": 1.9937789721741654e-05,
"loss": 1.4904,
"step": 75
},
{
"epoch": 0.06546080964685616,
"grad_norm": 1.350379467010498,
"learning_rate": 1.9934643761332933e-05,
"loss": 1.4534,
"step": 76
},
{
"epoch": 0.06632213608957795,
"grad_norm": 1.1363669633865356,
"learning_rate": 1.9931420466121613e-05,
"loss": 1.453,
"step": 77
},
{
"epoch": 0.06718346253229975,
"grad_norm": 1.1514127254486084,
"learning_rate": 1.9928119861198962e-05,
"loss": 1.4607,
"step": 78
},
{
"epoch": 0.06804478897502153,
"grad_norm": 1.0154885053634644,
"learning_rate": 1.9924741972258076e-05,
"loss": 1.4401,
"step": 79
},
{
"epoch": 0.06890611541774333,
"grad_norm": 1.055841326713562,
"learning_rate": 1.9921286825593632e-05,
"loss": 1.48,
"step": 80
},
{
"epoch": 0.06976744186046512,
"grad_norm": 1.3121042251586914,
"learning_rate": 1.9917754448101725e-05,
"loss": 1.4263,
"step": 81
},
{
"epoch": 0.07062876830318691,
"grad_norm": 1.102817416191101,
"learning_rate": 1.9914144867279644e-05,
"loss": 1.4401,
"step": 82
},
{
"epoch": 0.0714900947459087,
"grad_norm": 1.0836132764816284,
"learning_rate": 1.9910458111225645e-05,
"loss": 1.4866,
"step": 83
},
{
"epoch": 0.07235142118863049,
"grad_norm": 1.0442920923233032,
"learning_rate": 1.990669420863875e-05,
"loss": 1.4162,
"step": 84
},
{
"epoch": 0.07321274763135228,
"grad_norm": 1.0844299793243408,
"learning_rate": 1.9902853188818518e-05,
"loss": 1.4707,
"step": 85
},
{
"epoch": 0.07407407407407407,
"grad_norm": 0.993617057800293,
"learning_rate": 1.9898935081664814e-05,
"loss": 1.4567,
"step": 86
},
{
"epoch": 0.07493540051679587,
"grad_norm": 1.103285312652588,
"learning_rate": 1.9894939917677577e-05,
"loss": 1.4249,
"step": 87
},
{
"epoch": 0.07579672695951765,
"grad_norm": 1.0927506685256958,
"learning_rate": 1.9890867727956587e-05,
"loss": 1.4294,
"step": 88
},
{
"epoch": 0.07665805340223945,
"grad_norm": 1.0682389736175537,
"learning_rate": 1.988671854420122e-05,
"loss": 1.3915,
"step": 89
},
{
"epoch": 0.07751937984496124,
"grad_norm": 1.0784050226211548,
"learning_rate": 1.9882492398710192e-05,
"loss": 1.4445,
"step": 90
},
{
"epoch": 0.07838070628768304,
"grad_norm": 1.0482838153839111,
"learning_rate": 1.987818932438133e-05,
"loss": 1.3939,
"step": 91
},
{
"epoch": 0.07924203273040482,
"grad_norm": 1.1594949960708618,
"learning_rate": 1.987380935471129e-05,
"loss": 1.4543,
"step": 92
},
{
"epoch": 0.08010335917312661,
"grad_norm": 1.0159746408462524,
"learning_rate": 1.986935252379532e-05,
"loss": 1.4048,
"step": 93
},
{
"epoch": 0.08096468561584841,
"grad_norm": 1.1461023092269897,
"learning_rate": 1.9864818866326978e-05,
"loss": 1.4314,
"step": 94
},
{
"epoch": 0.0818260120585702,
"grad_norm": 1.1117491722106934,
"learning_rate": 1.9860208417597863e-05,
"loss": 1.4383,
"step": 95
},
{
"epoch": 0.082687338501292,
"grad_norm": 0.9831107258796692,
"learning_rate": 1.9855521213497355e-05,
"loss": 1.4245,
"step": 96
},
{
"epoch": 0.08354866494401378,
"grad_norm": 1.0372180938720703,
"learning_rate": 1.9850757290512313e-05,
"loss": 1.3838,
"step": 97
},
{
"epoch": 0.08440999138673558,
"grad_norm": 0.9056967496871948,
"learning_rate": 1.9845916685726808e-05,
"loss": 1.4076,
"step": 98
},
{
"epoch": 0.08527131782945736,
"grad_norm": 1.002386450767517,
"learning_rate": 1.9840999436821836e-05,
"loss": 1.4301,
"step": 99
},
{
"epoch": 0.08613264427217916,
"grad_norm": 0.9989022612571716,
"learning_rate": 1.983600558207501e-05,
"loss": 1.3993,
"step": 100
},
{
"epoch": 0.08699397071490095,
"grad_norm": 1.0508233308792114,
"learning_rate": 1.983093516036027e-05,
"loss": 1.4241,
"step": 101
},
{
"epoch": 0.08785529715762273,
"grad_norm": 0.9549940824508667,
"learning_rate": 1.9825788211147587e-05,
"loss": 1.427,
"step": 102
},
{
"epoch": 0.08871662360034453,
"grad_norm": 1.0347970724105835,
"learning_rate": 1.9820564774502644e-05,
"loss": 1.4029,
"step": 103
},
{
"epoch": 0.08957795004306632,
"grad_norm": 1.0045099258422852,
"learning_rate": 1.981526489108653e-05,
"loss": 1.4146,
"step": 104
},
{
"epoch": 0.09043927648578812,
"grad_norm": 1.2777268886566162,
"learning_rate": 1.980988860215542e-05,
"loss": 1.3599,
"step": 105
},
{
"epoch": 0.0913006029285099,
"grad_norm": 0.9002528190612793,
"learning_rate": 1.980443594956027e-05,
"loss": 1.4008,
"step": 106
},
{
"epoch": 0.0921619293712317,
"grad_norm": 0.9888056516647339,
"learning_rate": 1.9798906975746462e-05,
"loss": 1.422,
"step": 107
},
{
"epoch": 0.09302325581395349,
"grad_norm": 0.9847419857978821,
"learning_rate": 1.9793301723753494e-05,
"loss": 1.3866,
"step": 108
},
{
"epoch": 0.09388458225667529,
"grad_norm": 0.9763849377632141,
"learning_rate": 1.9787620237214648e-05,
"loss": 1.3883,
"step": 109
},
{
"epoch": 0.09474590869939707,
"grad_norm": 1.1006907224655151,
"learning_rate": 1.9781862560356632e-05,
"loss": 1.3748,
"step": 110
},
{
"epoch": 0.09560723514211886,
"grad_norm": 1.0140228271484375,
"learning_rate": 1.9776028737999256e-05,
"loss": 1.3581,
"step": 111
},
{
"epoch": 0.09646856158484066,
"grad_norm": 1.0311439037322998,
"learning_rate": 1.9770118815555063e-05,
"loss": 1.4253,
"step": 112
},
{
"epoch": 0.09732988802756244,
"grad_norm": 0.9372150301933289,
"learning_rate": 1.9764132839029e-05,
"loss": 1.4078,
"step": 113
},
{
"epoch": 0.09819121447028424,
"grad_norm": 1.0055251121520996,
"learning_rate": 1.9758070855018033e-05,
"loss": 1.432,
"step": 114
},
{
"epoch": 0.09905254091300603,
"grad_norm": 0.9005001783370972,
"learning_rate": 1.9751932910710808e-05,
"loss": 1.3998,
"step": 115
},
{
"epoch": 0.09991386735572783,
"grad_norm": 0.958956778049469,
"learning_rate": 1.9745719053887265e-05,
"loss": 1.3867,
"step": 116
},
{
"epoch": 0.10077519379844961,
"grad_norm": 1.2123082876205444,
"learning_rate": 1.9739429332918276e-05,
"loss": 1.382,
"step": 117
},
{
"epoch": 0.10163652024117141,
"grad_norm": 0.858340859413147,
"learning_rate": 1.9733063796765267e-05,
"loss": 1.3794,
"step": 118
},
{
"epoch": 0.1024978466838932,
"grad_norm": 0.8608000874519348,
"learning_rate": 1.972662249497984e-05,
"loss": 1.3996,
"step": 119
},
{
"epoch": 0.10335917312661498,
"grad_norm": 0.8888645768165588,
"learning_rate": 1.972010547770338e-05,
"loss": 1.3874,
"step": 120
},
{
"epoch": 0.10422049956933678,
"grad_norm": 0.9219585061073303,
"learning_rate": 1.9713512795666663e-05,
"loss": 1.3881,
"step": 121
},
{
"epoch": 0.10508182601205857,
"grad_norm": 0.8192768096923828,
"learning_rate": 1.970684450018948e-05,
"loss": 1.3827,
"step": 122
},
{
"epoch": 0.10594315245478036,
"grad_norm": 1.0063087940216064,
"learning_rate": 1.9700100643180213e-05,
"loss": 1.3679,
"step": 123
},
{
"epoch": 0.10680447889750215,
"grad_norm": 0.9152660369873047,
"learning_rate": 1.969328127713544e-05,
"loss": 1.3637,
"step": 124
},
{
"epoch": 0.10766580534022395,
"grad_norm": 0.9176394939422607,
"learning_rate": 1.9686386455139544e-05,
"loss": 1.3972,
"step": 125
},
{
"epoch": 0.10852713178294573,
"grad_norm": 0.91953444480896,
"learning_rate": 1.9679416230864265e-05,
"loss": 1.3833,
"step": 126
},
{
"epoch": 0.10938845822566753,
"grad_norm": 0.967275083065033,
"learning_rate": 1.9672370658568306e-05,
"loss": 1.3628,
"step": 127
},
{
"epoch": 0.11024978466838932,
"grad_norm": 0.8972539305686951,
"learning_rate": 1.966524979309692e-05,
"loss": 1.3457,
"step": 128
},
{
"epoch": 0.1111111111111111,
"grad_norm": 0.926163911819458,
"learning_rate": 1.9658053689881453e-05,
"loss": 1.3278,
"step": 129
},
{
"epoch": 0.1119724375538329,
"grad_norm": 0.9265878200531006,
"learning_rate": 1.9650782404938933e-05,
"loss": 1.347,
"step": 130
},
{
"epoch": 0.11283376399655469,
"grad_norm": 1.088254451751709,
"learning_rate": 1.9643435994871626e-05,
"loss": 1.3498,
"step": 131
},
{
"epoch": 0.11369509043927649,
"grad_norm": 0.9928959608078003,
"learning_rate": 1.963601451686661e-05,
"loss": 1.372,
"step": 132
},
{
"epoch": 0.11455641688199827,
"grad_norm": 0.8130192756652832,
"learning_rate": 1.9628518028695307e-05,
"loss": 1.3185,
"step": 133
},
{
"epoch": 0.11541774332472007,
"grad_norm": 0.9813799262046814,
"learning_rate": 1.9620946588713048e-05,
"loss": 1.3723,
"step": 134
},
{
"epoch": 0.11627906976744186,
"grad_norm": 0.8440076112747192,
"learning_rate": 1.9613300255858615e-05,
"loss": 1.379,
"step": 135
},
{
"epoch": 0.11714039621016366,
"grad_norm": 0.9899678826332092,
"learning_rate": 1.960557908965379e-05,
"loss": 1.3396,
"step": 136
},
{
"epoch": 0.11800172265288544,
"grad_norm": 0.9470325708389282,
"learning_rate": 1.9597783150202873e-05,
"loss": 1.3631,
"step": 137
},
{
"epoch": 0.11886304909560723,
"grad_norm": 0.9099454879760742,
"learning_rate": 1.9589912498192233e-05,
"loss": 1.3623,
"step": 138
},
{
"epoch": 0.11972437553832903,
"grad_norm": 1.1269025802612305,
"learning_rate": 1.9581967194889826e-05,
"loss": 1.3615,
"step": 139
},
{
"epoch": 0.12058570198105081,
"grad_norm": 0.8688855767250061,
"learning_rate": 1.957394730214472e-05,
"loss": 1.36,
"step": 140
},
{
"epoch": 0.12144702842377261,
"grad_norm": 0.9251095652580261,
"learning_rate": 1.956585288238662e-05,
"loss": 1.3274,
"step": 141
},
{
"epoch": 0.1223083548664944,
"grad_norm": 1.2113760709762573,
"learning_rate": 1.955768399862536e-05,
"loss": 1.3687,
"step": 142
},
{
"epoch": 0.1231696813092162,
"grad_norm": 1.0069940090179443,
"learning_rate": 1.9549440714450447e-05,
"loss": 1.3602,
"step": 143
},
{
"epoch": 0.12403100775193798,
"grad_norm": 0.987554132938385,
"learning_rate": 1.9541123094030528e-05,
"loss": 1.3515,
"step": 144
},
{
"epoch": 0.12489233419465978,
"grad_norm": 0.8786775469779968,
"learning_rate": 1.9532731202112935e-05,
"loss": 1.3849,
"step": 145
},
{
"epoch": 0.12575366063738158,
"grad_norm": 1.0503038167953491,
"learning_rate": 1.9524265104023133e-05,
"loss": 1.3638,
"step": 146
},
{
"epoch": 0.12661498708010335,
"grad_norm": 0.9434555768966675,
"learning_rate": 1.9515724865664242e-05,
"loss": 1.3065,
"step": 147
},
{
"epoch": 0.12747631352282515,
"grad_norm": 1.0372234582901,
"learning_rate": 1.9507110553516518e-05,
"loss": 1.3398,
"step": 148
},
{
"epoch": 0.12833763996554695,
"grad_norm": 0.9711533188819885,
"learning_rate": 1.949842223463683e-05,
"loss": 1.3428,
"step": 149
},
{
"epoch": 0.12919896640826872,
"grad_norm": 0.8844906091690063,
"learning_rate": 1.9489659976658152e-05,
"loss": 1.3685,
"step": 150
},
{
"epoch": 0.13006029285099052,
"grad_norm": 0.9219164848327637,
"learning_rate": 1.9480823847789007e-05,
"loss": 1.3465,
"step": 151
},
{
"epoch": 0.13092161929371232,
"grad_norm": 0.9082310795783997,
"learning_rate": 1.947191391681298e-05,
"loss": 1.3799,
"step": 152
},
{
"epoch": 0.13178294573643412,
"grad_norm": 0.9203616380691528,
"learning_rate": 1.946293025308813e-05,
"loss": 1.3636,
"step": 153
},
{
"epoch": 0.1326442721791559,
"grad_norm": 0.8217676877975464,
"learning_rate": 1.9453872926546505e-05,
"loss": 1.3247,
"step": 154
},
{
"epoch": 0.1335055986218777,
"grad_norm": 0.8964298963546753,
"learning_rate": 1.944474200769355e-05,
"loss": 1.3357,
"step": 155
},
{
"epoch": 0.1343669250645995,
"grad_norm": 0.9523929357528687,
"learning_rate": 1.943553756760759e-05,
"loss": 1.3712,
"step": 156
},
{
"epoch": 0.13522825150732126,
"grad_norm": 0.8506253361701965,
"learning_rate": 1.9426259677939264e-05,
"loss": 1.3522,
"step": 157
},
{
"epoch": 0.13608957795004306,
"grad_norm": 0.9101868867874146,
"learning_rate": 1.9416908410910965e-05,
"loss": 1.3266,
"step": 158
},
{
"epoch": 0.13695090439276486,
"grad_norm": 0.8775342702865601,
"learning_rate": 1.9407483839316284e-05,
"loss": 1.3575,
"step": 159
},
{
"epoch": 0.13781223083548666,
"grad_norm": 0.9115650057792664,
"learning_rate": 1.939798603651944e-05,
"loss": 1.3578,
"step": 160
},
{
"epoch": 0.13867355727820843,
"grad_norm": 0.8985600471496582,
"learning_rate": 1.938841507645471e-05,
"loss": 1.3059,
"step": 161
},
{
"epoch": 0.13953488372093023,
"grad_norm": 1.053168773651123,
"learning_rate": 1.9378771033625855e-05,
"loss": 1.3736,
"step": 162
},
{
"epoch": 0.14039621016365203,
"grad_norm": 0.9247081279754639,
"learning_rate": 1.9369053983105533e-05,
"loss": 1.3268,
"step": 163
},
{
"epoch": 0.14125753660637383,
"grad_norm": 0.937142014503479,
"learning_rate": 1.9359264000534726e-05,
"loss": 1.3352,
"step": 164
},
{
"epoch": 0.1421188630490956,
"grad_norm": 0.8469811081886292,
"learning_rate": 1.934940116212214e-05,
"loss": 1.3277,
"step": 165
},
{
"epoch": 0.1429801894918174,
"grad_norm": 0.8988816142082214,
"learning_rate": 1.9339465544643623e-05,
"loss": 1.3447,
"step": 166
},
{
"epoch": 0.1438415159345392,
"grad_norm": 1.033051609992981,
"learning_rate": 1.9329457225441554e-05,
"loss": 1.3072,
"step": 167
},
{
"epoch": 0.14470284237726097,
"grad_norm": 0.9636563658714294,
"learning_rate": 1.9319376282424255e-05,
"loss": 1.3027,
"step": 168
},
{
"epoch": 0.14556416881998277,
"grad_norm": 0.9479746222496033,
"learning_rate": 1.9309222794065373e-05,
"loss": 1.3319,
"step": 169
},
{
"epoch": 0.14642549526270457,
"grad_norm": 0.9528256058692932,
"learning_rate": 1.929899683940327e-05,
"loss": 1.3737,
"step": 170
},
{
"epoch": 0.14728682170542637,
"grad_norm": 0.87147057056427,
"learning_rate": 1.9288698498040423e-05,
"loss": 1.3361,
"step": 171
},
{
"epoch": 0.14814814814814814,
"grad_norm": 0.9613904356956482,
"learning_rate": 1.9278327850142783e-05,
"loss": 1.3485,
"step": 172
},
{
"epoch": 0.14900947459086994,
"grad_norm": 1.0545318126678467,
"learning_rate": 1.9267884976439163e-05,
"loss": 1.333,
"step": 173
},
{
"epoch": 0.14987080103359174,
"grad_norm": 0.9187701940536499,
"learning_rate": 1.9257369958220612e-05,
"loss": 1.3241,
"step": 174
},
{
"epoch": 0.1507321274763135,
"grad_norm": 0.866005539894104,
"learning_rate": 1.9246782877339767e-05,
"loss": 1.3133,
"step": 175
},
{
"epoch": 0.1515934539190353,
"grad_norm": 0.9420531392097473,
"learning_rate": 1.923612381621024e-05,
"loss": 1.2784,
"step": 176
},
{
"epoch": 0.1524547803617571,
"grad_norm": 0.9032683968544006,
"learning_rate": 1.9225392857805955e-05,
"loss": 1.2992,
"step": 177
},
{
"epoch": 0.1533161068044789,
"grad_norm": 0.9758039116859436,
"learning_rate": 1.921459008566051e-05,
"loss": 1.3013,
"step": 178
},
{
"epoch": 0.15417743324720068,
"grad_norm": 0.9804133176803589,
"learning_rate": 1.9203715583866527e-05,
"loss": 1.2893,
"step": 179
},
{
"epoch": 0.15503875968992248,
"grad_norm": 0.9305117130279541,
"learning_rate": 1.9192769437075e-05,
"loss": 1.3307,
"step": 180
},
{
"epoch": 0.15590008613264428,
"grad_norm": 0.9009129405021667,
"learning_rate": 1.918175173049463e-05,
"loss": 1.3344,
"step": 181
},
{
"epoch": 0.15676141257536608,
"grad_norm": 0.9379671812057495,
"learning_rate": 1.9170662549891162e-05,
"loss": 1.3434,
"step": 182
},
{
"epoch": 0.15762273901808785,
"grad_norm": 0.8975892066955566,
"learning_rate": 1.9159501981586738e-05,
"loss": 1.305,
"step": 183
},
{
"epoch": 0.15848406546080965,
"grad_norm": 0.8844696283340454,
"learning_rate": 1.9148270112459178e-05,
"loss": 1.3373,
"step": 184
},
{
"epoch": 0.15934539190353145,
"grad_norm": 0.9148349761962891,
"learning_rate": 1.9136967029941354e-05,
"loss": 1.322,
"step": 185
},
{
"epoch": 0.16020671834625322,
"grad_norm": 0.9962027668952942,
"learning_rate": 1.9125592822020485e-05,
"loss": 1.3065,
"step": 186
},
{
"epoch": 0.16106804478897502,
"grad_norm": 0.9287891983985901,
"learning_rate": 1.9114147577237452e-05,
"loss": 1.3102,
"step": 187
},
{
"epoch": 0.16192937123169682,
"grad_norm": 0.9015977382659912,
"learning_rate": 1.9102631384686116e-05,
"loss": 1.3011,
"step": 188
},
{
"epoch": 0.16279069767441862,
"grad_norm": 1.0545498132705688,
"learning_rate": 1.909104433401261e-05,
"loss": 1.2916,
"step": 189
},
{
"epoch": 0.1636520241171404,
"grad_norm": 0.8966095447540283,
"learning_rate": 1.9079386515414667e-05,
"loss": 1.2899,
"step": 190
},
{
"epoch": 0.1645133505598622,
"grad_norm": 0.8880913853645325,
"learning_rate": 1.9067658019640897e-05,
"loss": 1.3187,
"step": 191
},
{
"epoch": 0.165374677002584,
"grad_norm": 1.007127285003662,
"learning_rate": 1.9055858937990083e-05,
"loss": 1.2909,
"step": 192
},
{
"epoch": 0.16623600344530576,
"grad_norm": 0.8095739483833313,
"learning_rate": 1.9043989362310472e-05,
"loss": 1.2956,
"step": 193
},
{
"epoch": 0.16709732988802756,
"grad_norm": 0.8505532145500183,
"learning_rate": 1.903204938499907e-05,
"loss": 1.344,
"step": 194
},
{
"epoch": 0.16795865633074936,
"grad_norm": 0.8656225204467773,
"learning_rate": 1.902003909900091e-05,
"loss": 1.3279,
"step": 195
},
{
"epoch": 0.16881998277347116,
"grad_norm": 0.8762969374656677,
"learning_rate": 1.9007958597808326e-05,
"loss": 1.313,
"step": 196
},
{
"epoch": 0.16968130921619293,
"grad_norm": 0.8867192268371582,
"learning_rate": 1.8995807975460246e-05,
"loss": 1.2943,
"step": 197
},
{
"epoch": 0.17054263565891473,
"grad_norm": 0.9131550192832947,
"learning_rate": 1.8983587326541437e-05,
"loss": 1.302,
"step": 198
},
{
"epoch": 0.17140396210163653,
"grad_norm": 0.9059438705444336,
"learning_rate": 1.8971296746181774e-05,
"loss": 1.2927,
"step": 199
},
{
"epoch": 0.17226528854435832,
"grad_norm": 0.912719190120697,
"learning_rate": 1.8958936330055516e-05,
"loss": 1.2986,
"step": 200
},
{
"epoch": 0.1731266149870801,
"grad_norm": 0.9960724711418152,
"learning_rate": 1.894650617438054e-05,
"loss": 1.2989,
"step": 201
},
{
"epoch": 0.1739879414298019,
"grad_norm": 0.9200662970542908,
"learning_rate": 1.893400637591759e-05,
"loss": 1.3312,
"step": 202
},
{
"epoch": 0.1748492678725237,
"grad_norm": 1.040659785270691,
"learning_rate": 1.8921437031969557e-05,
"loss": 1.3031,
"step": 203
},
{
"epoch": 0.17571059431524547,
"grad_norm": 0.8604618310928345,
"learning_rate": 1.8908798240380692e-05,
"loss": 1.341,
"step": 204
},
{
"epoch": 0.17657192075796727,
"grad_norm": 0.982661247253418,
"learning_rate": 1.8896090099535834e-05,
"loss": 1.305,
"step": 205
},
{
"epoch": 0.17743324720068906,
"grad_norm": 0.8687034249305725,
"learning_rate": 1.888331270835968e-05,
"loss": 1.3114,
"step": 206
},
{
"epoch": 0.17829457364341086,
"grad_norm": 0.9046248197555542,
"learning_rate": 1.8870466166315992e-05,
"loss": 1.3212,
"step": 207
},
{
"epoch": 0.17915590008613264,
"grad_norm": 0.8828570246696472,
"learning_rate": 1.885755057340682e-05,
"loss": 1.2929,
"step": 208
},
{
"epoch": 0.18001722652885443,
"grad_norm": 0.8848705291748047,
"learning_rate": 1.8844566030171737e-05,
"loss": 1.3057,
"step": 209
},
{
"epoch": 0.18087855297157623,
"grad_norm": 1.004079818725586,
"learning_rate": 1.8831512637687054e-05,
"loss": 1.2909,
"step": 210
},
{
"epoch": 0.181739879414298,
"grad_norm": 0.9026859998703003,
"learning_rate": 1.881839049756502e-05,
"loss": 1.3081,
"step": 211
},
{
"epoch": 0.1826012058570198,
"grad_norm": 1.0891921520233154,
"learning_rate": 1.880519971195304e-05,
"loss": 1.3273,
"step": 212
},
{
"epoch": 0.1834625322997416,
"grad_norm": 0.9379571676254272,
"learning_rate": 1.879194038353289e-05,
"loss": 1.3223,
"step": 213
},
{
"epoch": 0.1843238587424634,
"grad_norm": 0.9415060877799988,
"learning_rate": 1.87786126155199e-05,
"loss": 1.3464,
"step": 214
},
{
"epoch": 0.18518518518518517,
"grad_norm": 0.9501126408576965,
"learning_rate": 1.8765216511662153e-05,
"loss": 1.3008,
"step": 215
},
{
"epoch": 0.18604651162790697,
"grad_norm": 0.9133473634719849,
"learning_rate": 1.8751752176239693e-05,
"loss": 1.2966,
"step": 216
},
{
"epoch": 0.18690783807062877,
"grad_norm": 0.9399188160896301,
"learning_rate": 1.87382197140637e-05,
"loss": 1.3193,
"step": 217
},
{
"epoch": 0.18776916451335057,
"grad_norm": 0.9107801914215088,
"learning_rate": 1.8724619230475675e-05,
"loss": 1.3024,
"step": 218
},
{
"epoch": 0.18863049095607234,
"grad_norm": 0.894831120967865,
"learning_rate": 1.8710950831346623e-05,
"loss": 1.3163,
"step": 219
},
{
"epoch": 0.18949181739879414,
"grad_norm": 1.0155446529388428,
"learning_rate": 1.8697214623076222e-05,
"loss": 1.2575,
"step": 220
},
{
"epoch": 0.19035314384151594,
"grad_norm": 0.9866533279418945,
"learning_rate": 1.8683410712592015e-05,
"loss": 1.3209,
"step": 221
},
{
"epoch": 0.19121447028423771,
"grad_norm": 0.8821126222610474,
"learning_rate": 1.8669539207348544e-05,
"loss": 1.2828,
"step": 222
},
{
"epoch": 0.1920757967269595,
"grad_norm": 0.8987988233566284,
"learning_rate": 1.8655600215326547e-05,
"loss": 1.2898,
"step": 223
},
{
"epoch": 0.1929371231696813,
"grad_norm": 0.9085987210273743,
"learning_rate": 1.8641593845032098e-05,
"loss": 1.3065,
"step": 224
},
{
"epoch": 0.1937984496124031,
"grad_norm": 0.9193576574325562,
"learning_rate": 1.8627520205495772e-05,
"loss": 1.2837,
"step": 225
},
{
"epoch": 0.19465977605512488,
"grad_norm": 0.9095317721366882,
"learning_rate": 1.8613379406271784e-05,
"loss": 1.3171,
"step": 226
},
{
"epoch": 0.19552110249784668,
"grad_norm": 0.9969485402107239,
"learning_rate": 1.8599171557437147e-05,
"loss": 1.2819,
"step": 227
},
{
"epoch": 0.19638242894056848,
"grad_norm": 0.9653918743133545,
"learning_rate": 1.858489676959081e-05,
"loss": 1.2601,
"step": 228
},
{
"epoch": 0.19724375538329025,
"grad_norm": 0.977749764919281,
"learning_rate": 1.8570555153852806e-05,
"loss": 1.2861,
"step": 229
},
{
"epoch": 0.19810508182601205,
"grad_norm": 0.9468740820884705,
"learning_rate": 1.855614682186338e-05,
"loss": 1.2869,
"step": 230
},
{
"epoch": 0.19896640826873385,
"grad_norm": 1.1150413751602173,
"learning_rate": 1.8541671885782106e-05,
"loss": 1.2798,
"step": 231
},
{
"epoch": 0.19982773471145565,
"grad_norm": 0.9200144410133362,
"learning_rate": 1.8527130458287047e-05,
"loss": 1.2855,
"step": 232
},
{
"epoch": 0.20068906115417742,
"grad_norm": 0.937813401222229,
"learning_rate": 1.851252265257384e-05,
"loss": 1.2598,
"step": 233
},
{
"epoch": 0.20155038759689922,
"grad_norm": 1.02151620388031,
"learning_rate": 1.8497848582354852e-05,
"loss": 1.2828,
"step": 234
},
{
"epoch": 0.20241171403962102,
"grad_norm": 0.9327558279037476,
"learning_rate": 1.8483108361858263e-05,
"loss": 1.2898,
"step": 235
},
{
"epoch": 0.20327304048234282,
"grad_norm": 1.0296434164047241,
"learning_rate": 1.8468302105827195e-05,
"loss": 1.2869,
"step": 236
},
{
"epoch": 0.2041343669250646,
"grad_norm": 0.9533336758613586,
"learning_rate": 1.845342992951882e-05,
"loss": 1.2768,
"step": 237
},
{
"epoch": 0.2049956933677864,
"grad_norm": 0.9365238547325134,
"learning_rate": 1.8438491948703445e-05,
"loss": 1.2783,
"step": 238
},
{
"epoch": 0.2058570198105082,
"grad_norm": 1.104421854019165,
"learning_rate": 1.842348827966363e-05,
"loss": 1.293,
"step": 239
},
{
"epoch": 0.20671834625322996,
"grad_norm": 0.8812316060066223,
"learning_rate": 1.840841903919328e-05,
"loss": 1.2856,
"step": 240
},
{
"epoch": 0.20757967269595176,
"grad_norm": 0.9076652526855469,
"learning_rate": 1.8393284344596715e-05,
"loss": 1.3453,
"step": 241
},
{
"epoch": 0.20844099913867356,
"grad_norm": 0.9731348156929016,
"learning_rate": 1.837808431368779e-05,
"loss": 1.3084,
"step": 242
},
{
"epoch": 0.20930232558139536,
"grad_norm": 0.8820015788078308,
"learning_rate": 1.8362819064788956e-05,
"loss": 1.2753,
"step": 243
},
{
"epoch": 0.21016365202411713,
"grad_norm": 1.0030732154846191,
"learning_rate": 1.8347488716730343e-05,
"loss": 1.302,
"step": 244
},
{
"epoch": 0.21102497846683893,
"grad_norm": 0.9901053309440613,
"learning_rate": 1.8332093388848836e-05,
"loss": 1.31,
"step": 245
},
{
"epoch": 0.21188630490956073,
"grad_norm": 0.81141597032547,
"learning_rate": 1.8316633200987143e-05,
"loss": 1.2973,
"step": 246
},
{
"epoch": 0.2127476313522825,
"grad_norm": 0.9569693803787231,
"learning_rate": 1.830110827349287e-05,
"loss": 1.2805,
"step": 247
},
{
"epoch": 0.2136089577950043,
"grad_norm": 0.9289495348930359,
"learning_rate": 1.8285518727217578e-05,
"loss": 1.2735,
"step": 248
},
{
"epoch": 0.2144702842377261,
"grad_norm": 0.8315669894218445,
"learning_rate": 1.8269864683515847e-05,
"loss": 1.2775,
"step": 249
},
{
"epoch": 0.2153316106804479,
"grad_norm": 0.9181628823280334,
"learning_rate": 1.8254146264244316e-05,
"loss": 1.2328,
"step": 250
},
{
"epoch": 0.21619293712316967,
"grad_norm": 0.8977162837982178,
"learning_rate": 1.8238363591760758e-05,
"loss": 1.2346,
"step": 251
},
{
"epoch": 0.21705426356589147,
"grad_norm": 0.9173194169998169,
"learning_rate": 1.822251678892312e-05,
"loss": 1.2653,
"step": 252
},
{
"epoch": 0.21791559000861327,
"grad_norm": 0.9074868559837341,
"learning_rate": 1.8206605979088545e-05,
"loss": 1.2514,
"step": 253
},
{
"epoch": 0.21877691645133507,
"grad_norm": 0.8458305597305298,
"learning_rate": 1.819063128611244e-05,
"loss": 1.2885,
"step": 254
},
{
"epoch": 0.21963824289405684,
"grad_norm": 0.914714515209198,
"learning_rate": 1.8174592834347503e-05,
"loss": 1.2431,
"step": 255
},
{
"epoch": 0.22049956933677864,
"grad_norm": 0.8686874508857727,
"learning_rate": 1.815849074864275e-05,
"loss": 1.2554,
"step": 256
},
{
"epoch": 0.22136089577950044,
"grad_norm": 0.8784075379371643,
"learning_rate": 1.814232515434255e-05,
"loss": 1.3005,
"step": 257
},
{
"epoch": 0.2222222222222222,
"grad_norm": 0.919947624206543,
"learning_rate": 1.8126096177285637e-05,
"loss": 1.2968,
"step": 258
},
{
"epoch": 0.223083548664944,
"grad_norm": 0.9774091839790344,
"learning_rate": 1.8109803943804146e-05,
"loss": 1.2763,
"step": 259
},
{
"epoch": 0.2239448751076658,
"grad_norm": 0.8449957370758057,
"learning_rate": 1.8093448580722617e-05,
"loss": 1.2653,
"step": 260
},
{
"epoch": 0.2248062015503876,
"grad_norm": 0.9756072163581848,
"learning_rate": 1.8077030215357024e-05,
"loss": 1.272,
"step": 261
},
{
"epoch": 0.22566752799310938,
"grad_norm": 0.8839893341064453,
"learning_rate": 1.806054897551376e-05,
"loss": 1.2743,
"step": 262
},
{
"epoch": 0.22652885443583118,
"grad_norm": 0.8637559413909912,
"learning_rate": 1.8044004989488662e-05,
"loss": 1.2862,
"step": 263
},
{
"epoch": 0.22739018087855298,
"grad_norm": 0.9832426309585571,
"learning_rate": 1.802739838606601e-05,
"loss": 1.2886,
"step": 264
},
{
"epoch": 0.22825150732127478,
"grad_norm": 0.9957407116889954,
"learning_rate": 1.801072929451751e-05,
"loss": 1.2629,
"step": 265
},
{
"epoch": 0.22911283376399655,
"grad_norm": 0.9450539946556091,
"learning_rate": 1.7993997844601305e-05,
"loss": 1.2606,
"step": 266
},
{
"epoch": 0.22997416020671835,
"grad_norm": 0.9669734239578247,
"learning_rate": 1.7977204166560954e-05,
"loss": 1.2614,
"step": 267
},
{
"epoch": 0.23083548664944015,
"grad_norm": 0.9341006278991699,
"learning_rate": 1.7960348391124422e-05,
"loss": 1.2826,
"step": 268
},
{
"epoch": 0.23169681309216192,
"grad_norm": 0.8190209865570068,
"learning_rate": 1.7943430649503065e-05,
"loss": 1.2575,
"step": 269
},
{
"epoch": 0.23255813953488372,
"grad_norm": 0.9061177968978882,
"learning_rate": 1.7926451073390612e-05,
"loss": 1.2458,
"step": 270
},
{
"epoch": 0.23341946597760552,
"grad_norm": 0.8741104602813721,
"learning_rate": 1.7909409794962115e-05,
"loss": 1.2805,
"step": 271
},
{
"epoch": 0.23428079242032732,
"grad_norm": 0.8899810910224915,
"learning_rate": 1.7892306946872952e-05,
"loss": 1.2581,
"step": 272
},
{
"epoch": 0.2351421188630491,
"grad_norm": 0.9245790243148804,
"learning_rate": 1.7875142662257788e-05,
"loss": 1.2877,
"step": 273
},
{
"epoch": 0.2360034453057709,
"grad_norm": 0.8939240574836731,
"learning_rate": 1.7857917074729513e-05,
"loss": 1.2461,
"step": 274
},
{
"epoch": 0.2368647717484927,
"grad_norm": 0.8694809079170227,
"learning_rate": 1.7840630318378233e-05,
"loss": 1.2458,
"step": 275
},
{
"epoch": 0.23772609819121446,
"grad_norm": 0.8689870238304138,
"learning_rate": 1.7823282527770214e-05,
"loss": 1.2821,
"step": 276
},
{
"epoch": 0.23858742463393626,
"grad_norm": 0.8938902616500854,
"learning_rate": 1.7805873837946833e-05,
"loss": 1.2691,
"step": 277
},
{
"epoch": 0.23944875107665806,
"grad_norm": 1.0327783823013306,
"learning_rate": 1.778840438442352e-05,
"loss": 1.2732,
"step": 278
},
{
"epoch": 0.24031007751937986,
"grad_norm": 0.9891493916511536,
"learning_rate": 1.7770874303188727e-05,
"loss": 1.253,
"step": 279
},
{
"epoch": 0.24117140396210163,
"grad_norm": 0.9043530821800232,
"learning_rate": 1.7753283730702837e-05,
"loss": 1.2731,
"step": 280
},
{
"epoch": 0.24203273040482343,
"grad_norm": 0.9076008796691895,
"learning_rate": 1.7735632803897135e-05,
"loss": 1.2684,
"step": 281
},
{
"epoch": 0.24289405684754523,
"grad_norm": 0.8306509852409363,
"learning_rate": 1.7717921660172708e-05,
"loss": 1.2577,
"step": 282
},
{
"epoch": 0.24375538329026702,
"grad_norm": 0.8941156268119812,
"learning_rate": 1.7700150437399405e-05,
"loss": 1.2682,
"step": 283
},
{
"epoch": 0.2446167097329888,
"grad_norm": 0.8855567574501038,
"learning_rate": 1.7682319273914755e-05,
"loss": 1.2904,
"step": 284
},
{
"epoch": 0.2454780361757106,
"grad_norm": 1.0233161449432373,
"learning_rate": 1.766442830852287e-05,
"loss": 1.2907,
"step": 285
},
{
"epoch": 0.2463393626184324,
"grad_norm": 1.106757402420044,
"learning_rate": 1.76464776804934e-05,
"loss": 1.245,
"step": 286
},
{
"epoch": 0.24720068906115417,
"grad_norm": 0.92214435338974,
"learning_rate": 1.7628467529560417e-05,
"loss": 1.254,
"step": 287
},
{
"epoch": 0.24806201550387597,
"grad_norm": 0.8845908045768738,
"learning_rate": 1.7610397995921348e-05,
"loss": 1.314,
"step": 288
},
{
"epoch": 0.24892334194659776,
"grad_norm": 0.837216317653656,
"learning_rate": 1.759226922023587e-05,
"loss": 1.2943,
"step": 289
},
{
"epoch": 0.24978466838931956,
"grad_norm": 0.9128316044807434,
"learning_rate": 1.7574081343624827e-05,
"loss": 1.2881,
"step": 290
},
{
"epoch": 0.25064599483204136,
"grad_norm": 0.9482645392417908,
"learning_rate": 1.7555834507669124e-05,
"loss": 1.2846,
"step": 291
},
{
"epoch": 0.25150732127476316,
"grad_norm": 0.9201834797859192,
"learning_rate": 1.7537528854408625e-05,
"loss": 1.2729,
"step": 292
},
{
"epoch": 0.2523686477174849,
"grad_norm": 0.8601769804954529,
"learning_rate": 1.751916452634105e-05,
"loss": 1.2601,
"step": 293
},
{
"epoch": 0.2532299741602067,
"grad_norm": 0.8500188589096069,
"learning_rate": 1.7500741666420863e-05,
"loss": 1.2627,
"step": 294
},
{
"epoch": 0.2540913006029285,
"grad_norm": 0.9467564821243286,
"learning_rate": 1.7482260418058167e-05,
"loss": 1.2572,
"step": 295
},
{
"epoch": 0.2549526270456503,
"grad_norm": 0.9599546790122986,
"learning_rate": 1.7463720925117565e-05,
"loss": 1.2445,
"step": 296
},
{
"epoch": 0.2558139534883721,
"grad_norm": 0.8565171957015991,
"learning_rate": 1.744512333191708e-05,
"loss": 1.2222,
"step": 297
},
{
"epoch": 0.2566752799310939,
"grad_norm": 1.068682074546814,
"learning_rate": 1.7426467783226992e-05,
"loss": 1.2574,
"step": 298
},
{
"epoch": 0.2575366063738157,
"grad_norm": 0.9868722558021545,
"learning_rate": 1.7407754424268727e-05,
"loss": 1.2234,
"step": 299
},
{
"epoch": 0.25839793281653745,
"grad_norm": 0.8837171196937561,
"learning_rate": 1.7388983400713736e-05,
"loss": 1.2489,
"step": 300
},
{
"epoch": 0.25925925925925924,
"grad_norm": 1.0964189767837524,
"learning_rate": 1.7370154858682347e-05,
"loss": 1.2658,
"step": 301
},
{
"epoch": 0.26012058570198104,
"grad_norm": 0.9035983085632324,
"learning_rate": 1.7351268944742626e-05,
"loss": 1.261,
"step": 302
},
{
"epoch": 0.26098191214470284,
"grad_norm": 1.021364688873291,
"learning_rate": 1.7332325805909256e-05,
"loss": 1.2542,
"step": 303
},
{
"epoch": 0.26184323858742464,
"grad_norm": 0.8748368620872498,
"learning_rate": 1.7313325589642363e-05,
"loss": 1.2155,
"step": 304
},
{
"epoch": 0.26270456503014644,
"grad_norm": 1.0000536441802979,
"learning_rate": 1.7294268443846403e-05,
"loss": 1.2386,
"step": 305
},
{
"epoch": 0.26356589147286824,
"grad_norm": 0.8522447943687439,
"learning_rate": 1.727515451686897e-05,
"loss": 1.262,
"step": 306
},
{
"epoch": 0.26442721791559,
"grad_norm": 1.03394615650177,
"learning_rate": 1.7255983957499676e-05,
"loss": 1.2607,
"step": 307
},
{
"epoch": 0.2652885443583118,
"grad_norm": 0.9327899217605591,
"learning_rate": 1.7236756914968985e-05,
"loss": 1.239,
"step": 308
},
{
"epoch": 0.2661498708010336,
"grad_norm": 0.8953905701637268,
"learning_rate": 1.7217473538947032e-05,
"loss": 1.2584,
"step": 309
},
{
"epoch": 0.2670111972437554,
"grad_norm": 0.877118706703186,
"learning_rate": 1.719813397954248e-05,
"loss": 1.2692,
"step": 310
},
{
"epoch": 0.2678725236864772,
"grad_norm": 0.8814844489097595,
"learning_rate": 1.7178738387301342e-05,
"loss": 1.258,
"step": 311
},
{
"epoch": 0.268733850129199,
"grad_norm": 0.882537305355072,
"learning_rate": 1.7159286913205813e-05,
"loss": 1.2923,
"step": 312
},
{
"epoch": 0.2695951765719208,
"grad_norm": 0.9672994613647461,
"learning_rate": 1.7139779708673084e-05,
"loss": 1.2725,
"step": 313
},
{
"epoch": 0.2704565030146425,
"grad_norm": 0.8873171806335449,
"learning_rate": 1.7120216925554185e-05,
"loss": 1.2268,
"step": 314
},
{
"epoch": 0.2713178294573643,
"grad_norm": 0.9454041123390198,
"learning_rate": 1.7100598716132775e-05,
"loss": 1.2358,
"step": 315
},
{
"epoch": 0.2721791559000861,
"grad_norm": 0.943789005279541,
"learning_rate": 1.708092523312398e-05,
"loss": 1.2138,
"step": 316
},
{
"epoch": 0.2730404823428079,
"grad_norm": 0.8467637300491333,
"learning_rate": 1.7061196629673198e-05,
"loss": 1.2495,
"step": 317
},
{
"epoch": 0.2739018087855297,
"grad_norm": 0.8850976228713989,
"learning_rate": 1.7041413059354893e-05,
"loss": 1.2586,
"step": 318
},
{
"epoch": 0.2747631352282515,
"grad_norm": 0.8928930759429932,
"learning_rate": 1.7021574676171418e-05,
"loss": 1.2279,
"step": 319
},
{
"epoch": 0.2756244616709733,
"grad_norm": 0.9026039242744446,
"learning_rate": 1.7001681634551813e-05,
"loss": 1.2361,
"step": 320
},
{
"epoch": 0.27648578811369506,
"grad_norm": 0.90481036901474,
"learning_rate": 1.6981734089350585e-05,
"loss": 1.2308,
"step": 321
},
{
"epoch": 0.27734711455641686,
"grad_norm": 0.8753896951675415,
"learning_rate": 1.696173219584653e-05,
"loss": 1.2383,
"step": 322
},
{
"epoch": 0.27820844099913866,
"grad_norm": 1.0231022834777832,
"learning_rate": 1.6941676109741506e-05,
"loss": 1.2472,
"step": 323
},
{
"epoch": 0.27906976744186046,
"grad_norm": 0.8703423142433167,
"learning_rate": 1.6921565987159226e-05,
"loss": 1.2712,
"step": 324
},
{
"epoch": 0.27993109388458226,
"grad_norm": 0.9238699078559875,
"learning_rate": 1.6901401984644034e-05,
"loss": 1.2279,
"step": 325
},
{
"epoch": 0.28079242032730406,
"grad_norm": 0.9144941568374634,
"learning_rate": 1.6881184259159708e-05,
"loss": 1.2416,
"step": 326
},
{
"epoch": 0.28165374677002586,
"grad_norm": 0.8758153915405273,
"learning_rate": 1.686091296808822e-05,
"loss": 1.2736,
"step": 327
},
{
"epoch": 0.28251507321274766,
"grad_norm": 0.8742189407348633,
"learning_rate": 1.6840588269228507e-05,
"loss": 1.2876,
"step": 328
},
{
"epoch": 0.2833763996554694,
"grad_norm": 0.9139267206192017,
"learning_rate": 1.682021032079526e-05,
"loss": 1.2919,
"step": 329
},
{
"epoch": 0.2842377260981912,
"grad_norm": 0.8497806191444397,
"learning_rate": 1.6799779281417685e-05,
"loss": 1.2275,
"step": 330
},
{
"epoch": 0.285099052540913,
"grad_norm": 0.9016520380973816,
"learning_rate": 1.6779295310138264e-05,
"loss": 1.2126,
"step": 331
},
{
"epoch": 0.2859603789836348,
"grad_norm": 0.8737862706184387,
"learning_rate": 1.6758758566411516e-05,
"loss": 1.2641,
"step": 332
},
{
"epoch": 0.2868217054263566,
"grad_norm": 0.9002192616462708,
"learning_rate": 1.6738169210102765e-05,
"loss": 1.2872,
"step": 333
},
{
"epoch": 0.2876830318690784,
"grad_norm": 0.8395231366157532,
"learning_rate": 1.6717527401486882e-05,
"loss": 1.2808,
"step": 334
},
{
"epoch": 0.2885443583118002,
"grad_norm": 0.9024813771247864,
"learning_rate": 1.669683330124706e-05,
"loss": 1.2415,
"step": 335
},
{
"epoch": 0.28940568475452194,
"grad_norm": 0.9167109131813049,
"learning_rate": 1.667608707047354e-05,
"loss": 1.2375,
"step": 336
},
{
"epoch": 0.29026701119724374,
"grad_norm": 0.8365147113800049,
"learning_rate": 1.6655288870662354e-05,
"loss": 1.2451,
"step": 337
},
{
"epoch": 0.29112833763996554,
"grad_norm": 0.9034088253974915,
"learning_rate": 1.6634438863714108e-05,
"loss": 1.2237,
"step": 338
},
{
"epoch": 0.29198966408268734,
"grad_norm": 0.8997372984886169,
"learning_rate": 1.661353721193266e-05,
"loss": 1.2358,
"step": 339
},
{
"epoch": 0.29285099052540914,
"grad_norm": 0.9689245820045471,
"learning_rate": 1.6592584078023915e-05,
"loss": 1.2523,
"step": 340
},
{
"epoch": 0.29371231696813094,
"grad_norm": 0.9412267208099365,
"learning_rate": 1.657157962509452e-05,
"loss": 1.2445,
"step": 341
},
{
"epoch": 0.29457364341085274,
"grad_norm": 0.8464860916137695,
"learning_rate": 1.6550524016650617e-05,
"loss": 1.2102,
"step": 342
},
{
"epoch": 0.2954349698535745,
"grad_norm": 0.8929284811019897,
"learning_rate": 1.652941741659655e-05,
"loss": 1.2433,
"step": 343
},
{
"epoch": 0.2962962962962963,
"grad_norm": 1.0461153984069824,
"learning_rate": 1.650825998923361e-05,
"loss": 1.2289,
"step": 344
},
{
"epoch": 0.2971576227390181,
"grad_norm": 0.8382362127304077,
"learning_rate": 1.6487051899258738e-05,
"loss": 1.2455,
"step": 345
},
{
"epoch": 0.2980189491817399,
"grad_norm": 0.9788974523544312,
"learning_rate": 1.6465793311763255e-05,
"loss": 1.2314,
"step": 346
},
{
"epoch": 0.2988802756244617,
"grad_norm": 0.9103128910064697,
"learning_rate": 1.6444484392231574e-05,
"loss": 1.2331,
"step": 347
},
{
"epoch": 0.2997416020671835,
"grad_norm": 0.9638088345527649,
"learning_rate": 1.6423125306539903e-05,
"loss": 1.2228,
"step": 348
},
{
"epoch": 0.3006029285099053,
"grad_norm": 0.9347442388534546,
"learning_rate": 1.6401716220954968e-05,
"loss": 1.227,
"step": 349
},
{
"epoch": 0.301464254952627,
"grad_norm": 0.9683001637458801,
"learning_rate": 1.638025730213271e-05,
"loss": 1.2253,
"step": 350
},
{
"epoch": 0.3023255813953488,
"grad_norm": 0.9251329898834229,
"learning_rate": 1.6358748717116993e-05,
"loss": 1.2698,
"step": 351
},
{
"epoch": 0.3031869078380706,
"grad_norm": 0.9530742168426514,
"learning_rate": 1.6337190633338294e-05,
"loss": 1.2407,
"step": 352
},
{
"epoch": 0.3040482342807924,
"grad_norm": 0.9009522199630737,
"learning_rate": 1.631558321861241e-05,
"loss": 1.2478,
"step": 353
},
{
"epoch": 0.3049095607235142,
"grad_norm": 1.0106037855148315,
"learning_rate": 1.6293926641139154e-05,
"loss": 1.2298,
"step": 354
},
{
"epoch": 0.305770887166236,
"grad_norm": 0.8804092407226562,
"learning_rate": 1.627222106950102e-05,
"loss": 1.2324,
"step": 355
},
{
"epoch": 0.3066322136089578,
"grad_norm": 0.8187827467918396,
"learning_rate": 1.625046667266191e-05,
"loss": 1.2714,
"step": 356
},
{
"epoch": 0.30749354005167956,
"grad_norm": 1.0687847137451172,
"learning_rate": 1.6228663619965787e-05,
"loss": 1.2423,
"step": 357
},
{
"epoch": 0.30835486649440136,
"grad_norm": 0.8696728348731995,
"learning_rate": 1.620681208113538e-05,
"loss": 1.241,
"step": 358
},
{
"epoch": 0.30921619293712316,
"grad_norm": 0.8725491762161255,
"learning_rate": 1.6184912226270833e-05,
"loss": 1.2741,
"step": 359
},
{
"epoch": 0.31007751937984496,
"grad_norm": 1.0487327575683594,
"learning_rate": 1.6162964225848416e-05,
"loss": 1.2382,
"step": 360
},
{
"epoch": 0.31093884582256676,
"grad_norm": 0.8088880181312561,
"learning_rate": 1.6140968250719177e-05,
"loss": 1.2441,
"step": 361
},
{
"epoch": 0.31180017226528856,
"grad_norm": 0.8771573305130005,
"learning_rate": 1.611892447210761e-05,
"loss": 1.2393,
"step": 362
},
{
"epoch": 0.31266149870801035,
"grad_norm": 0.9969056248664856,
"learning_rate": 1.609683306161034e-05,
"loss": 1.2319,
"step": 363
},
{
"epoch": 0.31352282515073215,
"grad_norm": 0.9056423902511597,
"learning_rate": 1.6074694191194758e-05,
"loss": 1.2191,
"step": 364
},
{
"epoch": 0.3143841515934539,
"grad_norm": 0.8870687484741211,
"learning_rate": 1.6052508033197713e-05,
"loss": 1.249,
"step": 365
},
{
"epoch": 0.3152454780361757,
"grad_norm": 0.9680421352386475,
"learning_rate": 1.6030274760324163e-05,
"loss": 1.2299,
"step": 366
},
{
"epoch": 0.3161068044788975,
"grad_norm": 0.9495812058448792,
"learning_rate": 1.6007994545645807e-05,
"loss": 1.2574,
"step": 367
},
{
"epoch": 0.3169681309216193,
"grad_norm": 0.8676847219467163,
"learning_rate": 1.598566756259977e-05,
"loss": 1.2284,
"step": 368
},
{
"epoch": 0.3178294573643411,
"grad_norm": 0.841946005821228,
"learning_rate": 1.596329398498723e-05,
"loss": 1.2304,
"step": 369
},
{
"epoch": 0.3186907838070629,
"grad_norm": 1.0011506080627441,
"learning_rate": 1.5940873986972078e-05,
"loss": 1.2454,
"step": 370
},
{
"epoch": 0.3195521102497847,
"grad_norm": 0.9674389958381653,
"learning_rate": 1.5918407743079564e-05,
"loss": 1.241,
"step": 371
},
{
"epoch": 0.32041343669250644,
"grad_norm": 1.0047541856765747,
"learning_rate": 1.5895895428194915e-05,
"loss": 1.2541,
"step": 372
},
{
"epoch": 0.32127476313522824,
"grad_norm": 0.9397938847541809,
"learning_rate": 1.5873337217562012e-05,
"loss": 1.2182,
"step": 373
},
{
"epoch": 0.32213608957795004,
"grad_norm": 0.8715697526931763,
"learning_rate": 1.585073328678199e-05,
"loss": 1.243,
"step": 374
},
{
"epoch": 0.32299741602067183,
"grad_norm": 0.9596509337425232,
"learning_rate": 1.582808381181189e-05,
"loss": 1.2417,
"step": 375
},
{
"epoch": 0.32385874246339363,
"grad_norm": 0.8386455178260803,
"learning_rate": 1.5805388968963286e-05,
"loss": 1.2625,
"step": 376
},
{
"epoch": 0.32472006890611543,
"grad_norm": 0.897318422794342,
"learning_rate": 1.5782648934900915e-05,
"loss": 1.2233,
"step": 377
},
{
"epoch": 0.32558139534883723,
"grad_norm": 0.8364303112030029,
"learning_rate": 1.575986388664129e-05,
"loss": 1.2323,
"step": 378
},
{
"epoch": 0.326442721791559,
"grad_norm": 0.8544413447380066,
"learning_rate": 1.5737034001551336e-05,
"loss": 1.2373,
"step": 379
},
{
"epoch": 0.3273040482342808,
"grad_norm": 0.9020499587059021,
"learning_rate": 1.5714159457347007e-05,
"loss": 1.2432,
"step": 380
},
{
"epoch": 0.3281653746770026,
"grad_norm": 0.8884045481681824,
"learning_rate": 1.5691240432091892e-05,
"loss": 1.2144,
"step": 381
},
{
"epoch": 0.3290267011197244,
"grad_norm": 0.9886582493782043,
"learning_rate": 1.566827710419584e-05,
"loss": 1.2344,
"step": 382
},
{
"epoch": 0.3298880275624462,
"grad_norm": 0.8105961680412292,
"learning_rate": 1.5645269652413574e-05,
"loss": 1.2441,
"step": 383
},
{
"epoch": 0.330749354005168,
"grad_norm": 0.9423347115516663,
"learning_rate": 1.5622218255843276e-05,
"loss": 1.2172,
"step": 384
},
{
"epoch": 0.33161068044788977,
"grad_norm": 0.970775842666626,
"learning_rate": 1.559912309392523e-05,
"loss": 1.2587,
"step": 385
},
{
"epoch": 0.3324720068906115,
"grad_norm": 0.9015215039253235,
"learning_rate": 1.5575984346440393e-05,
"loss": 1.2388,
"step": 386
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.9856666326522827,
"learning_rate": 1.5552802193509003e-05,
"loss": 1.2181,
"step": 387
},
{
"epoch": 0.3341946597760551,
"grad_norm": 0.9138821959495544,
"learning_rate": 1.55295768155892e-05,
"loss": 1.2418,
"step": 388
},
{
"epoch": 0.3350559862187769,
"grad_norm": 0.8847649693489075,
"learning_rate": 1.5506308393475582e-05,
"loss": 1.1956,
"step": 389
},
{
"epoch": 0.3359173126614987,
"grad_norm": 0.847986102104187,
"learning_rate": 1.5482997108297834e-05,
"loss": 1.2041,
"step": 390
},
{
"epoch": 0.3367786391042205,
"grad_norm": 1.1017706394195557,
"learning_rate": 1.545964314151929e-05,
"loss": 1.2646,
"step": 391
},
{
"epoch": 0.3376399655469423,
"grad_norm": 0.8774247169494629,
"learning_rate": 1.5436246674935543e-05,
"loss": 1.2236,
"step": 392
},
{
"epoch": 0.3385012919896641,
"grad_norm": 0.8937813639640808,
"learning_rate": 1.5412807890673015e-05,
"loss": 1.2192,
"step": 393
},
{
"epoch": 0.33936261843238585,
"grad_norm": 0.8541170358657837,
"learning_rate": 1.5389326971187543e-05,
"loss": 1.2268,
"step": 394
},
{
"epoch": 0.34022394487510765,
"grad_norm": 0.9567473530769348,
"learning_rate": 1.536580409926296e-05,
"loss": 1.2489,
"step": 395
},
{
"epoch": 0.34108527131782945,
"grad_norm": 0.8888243436813354,
"learning_rate": 1.5342239458009675e-05,
"loss": 1.2308,
"step": 396
},
{
"epoch": 0.34194659776055125,
"grad_norm": 0.893013060092926,
"learning_rate": 1.5318633230863237e-05,
"loss": 1.2237,
"step": 397
},
{
"epoch": 0.34280792420327305,
"grad_norm": 0.874277651309967,
"learning_rate": 1.5294985601582922e-05,
"loss": 1.186,
"step": 398
},
{
"epoch": 0.34366925064599485,
"grad_norm": 0.8381382822990417,
"learning_rate": 1.5271296754250296e-05,
"loss": 1.2454,
"step": 399
},
{
"epoch": 0.34453057708871665,
"grad_norm": 0.9372345209121704,
"learning_rate": 1.524756687326777e-05,
"loss": 1.2233,
"step": 400
},
{
"epoch": 0.3453919035314384,
"grad_norm": 0.869147002696991,
"learning_rate": 1.5223796143357188e-05,
"loss": 1.2084,
"step": 401
},
{
"epoch": 0.3462532299741602,
"grad_norm": 0.8200699687004089,
"learning_rate": 1.5199984749558367e-05,
"loss": 1.2355,
"step": 402
},
{
"epoch": 0.347114556416882,
"grad_norm": 0.901074230670929,
"learning_rate": 1.5176132877227674e-05,
"loss": 1.2653,
"step": 403
},
{
"epoch": 0.3479758828596038,
"grad_norm": 0.871003270149231,
"learning_rate": 1.5152240712036573e-05,
"loss": 1.2289,
"step": 404
},
{
"epoch": 0.3488372093023256,
"grad_norm": 0.9896201491355896,
"learning_rate": 1.5128308439970174e-05,
"loss": 1.2388,
"step": 405
},
{
"epoch": 0.3496985357450474,
"grad_norm": 0.9683994054794312,
"learning_rate": 1.5104336247325803e-05,
"loss": 1.2178,
"step": 406
},
{
"epoch": 0.3505598621877692,
"grad_norm": 0.8784785270690918,
"learning_rate": 1.5080324320711542e-05,
"loss": 1.2929,
"step": 407
},
{
"epoch": 0.35142118863049093,
"grad_norm": 0.9590929746627808,
"learning_rate": 1.505627284704477e-05,
"loss": 1.2284,
"step": 408
},
{
"epoch": 0.35228251507321273,
"grad_norm": 0.8345155715942383,
"learning_rate": 1.5032182013550719e-05,
"loss": 1.2614,
"step": 409
},
{
"epoch": 0.35314384151593453,
"grad_norm": 0.8421818017959595,
"learning_rate": 1.5008052007761009e-05,
"loss": 1.2258,
"step": 410
},
{
"epoch": 0.35400516795865633,
"grad_norm": 1.0068047046661377,
"learning_rate": 1.498388301751219e-05,
"loss": 1.2686,
"step": 411
},
{
"epoch": 0.35486649440137813,
"grad_norm": 0.8941269516944885,
"learning_rate": 1.495967523094429e-05,
"loss": 1.2268,
"step": 412
},
{
"epoch": 0.35572782084409993,
"grad_norm": 0.9728747606277466,
"learning_rate": 1.4935428836499333e-05,
"loss": 1.2359,
"step": 413
},
{
"epoch": 0.35658914728682173,
"grad_norm": 0.946317732334137,
"learning_rate": 1.4911144022919879e-05,
"loss": 1.2062,
"step": 414
},
{
"epoch": 0.35745047372954347,
"grad_norm": 0.8988799452781677,
"learning_rate": 1.4886820979247561e-05,
"loss": 1.2389,
"step": 415
},
{
"epoch": 0.35831180017226527,
"grad_norm": 1.0396331548690796,
"learning_rate": 1.4862459894821606e-05,
"loss": 1.2247,
"step": 416
},
{
"epoch": 0.35917312661498707,
"grad_norm": 0.8363151550292969,
"learning_rate": 1.483806095927737e-05,
"loss": 1.1869,
"step": 417
},
{
"epoch": 0.36003445305770887,
"grad_norm": 0.9274395704269409,
"learning_rate": 1.481362436254484e-05,
"loss": 1.1593,
"step": 418
},
{
"epoch": 0.36089577950043067,
"grad_norm": 1.005424976348877,
"learning_rate": 1.4789150294847192e-05,
"loss": 1.2132,
"step": 419
},
{
"epoch": 0.36175710594315247,
"grad_norm": 0.9512991309165955,
"learning_rate": 1.4764638946699275e-05,
"loss": 1.2082,
"step": 420
},
{
"epoch": 0.36261843238587427,
"grad_norm": 0.8919705152511597,
"learning_rate": 1.4740090508906147e-05,
"loss": 1.1693,
"step": 421
},
{
"epoch": 0.363479758828596,
"grad_norm": 0.8347681760787964,
"learning_rate": 1.4715505172561577e-05,
"loss": 1.2604,
"step": 422
},
{
"epoch": 0.3643410852713178,
"grad_norm": 0.8956724405288696,
"learning_rate": 1.4690883129046585e-05,
"loss": 1.2241,
"step": 423
},
{
"epoch": 0.3652024117140396,
"grad_norm": 0.9047289490699768,
"learning_rate": 1.466622457002791e-05,
"loss": 1.2257,
"step": 424
},
{
"epoch": 0.3660637381567614,
"grad_norm": 1.0073318481445312,
"learning_rate": 1.4641529687456558e-05,
"loss": 1.2385,
"step": 425
},
{
"epoch": 0.3669250645994832,
"grad_norm": 0.9420737028121948,
"learning_rate": 1.4616798673566276e-05,
"loss": 1.2037,
"step": 426
},
{
"epoch": 0.367786391042205,
"grad_norm": 0.9980217218399048,
"learning_rate": 1.4592031720872086e-05,
"loss": 1.2163,
"step": 427
},
{
"epoch": 0.3686477174849268,
"grad_norm": 0.8870885968208313,
"learning_rate": 1.4567229022168756e-05,
"loss": 1.2356,
"step": 428
},
{
"epoch": 0.3695090439276486,
"grad_norm": 0.9896050095558167,
"learning_rate": 1.454239077052932e-05,
"loss": 1.2334,
"step": 429
},
{
"epoch": 0.37037037037037035,
"grad_norm": 1.0749104022979736,
"learning_rate": 1.4517517159303573e-05,
"loss": 1.2024,
"step": 430
},
{
"epoch": 0.37123169681309215,
"grad_norm": 0.8981916904449463,
"learning_rate": 1.4492608382116548e-05,
"loss": 1.1876,
"step": 431
},
{
"epoch": 0.37209302325581395,
"grad_norm": 0.9399816393852234,
"learning_rate": 1.4467664632867042e-05,
"loss": 1.2465,
"step": 432
},
{
"epoch": 0.37295434969853575,
"grad_norm": 0.9650474786758423,
"learning_rate": 1.4442686105726066e-05,
"loss": 1.2525,
"step": 433
},
{
"epoch": 0.37381567614125755,
"grad_norm": 0.9051154255867004,
"learning_rate": 1.4417672995135372e-05,
"loss": 1.2653,
"step": 434
},
{
"epoch": 0.37467700258397935,
"grad_norm": 0.8648033142089844,
"learning_rate": 1.4392625495805913e-05,
"loss": 1.2221,
"step": 435
},
{
"epoch": 0.37553832902670115,
"grad_norm": 0.942790687084198,
"learning_rate": 1.4367543802716334e-05,
"loss": 1.2258,
"step": 436
},
{
"epoch": 0.3763996554694229,
"grad_norm": 0.849907398223877,
"learning_rate": 1.4342428111111461e-05,
"loss": 1.2307,
"step": 437
},
{
"epoch": 0.3772609819121447,
"grad_norm": 0.9022756218910217,
"learning_rate": 1.4317278616500785e-05,
"loss": 1.2356,
"step": 438
},
{
"epoch": 0.3781223083548665,
"grad_norm": 1.0210813283920288,
"learning_rate": 1.4292095514656907e-05,
"loss": 1.2099,
"step": 439
},
{
"epoch": 0.3789836347975883,
"grad_norm": 0.8442511558532715,
"learning_rate": 1.4266879001614067e-05,
"loss": 1.2509,
"step": 440
},
{
"epoch": 0.3798449612403101,
"grad_norm": 0.8751745223999023,
"learning_rate": 1.424162927366657e-05,
"loss": 1.2039,
"step": 441
},
{
"epoch": 0.3807062876830319,
"grad_norm": 0.8547453880310059,
"learning_rate": 1.4216346527367284e-05,
"loss": 1.239,
"step": 442
},
{
"epoch": 0.3815676141257537,
"grad_norm": 0.8485442399978638,
"learning_rate": 1.4191030959526106e-05,
"loss": 1.2208,
"step": 443
},
{
"epoch": 0.38242894056847543,
"grad_norm": 0.9222948551177979,
"learning_rate": 1.4165682767208426e-05,
"loss": 1.2407,
"step": 444
},
{
"epoch": 0.3832902670111972,
"grad_norm": 0.8452631235122681,
"learning_rate": 1.4140302147733596e-05,
"loss": 1.2388,
"step": 445
},
{
"epoch": 0.384151593453919,
"grad_norm": 0.8585206270217896,
"learning_rate": 1.4114889298673383e-05,
"loss": 1.2138,
"step": 446
},
{
"epoch": 0.3850129198966408,
"grad_norm": 0.8404960036277771,
"learning_rate": 1.4089444417850455e-05,
"loss": 1.2613,
"step": 447
},
{
"epoch": 0.3858742463393626,
"grad_norm": 0.8335172533988953,
"learning_rate": 1.4063967703336814e-05,
"loss": 1.2188,
"step": 448
},
{
"epoch": 0.3867355727820844,
"grad_norm": 0.8247851729393005,
"learning_rate": 1.403845935345228e-05,
"loss": 1.2334,
"step": 449
},
{
"epoch": 0.3875968992248062,
"grad_norm": 0.9984527230262756,
"learning_rate": 1.401291956676292e-05,
"loss": 1.2272,
"step": 450
},
{
"epoch": 0.38845822566752797,
"grad_norm": 0.8594214916229248,
"learning_rate": 1.3987348542079526e-05,
"loss": 1.1752,
"step": 451
},
{
"epoch": 0.38931955211024977,
"grad_norm": 0.95940101146698,
"learning_rate": 1.396174647845605e-05,
"loss": 1.2172,
"step": 452
},
{
"epoch": 0.39018087855297157,
"grad_norm": 0.8622090816497803,
"learning_rate": 1.3936113575188074e-05,
"loss": 1.206,
"step": 453
},
{
"epoch": 0.39104220499569337,
"grad_norm": 0.8805087804794312,
"learning_rate": 1.3910450031811235e-05,
"loss": 1.2339,
"step": 454
},
{
"epoch": 0.39190353143841516,
"grad_norm": 0.8449251651763916,
"learning_rate": 1.3884756048099688e-05,
"loss": 1.2253,
"step": 455
},
{
"epoch": 0.39276485788113696,
"grad_norm": 0.9744880199432373,
"learning_rate": 1.3859031824064543e-05,
"loss": 1.237,
"step": 456
},
{
"epoch": 0.39362618432385876,
"grad_norm": 0.8672634959220886,
"learning_rate": 1.3833277559952323e-05,
"loss": 1.2199,
"step": 457
},
{
"epoch": 0.3944875107665805,
"grad_norm": 0.8973323702812195,
"learning_rate": 1.380749345624338e-05,
"loss": 1.1859,
"step": 458
},
{
"epoch": 0.3953488372093023,
"grad_norm": 0.9631936550140381,
"learning_rate": 1.3781679713650349e-05,
"loss": 1.2133,
"step": 459
},
{
"epoch": 0.3962101636520241,
"grad_norm": 0.8389044404029846,
"learning_rate": 1.3755836533116597e-05,
"loss": 1.2095,
"step": 460
},
{
"epoch": 0.3970714900947459,
"grad_norm": 0.8947293162345886,
"learning_rate": 1.3729964115814636e-05,
"loss": 1.1841,
"step": 461
},
{
"epoch": 0.3979328165374677,
"grad_norm": 0.9099428057670593,
"learning_rate": 1.3704062663144569e-05,
"loss": 1.2188,
"step": 462
},
{
"epoch": 0.3987941429801895,
"grad_norm": 0.9000226855278015,
"learning_rate": 1.3678132376732518e-05,
"loss": 1.2673,
"step": 463
},
{
"epoch": 0.3996554694229113,
"grad_norm": 0.9490253925323486,
"learning_rate": 1.3652173458429068e-05,
"loss": 1.2233,
"step": 464
},
{
"epoch": 0.4005167958656331,
"grad_norm": 0.9906533360481262,
"learning_rate": 1.3626186110307673e-05,
"loss": 1.2125,
"step": 465
},
{
"epoch": 0.40137812230835485,
"grad_norm": 0.9203736186027527,
"learning_rate": 1.3600170534663097e-05,
"loss": 1.1732,
"step": 466
},
{
"epoch": 0.40223944875107664,
"grad_norm": 0.8627713918685913,
"learning_rate": 1.3574126934009843e-05,
"loss": 1.2441,
"step": 467
},
{
"epoch": 0.40310077519379844,
"grad_norm": 0.8728564977645874,
"learning_rate": 1.3548055511080568e-05,
"loss": 1.1814,
"step": 468
},
{
"epoch": 0.40396210163652024,
"grad_norm": 0.9065693020820618,
"learning_rate": 1.3521956468824505e-05,
"loss": 1.2281,
"step": 469
},
{
"epoch": 0.40482342807924204,
"grad_norm": 0.860071063041687,
"learning_rate": 1.3495830010405884e-05,
"loss": 1.2438,
"step": 470
},
{
"epoch": 0.40568475452196384,
"grad_norm": 0.8716009855270386,
"learning_rate": 1.346967633920236e-05,
"loss": 1.2185,
"step": 471
},
{
"epoch": 0.40654608096468564,
"grad_norm": 1.0129085779190063,
"learning_rate": 1.344349565880341e-05,
"loss": 1.2253,
"step": 472
},
{
"epoch": 0.4074074074074074,
"grad_norm": 0.8441084623336792,
"learning_rate": 1.3417288173008778e-05,
"loss": 1.2128,
"step": 473
},
{
"epoch": 0.4082687338501292,
"grad_norm": 0.9617881178855896,
"learning_rate": 1.339105408582685e-05,
"loss": 1.2085,
"step": 474
},
{
"epoch": 0.409130060292851,
"grad_norm": 0.8780672550201416,
"learning_rate": 1.3364793601473105e-05,
"loss": 1.1989,
"step": 475
},
{
"epoch": 0.4099913867355728,
"grad_norm": 0.9372087717056274,
"learning_rate": 1.3338506924368494e-05,
"loss": 1.2343,
"step": 476
},
{
"epoch": 0.4108527131782946,
"grad_norm": 0.8718287348747253,
"learning_rate": 1.331219425913787e-05,
"loss": 1.2009,
"step": 477
},
{
"epoch": 0.4117140396210164,
"grad_norm": 0.8041454553604126,
"learning_rate": 1.3285855810608377e-05,
"loss": 1.2006,
"step": 478
},
{
"epoch": 0.4125753660637382,
"grad_norm": 0.8639675378799438,
"learning_rate": 1.325949178380788e-05,
"loss": 1.2244,
"step": 479
},
{
"epoch": 0.4134366925064599,
"grad_norm": 0.9752419590950012,
"learning_rate": 1.3233102383963341e-05,
"loss": 1.2374,
"step": 480
},
{
"epoch": 0.4142980189491817,
"grad_norm": 0.9429941177368164,
"learning_rate": 1.3206687816499242e-05,
"loss": 1.2437,
"step": 481
},
{
"epoch": 0.4151593453919035,
"grad_norm": 0.830390214920044,
"learning_rate": 1.3180248287035977e-05,
"loss": 1.2173,
"step": 482
},
{
"epoch": 0.4160206718346253,
"grad_norm": 0.8312661051750183,
"learning_rate": 1.3153784001388249e-05,
"loss": 1.2432,
"step": 483
},
{
"epoch": 0.4168819982773471,
"grad_norm": 0.8787814378738403,
"learning_rate": 1.3127295165563476e-05,
"loss": 1.2465,
"step": 484
},
{
"epoch": 0.4177433247200689,
"grad_norm": 0.8632099628448486,
"learning_rate": 1.3100781985760188e-05,
"loss": 1.2347,
"step": 485
},
{
"epoch": 0.4186046511627907,
"grad_norm": 1.0281778573989868,
"learning_rate": 1.3074244668366412e-05,
"loss": 1.2193,
"step": 486
},
{
"epoch": 0.41946597760551246,
"grad_norm": 0.9415022730827332,
"learning_rate": 1.3047683419958062e-05,
"loss": 1.2021,
"step": 487
},
{
"epoch": 0.42032730404823426,
"grad_norm": 0.9123542904853821,
"learning_rate": 1.3021098447297358e-05,
"loss": 1.216,
"step": 488
},
{
"epoch": 0.42118863049095606,
"grad_norm": 0.9520506858825684,
"learning_rate": 1.2994489957331183e-05,
"loss": 1.2148,
"step": 489
},
{
"epoch": 0.42204995693367786,
"grad_norm": 0.8072666525840759,
"learning_rate": 1.2967858157189495e-05,
"loss": 1.2404,
"step": 490
},
{
"epoch": 0.42291128337639966,
"grad_norm": 0.9025439620018005,
"learning_rate": 1.29412032541837e-05,
"loss": 1.2183,
"step": 491
},
{
"epoch": 0.42377260981912146,
"grad_norm": 0.8652177453041077,
"learning_rate": 1.2914525455805056e-05,
"loss": 1.1991,
"step": 492
},
{
"epoch": 0.42463393626184326,
"grad_norm": 1.0470143556594849,
"learning_rate": 1.2887824969723035e-05,
"loss": 1.2286,
"step": 493
},
{
"epoch": 0.425495262704565,
"grad_norm": 0.9123655557632446,
"learning_rate": 1.2861102003783722e-05,
"loss": 1.2405,
"step": 494
},
{
"epoch": 0.4263565891472868,
"grad_norm": 0.8399021625518799,
"learning_rate": 1.2834356766008198e-05,
"loss": 1.1718,
"step": 495
},
{
"epoch": 0.4272179155900086,
"grad_norm": 1.0004807710647583,
"learning_rate": 1.2807589464590908e-05,
"loss": 1.1792,
"step": 496
},
{
"epoch": 0.4280792420327304,
"grad_norm": 0.8504483103752136,
"learning_rate": 1.2780800307898057e-05,
"loss": 1.1962,
"step": 497
},
{
"epoch": 0.4289405684754522,
"grad_norm": 0.9600708484649658,
"learning_rate": 1.2753989504465967e-05,
"loss": 1.2396,
"step": 498
},
{
"epoch": 0.429801894918174,
"grad_norm": 0.8892411589622498,
"learning_rate": 1.2727157262999481e-05,
"loss": 1.1949,
"step": 499
},
{
"epoch": 0.4306632213608958,
"grad_norm": 0.9092352986335754,
"learning_rate": 1.270030379237031e-05,
"loss": 1.2467,
"step": 500
},
{
"epoch": 0.4315245478036176,
"grad_norm": 0.8833507299423218,
"learning_rate": 1.2673429301615431e-05,
"loss": 1.1803,
"step": 501
},
{
"epoch": 0.43238587424633934,
"grad_norm": 0.8737507462501526,
"learning_rate": 1.2646533999935442e-05,
"loss": 1.2517,
"step": 502
},
{
"epoch": 0.43324720068906114,
"grad_norm": 0.8569416999816895,
"learning_rate": 1.2619618096692942e-05,
"loss": 1.2096,
"step": 503
},
{
"epoch": 0.43410852713178294,
"grad_norm": 0.9263505935668945,
"learning_rate": 1.25926818014109e-05,
"loss": 1.1993,
"step": 504
},
{
"epoch": 0.43496985357450474,
"grad_norm": 1.0186364650726318,
"learning_rate": 1.256572532377103e-05,
"loss": 1.2233,
"step": 505
},
{
"epoch": 0.43583118001722654,
"grad_norm": 0.83504319190979,
"learning_rate": 1.253874887361214e-05,
"loss": 1.2251,
"step": 506
},
{
"epoch": 0.43669250645994834,
"grad_norm": 0.9106087684631348,
"learning_rate": 1.2511752660928523e-05,
"loss": 1.2103,
"step": 507
},
{
"epoch": 0.43755383290267014,
"grad_norm": 0.8864356279373169,
"learning_rate": 1.2484736895868306e-05,
"loss": 1.2015,
"step": 508
},
{
"epoch": 0.4384151593453919,
"grad_norm": 0.9549910426139832,
"learning_rate": 1.2457701788731812e-05,
"loss": 1.2394,
"step": 509
},
{
"epoch": 0.4392764857881137,
"grad_norm": 0.9277191758155823,
"learning_rate": 1.2430647549969949e-05,
"loss": 1.2186,
"step": 510
},
{
"epoch": 0.4401378122308355,
"grad_norm": 0.9281412363052368,
"learning_rate": 1.2403574390182529e-05,
"loss": 1.2122,
"step": 511
},
{
"epoch": 0.4409991386735573,
"grad_norm": 0.8452510833740234,
"learning_rate": 1.2376482520116666e-05,
"loss": 1.2203,
"step": 512
},
{
"epoch": 0.4418604651162791,
"grad_norm": 0.9457308650016785,
"learning_rate": 1.2349372150665117e-05,
"loss": 1.2073,
"step": 513
},
{
"epoch": 0.4427217915590009,
"grad_norm": 0.8949629664421082,
"learning_rate": 1.2322243492864651e-05,
"loss": 1.2078,
"step": 514
},
{
"epoch": 0.4435831180017227,
"grad_norm": 0.9043914079666138,
"learning_rate": 1.2295096757894389e-05,
"loss": 1.2159,
"step": 515
},
{
"epoch": 0.4444444444444444,
"grad_norm": 0.8685716390609741,
"learning_rate": 1.2267932157074178e-05,
"loss": 1.2262,
"step": 516
},
{
"epoch": 0.4453057708871662,
"grad_norm": 0.8798421621322632,
"learning_rate": 1.224074990186294e-05,
"loss": 1.2551,
"step": 517
},
{
"epoch": 0.446167097329888,
"grad_norm": 0.9253307580947876,
"learning_rate": 1.2213550203857025e-05,
"loss": 1.2072,
"step": 518
},
{
"epoch": 0.4470284237726098,
"grad_norm": 0.9633350372314453,
"learning_rate": 1.2186333274788558e-05,
"loss": 1.2122,
"step": 519
},
{
"epoch": 0.4478897502153316,
"grad_norm": 0.8635453581809998,
"learning_rate": 1.21590993265238e-05,
"loss": 1.2107,
"step": 520
},
{
"epoch": 0.4487510766580534,
"grad_norm": 0.8736230134963989,
"learning_rate": 1.2131848571061501e-05,
"loss": 1.2315,
"step": 521
},
{
"epoch": 0.4496124031007752,
"grad_norm": 0.8366631269454956,
"learning_rate": 1.2104581220531237e-05,
"loss": 1.2226,
"step": 522
},
{
"epoch": 0.45047372954349696,
"grad_norm": 0.9264794588088989,
"learning_rate": 1.2077297487191771e-05,
"loss": 1.2,
"step": 523
},
{
"epoch": 0.45133505598621876,
"grad_norm": 0.8290119171142578,
"learning_rate": 1.2049997583429389e-05,
"loss": 1.2188,
"step": 524
},
{
"epoch": 0.45219638242894056,
"grad_norm": 0.9655864238739014,
"learning_rate": 1.202268172175626e-05,
"loss": 1.2326,
"step": 525
},
{
"epoch": 0.45305770887166236,
"grad_norm": 0.8329289555549622,
"learning_rate": 1.1995350114808772e-05,
"loss": 1.2218,
"step": 526
},
{
"epoch": 0.45391903531438416,
"grad_norm": 0.9534193873405457,
"learning_rate": 1.1968002975345882e-05,
"loss": 1.2072,
"step": 527
},
{
"epoch": 0.45478036175710596,
"grad_norm": 0.8719503283500671,
"learning_rate": 1.194064051624745e-05,
"loss": 1.2469,
"step": 528
},
{
"epoch": 0.45564168819982775,
"grad_norm": 0.9249786138534546,
"learning_rate": 1.1913262950512605e-05,
"loss": 1.2378,
"step": 529
},
{
"epoch": 0.45650301464254955,
"grad_norm": 0.8721954226493835,
"learning_rate": 1.1885870491258054e-05,
"loss": 1.1767,
"step": 530
},
{
"epoch": 0.4573643410852713,
"grad_norm": 0.9456660747528076,
"learning_rate": 1.185846335171645e-05,
"loss": 1.1892,
"step": 531
},
{
"epoch": 0.4582256675279931,
"grad_norm": 0.856704592704773,
"learning_rate": 1.1831041745234728e-05,
"loss": 1.2102,
"step": 532
},
{
"epoch": 0.4590869939707149,
"grad_norm": 0.8333457112312317,
"learning_rate": 1.180360588527242e-05,
"loss": 1.2255,
"step": 533
},
{
"epoch": 0.4599483204134367,
"grad_norm": 0.9460641741752625,
"learning_rate": 1.177615598540003e-05,
"loss": 1.1819,
"step": 534
},
{
"epoch": 0.4608096468561585,
"grad_norm": 0.830569326877594,
"learning_rate": 1.1748692259297347e-05,
"loss": 1.2227,
"step": 535
},
{
"epoch": 0.4616709732988803,
"grad_norm": 0.8055482506752014,
"learning_rate": 1.172121492075179e-05,
"loss": 1.2011,
"step": 536
},
{
"epoch": 0.4625322997416021,
"grad_norm": 0.9891955256462097,
"learning_rate": 1.169372418365674e-05,
"loss": 1.1889,
"step": 537
},
{
"epoch": 0.46339362618432384,
"grad_norm": 0.9944434762001038,
"learning_rate": 1.1666220262009877e-05,
"loss": 1.185,
"step": 538
},
{
"epoch": 0.46425495262704564,
"grad_norm": 0.8905592560768127,
"learning_rate": 1.1638703369911517e-05,
"loss": 1.2404,
"step": 539
},
{
"epoch": 0.46511627906976744,
"grad_norm": 0.8993557691574097,
"learning_rate": 1.161117372156294e-05,
"loss": 1.1786,
"step": 540
},
{
"epoch": 0.46597760551248923,
"grad_norm": 0.9197692275047302,
"learning_rate": 1.1583631531264723e-05,
"loss": 1.2178,
"step": 541
},
{
"epoch": 0.46683893195521103,
"grad_norm": 0.8710423111915588,
"learning_rate": 1.1556077013415084e-05,
"loss": 1.2422,
"step": 542
},
{
"epoch": 0.46770025839793283,
"grad_norm": 0.8500446081161499,
"learning_rate": 1.152851038250819e-05,
"loss": 1.2196,
"step": 543
},
{
"epoch": 0.46856158484065463,
"grad_norm": 0.88962721824646,
"learning_rate": 1.150093185313251e-05,
"loss": 1.1858,
"step": 544
},
{
"epoch": 0.4694229112833764,
"grad_norm": 0.907476544380188,
"learning_rate": 1.147334163996913e-05,
"loss": 1.1906,
"step": 545
},
{
"epoch": 0.4702842377260982,
"grad_norm": 0.9100911617279053,
"learning_rate": 1.1445739957790087e-05,
"loss": 1.2185,
"step": 546
},
{
"epoch": 0.47114556416882,
"grad_norm": 0.9100723266601562,
"learning_rate": 1.14181270214567e-05,
"loss": 1.2539,
"step": 547
},
{
"epoch": 0.4720068906115418,
"grad_norm": 0.9603404998779297,
"learning_rate": 1.1390503045917892e-05,
"loss": 1.1917,
"step": 548
},
{
"epoch": 0.4728682170542636,
"grad_norm": 0.8922184109687805,
"learning_rate": 1.1362868246208519e-05,
"loss": 1.1583,
"step": 549
},
{
"epoch": 0.4737295434969854,
"grad_norm": 0.9883030652999878,
"learning_rate": 1.1335222837447692e-05,
"loss": 1.2329,
"step": 550
},
{
"epoch": 0.47459086993970717,
"grad_norm": 0.9090227484703064,
"learning_rate": 1.1307567034837123e-05,
"loss": 1.2424,
"step": 551
},
{
"epoch": 0.4754521963824289,
"grad_norm": 0.8161377906799316,
"learning_rate": 1.127990105365941e-05,
"loss": 1.2166,
"step": 552
},
{
"epoch": 0.4763135228251507,
"grad_norm": 0.9323464632034302,
"learning_rate": 1.1252225109276404e-05,
"loss": 1.1941,
"step": 553
},
{
"epoch": 0.4771748492678725,
"grad_norm": 0.9303016662597656,
"learning_rate": 1.12245394171275e-05,
"loss": 1.1916,
"step": 554
},
{
"epoch": 0.4780361757105943,
"grad_norm": 0.9320595264434814,
"learning_rate": 1.1196844192727984e-05,
"loss": 1.2087,
"step": 555
},
{
"epoch": 0.4788975021533161,
"grad_norm": 0.848597526550293,
"learning_rate": 1.1169139651667334e-05,
"loss": 1.1857,
"step": 556
},
{
"epoch": 0.4797588285960379,
"grad_norm": 0.9183049201965332,
"learning_rate": 1.1141426009607562e-05,
"loss": 1.2185,
"step": 557
},
{
"epoch": 0.4806201550387597,
"grad_norm": 0.9825764298439026,
"learning_rate": 1.1113703482281515e-05,
"loss": 1.2045,
"step": 558
},
{
"epoch": 0.48148148148148145,
"grad_norm": 0.8843861818313599,
"learning_rate": 1.1085972285491213e-05,
"loss": 1.198,
"step": 559
},
{
"epoch": 0.48234280792420325,
"grad_norm": 0.8294288516044617,
"learning_rate": 1.1058232635106167e-05,
"loss": 1.1729,
"step": 560
},
{
"epoch": 0.48320413436692505,
"grad_norm": 0.810230553150177,
"learning_rate": 1.103048474706168e-05,
"loss": 1.2055,
"step": 561
},
{
"epoch": 0.48406546080964685,
"grad_norm": 0.8811335563659668,
"learning_rate": 1.1002728837357192e-05,
"loss": 1.2168,
"step": 562
},
{
"epoch": 0.48492678725236865,
"grad_norm": 0.9530275464057922,
"learning_rate": 1.097496512205458e-05,
"loss": 1.2179,
"step": 563
},
{
"epoch": 0.48578811369509045,
"grad_norm": 0.9637301564216614,
"learning_rate": 1.0947193817276485e-05,
"loss": 1.1692,
"step": 564
},
{
"epoch": 0.48664944013781225,
"grad_norm": 0.8479545712471008,
"learning_rate": 1.0919415139204625e-05,
"loss": 1.2172,
"step": 565
},
{
"epoch": 0.48751076658053405,
"grad_norm": 0.9883643388748169,
"learning_rate": 1.089162930407812e-05,
"loss": 1.2697,
"step": 566
},
{
"epoch": 0.4883720930232558,
"grad_norm": 0.8231867551803589,
"learning_rate": 1.0863836528191795e-05,
"loss": 1.215,
"step": 567
},
{
"epoch": 0.4892334194659776,
"grad_norm": 0.8607746362686157,
"learning_rate": 1.0836037027894515e-05,
"loss": 1.2322,
"step": 568
},
{
"epoch": 0.4900947459086994,
"grad_norm": 0.85414719581604,
"learning_rate": 1.0808231019587472e-05,
"loss": 1.2044,
"step": 569
},
{
"epoch": 0.4909560723514212,
"grad_norm": 0.835567831993103,
"learning_rate": 1.0780418719722544e-05,
"loss": 1.218,
"step": 570
},
{
"epoch": 0.491817398794143,
"grad_norm": 0.841223418712616,
"learning_rate": 1.075260034480056e-05,
"loss": 1.2253,
"step": 571
},
{
"epoch": 0.4926787252368648,
"grad_norm": 0.8959380984306335,
"learning_rate": 1.0724776111369654e-05,
"loss": 1.1758,
"step": 572
},
{
"epoch": 0.4935400516795866,
"grad_norm": 0.9385613799095154,
"learning_rate": 1.0696946236023566e-05,
"loss": 1.2174,
"step": 573
},
{
"epoch": 0.49440137812230833,
"grad_norm": 0.9247633814811707,
"learning_rate": 1.0669110935399944e-05,
"loss": 1.2104,
"step": 574
},
{
"epoch": 0.49526270456503013,
"grad_norm": 0.8738383650779724,
"learning_rate": 1.0641270426178677e-05,
"loss": 1.1825,
"step": 575
},
{
"epoch": 0.49612403100775193,
"grad_norm": 1.0910409688949585,
"learning_rate": 1.0613424925080194e-05,
"loss": 1.1731,
"step": 576
},
{
"epoch": 0.49698535745047373,
"grad_norm": 0.8407815098762512,
"learning_rate": 1.058557464886379e-05,
"loss": 1.2002,
"step": 577
},
{
"epoch": 0.49784668389319553,
"grad_norm": 0.9195674657821655,
"learning_rate": 1.055771981432592e-05,
"loss": 1.2013,
"step": 578
},
{
"epoch": 0.49870801033591733,
"grad_norm": 0.8779056072235107,
"learning_rate": 1.0529860638298535e-05,
"loss": 1.2098,
"step": 579
},
{
"epoch": 0.49956933677863913,
"grad_norm": 0.8528910875320435,
"learning_rate": 1.0501997337647372e-05,
"loss": 1.1676,
"step": 580
},
{
"epoch": 0.5004306632213609,
"grad_norm": 0.8757944703102112,
"learning_rate": 1.0474130129270281e-05,
"loss": 1.1858,
"step": 581
},
{
"epoch": 0.5012919896640827,
"grad_norm": 0.8935431241989136,
"learning_rate": 1.0446259230095531e-05,
"loss": 1.1776,
"step": 582
},
{
"epoch": 0.5021533161068045,
"grad_norm": 0.9777861833572388,
"learning_rate": 1.0418384857080118e-05,
"loss": 1.1756,
"step": 583
},
{
"epoch": 0.5030146425495263,
"grad_norm": 0.9448710680007935,
"learning_rate": 1.039050722720808e-05,
"loss": 1.2015,
"step": 584
},
{
"epoch": 0.5038759689922481,
"grad_norm": 0.8520198464393616,
"learning_rate": 1.0362626557488811e-05,
"loss": 1.1893,
"step": 585
},
{
"epoch": 0.5047372954349698,
"grad_norm": 0.9623661041259766,
"learning_rate": 1.0334743064955367e-05,
"loss": 1.1882,
"step": 586
},
{
"epoch": 0.5055986218776917,
"grad_norm": 0.8741092681884766,
"learning_rate": 1.0306856966662776e-05,
"loss": 1.17,
"step": 587
},
{
"epoch": 0.5064599483204134,
"grad_norm": 0.8488198518753052,
"learning_rate": 1.027896847968635e-05,
"loss": 1.2303,
"step": 588
},
{
"epoch": 0.5073212747631353,
"grad_norm": 0.9547082185745239,
"learning_rate": 1.0251077821119998e-05,
"loss": 1.2269,
"step": 589
},
{
"epoch": 0.508182601205857,
"grad_norm": 1.0101442337036133,
"learning_rate": 1.0223185208074538e-05,
"loss": 1.191,
"step": 590
},
{
"epoch": 0.5090439276485789,
"grad_norm": 0.8840098977088928,
"learning_rate": 1.0195290857675982e-05,
"loss": 1.1732,
"step": 591
},
{
"epoch": 0.5099052540913006,
"grad_norm": 0.7868524789810181,
"learning_rate": 1.0167394987063894e-05,
"loss": 1.1934,
"step": 592
},
{
"epoch": 0.5107665805340224,
"grad_norm": 0.8917614817619324,
"learning_rate": 1.0139497813389654e-05,
"loss": 1.2006,
"step": 593
},
{
"epoch": 0.5116279069767442,
"grad_norm": 0.8367542624473572,
"learning_rate": 1.0111599553814788e-05,
"loss": 1.1675,
"step": 594
},
{
"epoch": 0.512489233419466,
"grad_norm": 0.8534975647926331,
"learning_rate": 1.008370042550928e-05,
"loss": 1.1979,
"step": 595
},
{
"epoch": 0.5133505598621878,
"grad_norm": 0.8273769617080688,
"learning_rate": 1.0055800645649874e-05,
"loss": 1.1826,
"step": 596
},
{
"epoch": 0.5142118863049095,
"grad_norm": 0.9318427443504333,
"learning_rate": 1.002790043141838e-05,
"loss": 1.18,
"step": 597
},
{
"epoch": 0.5150732127476314,
"grad_norm": 0.9464846849441528,
"learning_rate": 1e-05,
"loss": 1.1945,
"step": 598
},
{
"epoch": 0.5159345391903531,
"grad_norm": 0.893747866153717,
"learning_rate": 9.972099568581621e-06,
"loss": 1.2057,
"step": 599
},
{
"epoch": 0.5167958656330749,
"grad_norm": 0.9831318259239197,
"learning_rate": 9.94419935435013e-06,
"loss": 1.2543,
"step": 600
},
{
"epoch": 0.5176571920757967,
"grad_norm": 0.8675307035446167,
"learning_rate": 9.916299574490722e-06,
"loss": 1.1924,
"step": 601
},
{
"epoch": 0.5185185185185185,
"grad_norm": 0.8235703706741333,
"learning_rate": 9.888400446185212e-06,
"loss": 1.205,
"step": 602
},
{
"epoch": 0.5193798449612403,
"grad_norm": 0.9362935423851013,
"learning_rate": 9.860502186610349e-06,
"loss": 1.2045,
"step": 603
},
{
"epoch": 0.5202411714039621,
"grad_norm": 1.0003681182861328,
"learning_rate": 9.832605012936107e-06,
"loss": 1.1725,
"step": 604
},
{
"epoch": 0.5211024978466839,
"grad_norm": 0.8889096975326538,
"learning_rate": 9.80470914232402e-06,
"loss": 1.2078,
"step": 605
},
{
"epoch": 0.5219638242894057,
"grad_norm": 0.9049688577651978,
"learning_rate": 9.77681479192547e-06,
"loss": 1.1603,
"step": 606
},
{
"epoch": 0.5228251507321274,
"grad_norm": 0.8464852571487427,
"learning_rate": 9.748922178880005e-06,
"loss": 1.2056,
"step": 607
},
{
"epoch": 0.5236864771748493,
"grad_norm": 0.9175320267677307,
"learning_rate": 9.721031520313653e-06,
"loss": 1.2323,
"step": 608
},
{
"epoch": 0.524547803617571,
"grad_norm": 0.8868247270584106,
"learning_rate": 9.693143033337228e-06,
"loss": 1.186,
"step": 609
},
{
"epoch": 0.5254091300602929,
"grad_norm": 0.8364291191101074,
"learning_rate": 9.665256935044636e-06,
"loss": 1.1761,
"step": 610
},
{
"epoch": 0.5262704565030146,
"grad_norm": 0.8297803401947021,
"learning_rate": 9.637373442511192e-06,
"loss": 1.1775,
"step": 611
},
{
"epoch": 0.5271317829457365,
"grad_norm": 0.9279587268829346,
"learning_rate": 9.609492772791924e-06,
"loss": 1.1789,
"step": 612
},
{
"epoch": 0.5279931093884582,
"grad_norm": 0.8592475056648254,
"learning_rate": 9.581615142919887e-06,
"loss": 1.2299,
"step": 613
},
{
"epoch": 0.52885443583118,
"grad_norm": 0.8591251373291016,
"learning_rate": 9.55374076990447e-06,
"loss": 1.1792,
"step": 614
},
{
"epoch": 0.5297157622739018,
"grad_norm": 0.9070160388946533,
"learning_rate": 9.525869870729719e-06,
"loss": 1.1989,
"step": 615
},
{
"epoch": 0.5305770887166236,
"grad_norm": 0.8992422223091125,
"learning_rate": 9.49800266235263e-06,
"loss": 1.1947,
"step": 616
},
{
"epoch": 0.5314384151593454,
"grad_norm": 0.8558951616287231,
"learning_rate": 9.470139361701469e-06,
"loss": 1.2101,
"step": 617
},
{
"epoch": 0.5322997416020672,
"grad_norm": 0.9297447800636292,
"learning_rate": 9.442280185674084e-06,
"loss": 1.1918,
"step": 618
},
{
"epoch": 0.533161068044789,
"grad_norm": 0.8094662427902222,
"learning_rate": 9.414425351136215e-06,
"loss": 1.2102,
"step": 619
},
{
"epoch": 0.5340223944875108,
"grad_norm": 0.8350943326950073,
"learning_rate": 9.386575074919806e-06,
"loss": 1.1809,
"step": 620
},
{
"epoch": 0.5348837209302325,
"grad_norm": 0.9019107818603516,
"learning_rate": 9.358729573821325e-06,
"loss": 1.1814,
"step": 621
},
{
"epoch": 0.5357450473729544,
"grad_norm": 0.8287387490272522,
"learning_rate": 9.330889064600058e-06,
"loss": 1.2297,
"step": 622
},
{
"epoch": 0.5366063738156761,
"grad_norm": 0.8505203723907471,
"learning_rate": 9.303053763976436e-06,
"loss": 1.2028,
"step": 623
},
{
"epoch": 0.537467700258398,
"grad_norm": 0.8712747097015381,
"learning_rate": 9.275223888630348e-06,
"loss": 1.1718,
"step": 624
},
{
"epoch": 0.5383290267011197,
"grad_norm": 0.907810389995575,
"learning_rate": 9.247399655199444e-06,
"loss": 1.2403,
"step": 625
},
{
"epoch": 0.5391903531438416,
"grad_norm": 0.8972229957580566,
"learning_rate": 9.219581280277463e-06,
"loss": 1.1864,
"step": 626
},
{
"epoch": 0.5400516795865633,
"grad_norm": 0.897445023059845,
"learning_rate": 9.191768980412528e-06,
"loss": 1.1663,
"step": 627
},
{
"epoch": 0.540913006029285,
"grad_norm": 0.813290536403656,
"learning_rate": 9.163962972105488e-06,
"loss": 1.2084,
"step": 628
},
{
"epoch": 0.5417743324720069,
"grad_norm": 0.8626779913902283,
"learning_rate": 9.136163471808207e-06,
"loss": 1.2222,
"step": 629
},
{
"epoch": 0.5426356589147286,
"grad_norm": 0.949863076210022,
"learning_rate": 9.108370695921884e-06,
"loss": 1.2068,
"step": 630
},
{
"epoch": 0.5434969853574505,
"grad_norm": 1.0227597951889038,
"learning_rate": 9.080584860795378e-06,
"loss": 1.2174,
"step": 631
},
{
"epoch": 0.5443583118001722,
"grad_norm": 0.8536685705184937,
"learning_rate": 9.05280618272352e-06,
"loss": 1.1978,
"step": 632
},
{
"epoch": 0.5452196382428941,
"grad_norm": 0.8163442015647888,
"learning_rate": 9.025034877945422e-06,
"loss": 1.1749,
"step": 633
},
{
"epoch": 0.5460809646856158,
"grad_norm": 0.8537796139717102,
"learning_rate": 8.99727116264281e-06,
"loss": 1.1979,
"step": 634
},
{
"epoch": 0.5469422911283376,
"grad_norm": 0.8340730667114258,
"learning_rate": 8.969515252938323e-06,
"loss": 1.1756,
"step": 635
},
{
"epoch": 0.5478036175710594,
"grad_norm": 0.8549203276634216,
"learning_rate": 8.941767364893836e-06,
"loss": 1.1931,
"step": 636
},
{
"epoch": 0.5486649440137812,
"grad_norm": 0.8984569311141968,
"learning_rate": 8.914027714508788e-06,
"loss": 1.167,
"step": 637
},
{
"epoch": 0.549526270456503,
"grad_norm": 0.8956097960472107,
"learning_rate": 8.88629651771849e-06,
"loss": 1.1972,
"step": 638
},
{
"epoch": 0.5503875968992248,
"grad_norm": 0.889928936958313,
"learning_rate": 8.85857399039244e-06,
"loss": 1.1852,
"step": 639
},
{
"epoch": 0.5512489233419466,
"grad_norm": 0.8526130318641663,
"learning_rate": 8.830860348332666e-06,
"loss": 1.1825,
"step": 640
},
{
"epoch": 0.5521102497846684,
"grad_norm": 0.9533372521400452,
"learning_rate": 8.803155807272019e-06,
"loss": 1.208,
"step": 641
},
{
"epoch": 0.5529715762273901,
"grad_norm": 1.0128767490386963,
"learning_rate": 8.775460582872502e-06,
"loss": 1.2077,
"step": 642
},
{
"epoch": 0.553832902670112,
"grad_norm": 0.9490034580230713,
"learning_rate": 8.7477748907236e-06,
"loss": 1.2078,
"step": 643
},
{
"epoch": 0.5546942291128337,
"grad_norm": 0.9446279406547546,
"learning_rate": 8.720098946340594e-06,
"loss": 1.1819,
"step": 644
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.9068703055381775,
"learning_rate": 8.69243296516288e-06,
"loss": 1.2096,
"step": 645
},
{
"epoch": 0.5564168819982773,
"grad_norm": 0.8627210259437561,
"learning_rate": 8.664777162552308e-06,
"loss": 1.1981,
"step": 646
},
{
"epoch": 0.5572782084409992,
"grad_norm": 0.8679218292236328,
"learning_rate": 8.637131753791485e-06,
"loss": 1.2136,
"step": 647
},
{
"epoch": 0.5581395348837209,
"grad_norm": 0.8559207916259766,
"learning_rate": 8.609496954082113e-06,
"loss": 1.184,
"step": 648
},
{
"epoch": 0.5590008613264428,
"grad_norm": 0.8170911073684692,
"learning_rate": 8.581872978543305e-06,
"loss": 1.1849,
"step": 649
},
{
"epoch": 0.5598621877691645,
"grad_norm": 0.9225674867630005,
"learning_rate": 8.554260042209918e-06,
"loss": 1.1612,
"step": 650
},
{
"epoch": 0.5607235142118863,
"grad_norm": 0.8442004919052124,
"learning_rate": 8.526658360030876e-06,
"loss": 1.195,
"step": 651
},
{
"epoch": 0.5615848406546081,
"grad_norm": 0.9129129648208618,
"learning_rate": 8.499068146867492e-06,
"loss": 1.2332,
"step": 652
},
{
"epoch": 0.5624461670973299,
"grad_norm": 0.8818443417549133,
"learning_rate": 8.471489617491813e-06,
"loss": 1.1633,
"step": 653
},
{
"epoch": 0.5633074935400517,
"grad_norm": 0.8677276968955994,
"learning_rate": 8.44392298658492e-06,
"loss": 1.2062,
"step": 654
},
{
"epoch": 0.5641688199827735,
"grad_norm": 0.9707540273666382,
"learning_rate": 8.41636846873528e-06,
"loss": 1.1585,
"step": 655
},
{
"epoch": 0.5650301464254953,
"grad_norm": 0.929904580116272,
"learning_rate": 8.388826278437066e-06,
"loss": 1.1807,
"step": 656
},
{
"epoch": 0.5658914728682171,
"grad_norm": 0.8509595394134521,
"learning_rate": 8.361296630088488e-06,
"loss": 1.1799,
"step": 657
},
{
"epoch": 0.5667527993109388,
"grad_norm": 0.9297439455986023,
"learning_rate": 8.333779737990124e-06,
"loss": 1.2112,
"step": 658
},
{
"epoch": 0.5676141257536607,
"grad_norm": 0.913957953453064,
"learning_rate": 8.306275816343262e-06,
"loss": 1.1909,
"step": 659
},
{
"epoch": 0.5684754521963824,
"grad_norm": 0.8199489116668701,
"learning_rate": 8.278785079248211e-06,
"loss": 1.1718,
"step": 660
},
{
"epoch": 0.5693367786391043,
"grad_norm": 0.9394577741622925,
"learning_rate": 8.251307740702656e-06,
"loss": 1.1927,
"step": 661
},
{
"epoch": 0.570198105081826,
"grad_norm": 1.024397611618042,
"learning_rate": 8.223844014599973e-06,
"loss": 1.1849,
"step": 662
},
{
"epoch": 0.5710594315245479,
"grad_norm": 0.9490551352500916,
"learning_rate": 8.196394114727586e-06,
"loss": 1.1655,
"step": 663
},
{
"epoch": 0.5719207579672696,
"grad_norm": 0.8966484069824219,
"learning_rate": 8.168958254765275e-06,
"loss": 1.1916,
"step": 664
},
{
"epoch": 0.5727820844099913,
"grad_norm": 0.8795093894004822,
"learning_rate": 8.14153664828355e-06,
"loss": 1.2319,
"step": 665
},
{
"epoch": 0.5736434108527132,
"grad_norm": 0.8979016542434692,
"learning_rate": 8.114129508741947e-06,
"loss": 1.1894,
"step": 666
},
{
"epoch": 0.5745047372954349,
"grad_norm": 0.9347522258758545,
"learning_rate": 8.086737049487398e-06,
"loss": 1.1941,
"step": 667
},
{
"epoch": 0.5753660637381568,
"grad_norm": 1.0714643001556396,
"learning_rate": 8.059359483752551e-06,
"loss": 1.2061,
"step": 668
},
{
"epoch": 0.5762273901808785,
"grad_norm": 0.8999515175819397,
"learning_rate": 8.031997024654123e-06,
"loss": 1.1827,
"step": 669
},
{
"epoch": 0.5770887166236004,
"grad_norm": 0.8698625564575195,
"learning_rate": 8.00464988519123e-06,
"loss": 1.1879,
"step": 670
},
{
"epoch": 0.5779500430663221,
"grad_norm": 0.8773601651191711,
"learning_rate": 7.977318278243742e-06,
"loss": 1.1733,
"step": 671
},
{
"epoch": 0.5788113695090439,
"grad_norm": 0.8871725797653198,
"learning_rate": 7.950002416570614e-06,
"loss": 1.1988,
"step": 672
},
{
"epoch": 0.5796726959517657,
"grad_norm": 0.8334397673606873,
"learning_rate": 7.92270251280823e-06,
"loss": 1.1846,
"step": 673
},
{
"epoch": 0.5805340223944875,
"grad_norm": 0.9135998487472534,
"learning_rate": 7.895418779468766e-06,
"loss": 1.1776,
"step": 674
},
{
"epoch": 0.5813953488372093,
"grad_norm": 1.04806649684906,
"learning_rate": 7.868151428938502e-06,
"loss": 1.1788,
"step": 675
},
{
"epoch": 0.5822566752799311,
"grad_norm": 0.8940375447273254,
"learning_rate": 7.840900673476204e-06,
"loss": 1.1911,
"step": 676
},
{
"epoch": 0.5831180017226529,
"grad_norm": 0.8850206136703491,
"learning_rate": 7.813666725211445e-06,
"loss": 1.1773,
"step": 677
},
{
"epoch": 0.5839793281653747,
"grad_norm": 0.9374780654907227,
"learning_rate": 7.786449796142979e-06,
"loss": 1.2115,
"step": 678
},
{
"epoch": 0.5848406546080964,
"grad_norm": 0.9776993989944458,
"learning_rate": 7.759250098137061e-06,
"loss": 1.1713,
"step": 679
},
{
"epoch": 0.5857019810508183,
"grad_norm": 0.8467763662338257,
"learning_rate": 7.732067842925823e-06,
"loss": 1.1584,
"step": 680
},
{
"epoch": 0.58656330749354,
"grad_norm": 0.8266571164131165,
"learning_rate": 7.704903242105616e-06,
"loss": 1.1749,
"step": 681
},
{
"epoch": 0.5874246339362619,
"grad_norm": 0.8675100803375244,
"learning_rate": 7.677756507135354e-06,
"loss": 1.1706,
"step": 682
},
{
"epoch": 0.5882859603789836,
"grad_norm": 0.8470680713653564,
"learning_rate": 7.650627849334881e-06,
"loss": 1.186,
"step": 683
},
{
"epoch": 0.5891472868217055,
"grad_norm": 0.8331758975982666,
"learning_rate": 7.623517479883335e-06,
"loss": 1.1826,
"step": 684
},
{
"epoch": 0.5900086132644272,
"grad_norm": 0.8689389228820801,
"learning_rate": 7.596425609817474e-06,
"loss": 1.1931,
"step": 685
},
{
"epoch": 0.590869939707149,
"grad_norm": 0.9240257740020752,
"learning_rate": 7.569352450030054e-06,
"loss": 1.188,
"step": 686
},
{
"epoch": 0.5917312661498708,
"grad_norm": 0.9663352370262146,
"learning_rate": 7.542298211268189e-06,
"loss": 1.1903,
"step": 687
},
{
"epoch": 0.5925925925925926,
"grad_norm": 0.9322826266288757,
"learning_rate": 7.515263104131699e-06,
"loss": 1.1991,
"step": 688
},
{
"epoch": 0.5934539190353144,
"grad_norm": 0.8683158159255981,
"learning_rate": 7.488247339071478e-06,
"loss": 1.2022,
"step": 689
},
{
"epoch": 0.5943152454780362,
"grad_norm": 0.9323744177818298,
"learning_rate": 7.461251126387863e-06,
"loss": 1.1404,
"step": 690
},
{
"epoch": 0.595176571920758,
"grad_norm": 0.9025394320487976,
"learning_rate": 7.434274676228973e-06,
"loss": 1.2,
"step": 691
},
{
"epoch": 0.5960378983634798,
"grad_norm": 0.858193039894104,
"learning_rate": 7.407318198589102e-06,
"loss": 1.1964,
"step": 692
},
{
"epoch": 0.5968992248062015,
"grad_norm": 0.8841907382011414,
"learning_rate": 7.380381903307061e-06,
"loss": 1.1968,
"step": 693
},
{
"epoch": 0.5977605512489234,
"grad_norm": 0.8853846788406372,
"learning_rate": 7.353466000064563e-06,
"loss": 1.222,
"step": 694
},
{
"epoch": 0.5986218776916451,
"grad_norm": 0.9315546154975891,
"learning_rate": 7.326570698384569e-06,
"loss": 1.1806,
"step": 695
},
{
"epoch": 0.599483204134367,
"grad_norm": 0.8450863361358643,
"learning_rate": 7.299696207629692e-06,
"loss": 1.1658,
"step": 696
},
{
"epoch": 0.6003445305770887,
"grad_norm": 0.8357823491096497,
"learning_rate": 7.2728427370005205e-06,
"loss": 1.21,
"step": 697
},
{
"epoch": 0.6012058570198106,
"grad_norm": 0.8645495772361755,
"learning_rate": 7.246010495534036e-06,
"loss": 1.2054,
"step": 698
},
{
"epoch": 0.6020671834625323,
"grad_norm": 0.9765591621398926,
"learning_rate": 7.2191996921019485e-06,
"loss": 1.2005,
"step": 699
},
{
"epoch": 0.602928509905254,
"grad_norm": 0.8903208374977112,
"learning_rate": 7.1924105354090955e-06,
"loss": 1.1458,
"step": 700
},
{
"epoch": 0.6037898363479759,
"grad_norm": 0.8536180853843689,
"learning_rate": 7.165643233991806e-06,
"loss": 1.1817,
"step": 701
},
{
"epoch": 0.6046511627906976,
"grad_norm": 0.9730960726737976,
"learning_rate": 7.138897996216278e-06,
"loss": 1.1883,
"step": 702
},
{
"epoch": 0.6055124892334195,
"grad_norm": 0.9186335802078247,
"learning_rate": 7.1121750302769685e-06,
"loss": 1.1818,
"step": 703
},
{
"epoch": 0.6063738156761412,
"grad_norm": 0.8939233422279358,
"learning_rate": 7.085474544194946e-06,
"loss": 1.1446,
"step": 704
},
{
"epoch": 0.6072351421188631,
"grad_norm": 0.9298137426376343,
"learning_rate": 7.058796745816303e-06,
"loss": 1.1787,
"step": 705
},
{
"epoch": 0.6080964685615848,
"grad_norm": 0.8468939065933228,
"learning_rate": 7.03214184281051e-06,
"loss": 1.2003,
"step": 706
},
{
"epoch": 0.6089577950043066,
"grad_norm": 0.8879836201667786,
"learning_rate": 7.0055100426688205e-06,
"loss": 1.1677,
"step": 707
},
{
"epoch": 0.6098191214470284,
"grad_norm": 0.8849911093711853,
"learning_rate": 6.978901552702643e-06,
"loss": 1.2002,
"step": 708
},
{
"epoch": 0.6106804478897502,
"grad_norm": 0.9499090313911438,
"learning_rate": 6.95231658004194e-06,
"loss": 1.2391,
"step": 709
},
{
"epoch": 0.611541774332472,
"grad_norm": 0.9845093488693237,
"learning_rate": 6.925755331633592e-06,
"loss": 1.1686,
"step": 710
},
{
"epoch": 0.6124031007751938,
"grad_norm": 0.8965116739273071,
"learning_rate": 6.899218014239815e-06,
"loss": 1.1862,
"step": 711
},
{
"epoch": 0.6132644272179156,
"grad_norm": 0.8552091121673584,
"learning_rate": 6.872704834436526e-06,
"loss": 1.2219,
"step": 712
},
{
"epoch": 0.6141257536606374,
"grad_norm": 0.853168785572052,
"learning_rate": 6.846215998611757e-06,
"loss": 1.1754,
"step": 713
},
{
"epoch": 0.6149870801033591,
"grad_norm": 0.8855804204940796,
"learning_rate": 6.8197517129640265e-06,
"loss": 1.222,
"step": 714
},
{
"epoch": 0.615848406546081,
"grad_norm": 1.0299283266067505,
"learning_rate": 6.79331218350076e-06,
"loss": 1.1431,
"step": 715
},
{
"epoch": 0.6167097329888027,
"grad_norm": 0.9015288352966309,
"learning_rate": 6.766897616036661e-06,
"loss": 1.1595,
"step": 716
},
{
"epoch": 0.6175710594315246,
"grad_norm": 0.9094573855400085,
"learning_rate": 6.740508216192121e-06,
"loss": 1.202,
"step": 717
},
{
"epoch": 0.6184323858742463,
"grad_norm": 0.8764570355415344,
"learning_rate": 6.714144189391625e-06,
"loss": 1.1975,
"step": 718
},
{
"epoch": 0.6192937123169682,
"grad_norm": 0.8982758522033691,
"learning_rate": 6.6878057408621345e-06,
"loss": 1.1991,
"step": 719
},
{
"epoch": 0.6201550387596899,
"grad_norm": 0.8324359059333801,
"learning_rate": 6.661493075631506e-06,
"loss": 1.1908,
"step": 720
},
{
"epoch": 0.6210163652024118,
"grad_norm": 0.8555594682693481,
"learning_rate": 6.635206398526895e-06,
"loss": 1.1635,
"step": 721
},
{
"epoch": 0.6218776916451335,
"grad_norm": 0.9507735371589661,
"learning_rate": 6.60894591417315e-06,
"loss": 1.2447,
"step": 722
},
{
"epoch": 0.6227390180878553,
"grad_norm": 0.8586412072181702,
"learning_rate": 6.582711826991226e-06,
"loss": 1.1975,
"step": 723
},
{
"epoch": 0.6236003445305771,
"grad_norm": 0.8518699407577515,
"learning_rate": 6.556504341196592e-06,
"loss": 1.2073,
"step": 724
},
{
"epoch": 0.6244616709732989,
"grad_norm": 0.8891175389289856,
"learning_rate": 6.5303236607976465e-06,
"loss": 1.1805,
"step": 725
},
{
"epoch": 0.6253229974160207,
"grad_norm": 1.035131812095642,
"learning_rate": 6.504169989594121e-06,
"loss": 1.1953,
"step": 726
},
{
"epoch": 0.6261843238587425,
"grad_norm": 1.0852066278457642,
"learning_rate": 6.4780435311754986e-06,
"loss": 1.1504,
"step": 727
},
{
"epoch": 0.6270456503014643,
"grad_norm": 0.848870575428009,
"learning_rate": 6.451944488919433e-06,
"loss": 1.1743,
"step": 728
},
{
"epoch": 0.627906976744186,
"grad_norm": 0.8880192041397095,
"learning_rate": 6.425873065990158e-06,
"loss": 1.156,
"step": 729
},
{
"epoch": 0.6287683031869078,
"grad_norm": 0.9287259578704834,
"learning_rate": 6.3998294653369046e-06,
"loss": 1.2049,
"step": 730
},
{
"epoch": 0.6296296296296297,
"grad_norm": 0.9466278553009033,
"learning_rate": 6.373813889692331e-06,
"loss": 1.209,
"step": 731
},
{
"epoch": 0.6304909560723514,
"grad_norm": 0.8213219046592712,
"learning_rate": 6.347826541570936e-06,
"loss": 1.1807,
"step": 732
},
{
"epoch": 0.6313522825150732,
"grad_norm": 0.8916311860084534,
"learning_rate": 6.3218676232674815e-06,
"loss": 1.1974,
"step": 733
},
{
"epoch": 0.632213608957795,
"grad_norm": 0.8614788055419922,
"learning_rate": 6.295937336855433e-06,
"loss": 1.1858,
"step": 734
},
{
"epoch": 0.6330749354005168,
"grad_norm": 0.8437885642051697,
"learning_rate": 6.270035884185367e-06,
"loss": 1.1858,
"step": 735
},
{
"epoch": 0.6339362618432386,
"grad_norm": 0.8793601989746094,
"learning_rate": 6.244163466883405e-06,
"loss": 1.1722,
"step": 736
},
{
"epoch": 0.6347975882859603,
"grad_norm": 0.9624020457267761,
"learning_rate": 6.218320286349655e-06,
"loss": 1.1796,
"step": 737
},
{
"epoch": 0.6356589147286822,
"grad_norm": 0.8338882923126221,
"learning_rate": 6.192506543756626e-06,
"loss": 1.2217,
"step": 738
},
{
"epoch": 0.6365202411714039,
"grad_norm": 0.8388656377792358,
"learning_rate": 6.1667224400476785e-06,
"loss": 1.1981,
"step": 739
},
{
"epoch": 0.6373815676141258,
"grad_norm": 0.8913929462432861,
"learning_rate": 6.140968175935458e-06,
"loss": 1.1906,
"step": 740
},
{
"epoch": 0.6382428940568475,
"grad_norm": 0.8406807780265808,
"learning_rate": 6.115243951900316e-06,
"loss": 1.2023,
"step": 741
},
{
"epoch": 0.6391042204995694,
"grad_norm": 0.9768598079681396,
"learning_rate": 6.089549968188767e-06,
"loss": 1.1909,
"step": 742
},
{
"epoch": 0.6399655469422911,
"grad_norm": 0.9822099804878235,
"learning_rate": 6.063886424811929e-06,
"loss": 1.1945,
"step": 743
},
{
"epoch": 0.6408268733850129,
"grad_norm": 0.8699774742126465,
"learning_rate": 6.038253521543951e-06,
"loss": 1.1947,
"step": 744
},
{
"epoch": 0.6416881998277347,
"grad_norm": 0.875484049320221,
"learning_rate": 6.01265145792048e-06,
"loss": 1.1623,
"step": 745
},
{
"epoch": 0.6425495262704565,
"grad_norm": 0.8913766145706177,
"learning_rate": 5.987080433237082e-06,
"loss": 1.1771,
"step": 746
},
{
"epoch": 0.6434108527131783,
"grad_norm": 0.9033095240592957,
"learning_rate": 5.961540646547722e-06,
"loss": 1.1619,
"step": 747
},
{
"epoch": 0.6442721791559001,
"grad_norm": 0.9094845056533813,
"learning_rate": 5.936032296663188e-06,
"loss": 1.1788,
"step": 748
},
{
"epoch": 0.6451335055986219,
"grad_norm": 0.8750199675559998,
"learning_rate": 5.9105555821495486e-06,
"loss": 1.1971,
"step": 749
},
{
"epoch": 0.6459948320413437,
"grad_norm": 0.8594929575920105,
"learning_rate": 5.885110701326621e-06,
"loss": 1.1961,
"step": 750
},
{
"epoch": 0.6468561584840654,
"grad_norm": 0.8807885050773621,
"learning_rate": 5.859697852266409e-06,
"loss": 1.1702,
"step": 751
},
{
"epoch": 0.6477174849267873,
"grad_norm": 0.8879327178001404,
"learning_rate": 5.834317232791575e-06,
"loss": 1.2105,
"step": 752
},
{
"epoch": 0.648578811369509,
"grad_norm": 0.8946269154548645,
"learning_rate": 5.808969040473893e-06,
"loss": 1.1466,
"step": 753
},
{
"epoch": 0.6494401378122309,
"grad_norm": 0.8713740706443787,
"learning_rate": 5.7836534726327175e-06,
"loss": 1.1555,
"step": 754
},
{
"epoch": 0.6503014642549526,
"grad_norm": 0.9063122272491455,
"learning_rate": 5.758370726333434e-06,
"loss": 1.2304,
"step": 755
},
{
"epoch": 0.6511627906976745,
"grad_norm": 0.9117863774299622,
"learning_rate": 5.733120998385935e-06,
"loss": 1.1567,
"step": 756
},
{
"epoch": 0.6520241171403962,
"grad_norm": 0.8670936822891235,
"learning_rate": 5.707904485343094e-06,
"loss": 1.1848,
"step": 757
},
{
"epoch": 0.652885443583118,
"grad_norm": 0.8828465342521667,
"learning_rate": 5.682721383499217e-06,
"loss": 1.1945,
"step": 758
},
{
"epoch": 0.6537467700258398,
"grad_norm": 0.8751693964004517,
"learning_rate": 5.657571888888538e-06,
"loss": 1.1815,
"step": 759
},
{
"epoch": 0.6546080964685616,
"grad_norm": 0.9212945699691772,
"learning_rate": 5.63245619728367e-06,
"loss": 1.1823,
"step": 760
},
{
"epoch": 0.6554694229112834,
"grad_norm": 0.9038519263267517,
"learning_rate": 5.60737450419409e-06,
"loss": 1.1918,
"step": 761
},
{
"epoch": 0.6563307493540051,
"grad_norm": 0.9209284782409668,
"learning_rate": 5.582327004864631e-06,
"loss": 1.1645,
"step": 762
},
{
"epoch": 0.657192075796727,
"grad_norm": 0.8759058713912964,
"learning_rate": 5.557313894273937e-06,
"loss": 1.1895,
"step": 763
},
{
"epoch": 0.6580534022394487,
"grad_norm": 1.0104118585586548,
"learning_rate": 5.532335367132962e-06,
"loss": 1.1919,
"step": 764
},
{
"epoch": 0.6589147286821705,
"grad_norm": 0.8833430409431458,
"learning_rate": 5.507391617883454e-06,
"loss": 1.2118,
"step": 765
},
{
"epoch": 0.6597760551248923,
"grad_norm": 0.9313235878944397,
"learning_rate": 5.4824828406964305e-06,
"loss": 1.1817,
"step": 766
},
{
"epoch": 0.6606373815676141,
"grad_norm": 0.8975321054458618,
"learning_rate": 5.457609229470681e-06,
"loss": 1.182,
"step": 767
},
{
"epoch": 0.661498708010336,
"grad_norm": 0.8778689503669739,
"learning_rate": 5.4327709778312484e-06,
"loss": 1.1567,
"step": 768
},
{
"epoch": 0.6623600344530577,
"grad_norm": 0.9323556423187256,
"learning_rate": 5.407968279127915e-06,
"loss": 1.1592,
"step": 769
},
{
"epoch": 0.6632213608957795,
"grad_norm": 0.8266853094100952,
"learning_rate": 5.383201326433727e-06,
"loss": 1.1793,
"step": 770
},
{
"epoch": 0.6640826873385013,
"grad_norm": 0.8756425976753235,
"learning_rate": 5.358470312543445e-06,
"loss": 1.1877,
"step": 771
},
{
"epoch": 0.664944013781223,
"grad_norm": 0.8671419024467468,
"learning_rate": 5.3337754299720925e-06,
"loss": 1.2267,
"step": 772
},
{
"epoch": 0.6658053402239449,
"grad_norm": 0.9318386912345886,
"learning_rate": 5.30911687095342e-06,
"loss": 1.1453,
"step": 773
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.8705496191978455,
"learning_rate": 5.284494827438423e-06,
"loss": 1.1715,
"step": 774
},
{
"epoch": 0.6675279931093885,
"grad_norm": 0.8844230771064758,
"learning_rate": 5.25990949109386e-06,
"loss": 1.1755,
"step": 775
},
{
"epoch": 0.6683893195521102,
"grad_norm": 0.8812039494514465,
"learning_rate": 5.2353610533007305e-06,
"loss": 1.1888,
"step": 776
},
{
"epoch": 0.6692506459948321,
"grad_norm": 0.9645406603813171,
"learning_rate": 5.210849705152809e-06,
"loss": 1.1828,
"step": 777
},
{
"epoch": 0.6701119724375538,
"grad_norm": 0.9021162390708923,
"learning_rate": 5.186375637455159e-06,
"loss": 1.158,
"step": 778
},
{
"epoch": 0.6709732988802756,
"grad_norm": 0.9339265823364258,
"learning_rate": 5.161939040722634e-06,
"loss": 1.1629,
"step": 779
},
{
"epoch": 0.6718346253229974,
"grad_norm": 0.8927223682403564,
"learning_rate": 5.137540105178396e-06,
"loss": 1.1745,
"step": 780
},
{
"epoch": 0.6726959517657192,
"grad_norm": 0.9335219264030457,
"learning_rate": 5.113179020752443e-06,
"loss": 1.1858,
"step": 781
},
{
"epoch": 0.673557278208441,
"grad_norm": 0.8412618041038513,
"learning_rate": 5.088855977080123e-06,
"loss": 1.1759,
"step": 782
},
{
"epoch": 0.6744186046511628,
"grad_norm": 0.9003159999847412,
"learning_rate": 5.064571163500667e-06,
"loss": 1.1958,
"step": 783
},
{
"epoch": 0.6752799310938846,
"grad_norm": 0.9444640278816223,
"learning_rate": 5.040324769055709e-06,
"loss": 1.1988,
"step": 784
},
{
"epoch": 0.6761412575366064,
"grad_norm": 0.833273708820343,
"learning_rate": 5.016116982487811e-06,
"loss": 1.1886,
"step": 785
},
{
"epoch": 0.6770025839793282,
"grad_norm": 0.9385854005813599,
"learning_rate": 4.991947992238997e-06,
"loss": 1.1769,
"step": 786
},
{
"epoch": 0.67786391042205,
"grad_norm": 0.8889097571372986,
"learning_rate": 4.967817986449284e-06,
"loss": 1.1898,
"step": 787
},
{
"epoch": 0.6787252368647717,
"grad_norm": 0.8416062593460083,
"learning_rate": 4.943727152955235e-06,
"loss": 1.1899,
"step": 788
},
{
"epoch": 0.6795865633074936,
"grad_norm": 0.9640506505966187,
"learning_rate": 4.9196756792884605e-06,
"loss": 1.1683,
"step": 789
},
{
"epoch": 0.6804478897502153,
"grad_norm": 0.8979779481887817,
"learning_rate": 4.8956637526742e-06,
"loss": 1.1926,
"step": 790
},
{
"epoch": 0.6813092161929372,
"grad_norm": 0.8395998477935791,
"learning_rate": 4.871691560029828e-06,
"loss": 1.1593,
"step": 791
},
{
"epoch": 0.6821705426356589,
"grad_norm": 0.8391218185424805,
"learning_rate": 4.847759287963432e-06,
"loss": 1.1615,
"step": 792
},
{
"epoch": 0.6830318690783808,
"grad_norm": 0.8740763068199158,
"learning_rate": 4.8238671227723285e-06,
"loss": 1.1717,
"step": 793
},
{
"epoch": 0.6838931955211025,
"grad_norm": 0.8377690315246582,
"learning_rate": 4.800015250441638e-06,
"loss": 1.1757,
"step": 794
},
{
"epoch": 0.6847545219638242,
"grad_norm": 0.8415879011154175,
"learning_rate": 4.7762038566428155e-06,
"loss": 1.2087,
"step": 795
},
{
"epoch": 0.6856158484065461,
"grad_norm": 0.8535274863243103,
"learning_rate": 4.752433126732231e-06,
"loss": 1.1714,
"step": 796
},
{
"epoch": 0.6864771748492678,
"grad_norm": 0.8958263397216797,
"learning_rate": 4.728703245749707e-06,
"loss": 1.1477,
"step": 797
},
{
"epoch": 0.6873385012919897,
"grad_norm": 0.9385268092155457,
"learning_rate": 4.7050143984170805e-06,
"loss": 1.2119,
"step": 798
},
{
"epoch": 0.6881998277347114,
"grad_norm": 0.9213505983352661,
"learning_rate": 4.681366769136769e-06,
"loss": 1.1759,
"step": 799
},
{
"epoch": 0.6890611541774333,
"grad_norm": 0.9221978187561035,
"learning_rate": 4.657760541990329e-06,
"loss": 1.1704,
"step": 800
},
{
"epoch": 0.689922480620155,
"grad_norm": 0.8620073795318604,
"learning_rate": 4.634195900737045e-06,
"loss": 1.197,
"step": 801
},
{
"epoch": 0.6907838070628768,
"grad_norm": 0.8191630244255066,
"learning_rate": 4.610673028812459e-06,
"loss": 1.1385,
"step": 802
},
{
"epoch": 0.6916451335055986,
"grad_norm": 0.8765779137611389,
"learning_rate": 4.587192109326988e-06,
"loss": 1.1687,
"step": 803
},
{
"epoch": 0.6925064599483204,
"grad_norm": 0.7997327446937561,
"learning_rate": 4.563753325064457e-06,
"loss": 1.1897,
"step": 804
},
{
"epoch": 0.6933677863910422,
"grad_norm": 0.8961016535758972,
"learning_rate": 4.540356858480711e-06,
"loss": 1.1849,
"step": 805
},
{
"epoch": 0.694229112833764,
"grad_norm": 0.8605460524559021,
"learning_rate": 4.5170028917021705e-06,
"loss": 1.1793,
"step": 806
},
{
"epoch": 0.6950904392764858,
"grad_norm": 0.9754102826118469,
"learning_rate": 4.493691606524423e-06,
"loss": 1.2003,
"step": 807
},
{
"epoch": 0.6959517657192076,
"grad_norm": 0.7908036708831787,
"learning_rate": 4.470423184410804e-06,
"loss": 1.1333,
"step": 808
},
{
"epoch": 0.6968130921619293,
"grad_norm": 0.8532270789146423,
"learning_rate": 4.447197806490996e-06,
"loss": 1.1468,
"step": 809
},
{
"epoch": 0.6976744186046512,
"grad_norm": 0.8650361895561218,
"learning_rate": 4.424015653559611e-06,
"loss": 1.1958,
"step": 810
},
{
"epoch": 0.6985357450473729,
"grad_norm": 1.0336636304855347,
"learning_rate": 4.400876906074772e-06,
"loss": 1.2279,
"step": 811
},
{
"epoch": 0.6993970714900948,
"grad_norm": 0.8429791331291199,
"learning_rate": 4.377781744156727e-06,
"loss": 1.2041,
"step": 812
},
{
"epoch": 0.7002583979328165,
"grad_norm": 0.8677304983139038,
"learning_rate": 4.35473034758643e-06,
"loss": 1.1873,
"step": 813
},
{
"epoch": 0.7011197243755384,
"grad_norm": 0.9752134680747986,
"learning_rate": 4.331722895804158e-06,
"loss": 1.1734,
"step": 814
},
{
"epoch": 0.7019810508182601,
"grad_norm": 0.8603991270065308,
"learning_rate": 4.30875956790811e-06,
"loss": 1.1984,
"step": 815
},
{
"epoch": 0.7028423772609819,
"grad_norm": 0.8431304693222046,
"learning_rate": 4.285840542652997e-06,
"loss": 1.1635,
"step": 816
},
{
"epoch": 0.7037037037037037,
"grad_norm": 0.8751398324966431,
"learning_rate": 4.262965998448665e-06,
"loss": 1.1715,
"step": 817
},
{
"epoch": 0.7045650301464255,
"grad_norm": 0.8455151915550232,
"learning_rate": 4.240136113358714e-06,
"loss": 1.212,
"step": 818
},
{
"epoch": 0.7054263565891473,
"grad_norm": 0.9403558373451233,
"learning_rate": 4.2173510650990905e-06,
"loss": 1.1627,
"step": 819
},
{
"epoch": 0.7062876830318691,
"grad_norm": 0.941561222076416,
"learning_rate": 4.194611031036718e-06,
"loss": 1.1861,
"step": 820
},
{
"epoch": 0.7071490094745909,
"grad_norm": 0.9220948815345764,
"learning_rate": 4.171916188188113e-06,
"loss": 1.1696,
"step": 821
},
{
"epoch": 0.7080103359173127,
"grad_norm": 0.8759995698928833,
"learning_rate": 4.149266713218011e-06,
"loss": 1.1863,
"step": 822
},
{
"epoch": 0.7088716623600344,
"grad_norm": 0.8697245717048645,
"learning_rate": 4.12666278243799e-06,
"loss": 1.1579,
"step": 823
},
{
"epoch": 0.7097329888027563,
"grad_norm": 0.8867110013961792,
"learning_rate": 4.104104571805088e-06,
"loss": 1.1722,
"step": 824
},
{
"epoch": 0.710594315245478,
"grad_norm": 0.8431983590126038,
"learning_rate": 4.0815922569204435e-06,
"loss": 1.1983,
"step": 825
},
{
"epoch": 0.7114556416881999,
"grad_norm": 0.891090452671051,
"learning_rate": 4.059126013027924e-06,
"loss": 1.1585,
"step": 826
},
{
"epoch": 0.7123169681309216,
"grad_norm": 0.8412421345710754,
"learning_rate": 4.0367060150127726e-06,
"loss": 1.1664,
"step": 827
},
{
"epoch": 0.7131782945736435,
"grad_norm": 0.8604631423950195,
"learning_rate": 4.014332437400235e-06,
"loss": 1.163,
"step": 828
},
{
"epoch": 0.7140396210163652,
"grad_norm": 0.9067566394805908,
"learning_rate": 3.9920054543541976e-06,
"loss": 1.1838,
"step": 829
},
{
"epoch": 0.7149009474590869,
"grad_norm": 0.8477993607521057,
"learning_rate": 3.96972523967584e-06,
"loss": 1.1958,
"step": 830
},
{
"epoch": 0.7157622739018088,
"grad_norm": 0.8343601226806641,
"learning_rate": 3.947491966802288e-06,
"loss": 1.1784,
"step": 831
},
{
"epoch": 0.7166236003445305,
"grad_norm": 0.8563992381095886,
"learning_rate": 3.925305808805247e-06,
"loss": 1.1488,
"step": 832
},
{
"epoch": 0.7174849267872524,
"grad_norm": 0.9131225943565369,
"learning_rate": 3.903166938389664e-06,
"loss": 1.1762,
"step": 833
},
{
"epoch": 0.7183462532299741,
"grad_norm": 0.872132420539856,
"learning_rate": 3.881075527892391e-06,
"loss": 1.1717,
"step": 834
},
{
"epoch": 0.719207579672696,
"grad_norm": 0.8504886627197266,
"learning_rate": 3.859031749280824e-06,
"loss": 1.1479,
"step": 835
},
{
"epoch": 0.7200689061154177,
"grad_norm": 0.8588838577270508,
"learning_rate": 3.837035774151585e-06,
"loss": 1.19,
"step": 836
},
{
"epoch": 0.7209302325581395,
"grad_norm": 0.8740001916885376,
"learning_rate": 3.815087773729171e-06,
"loss": 1.186,
"step": 837
},
{
"epoch": 0.7217915590008613,
"grad_norm": 0.8781384825706482,
"learning_rate": 3.793187918864627e-06,
"loss": 1.1913,
"step": 838
},
{
"epoch": 0.7226528854435831,
"grad_norm": 0.8898410201072693,
"learning_rate": 3.771336380034211e-06,
"loss": 1.1711,
"step": 839
},
{
"epoch": 0.7235142118863049,
"grad_norm": 0.8932210206985474,
"learning_rate": 3.749533327338091e-06,
"loss": 1.1818,
"step": 840
},
{
"epoch": 0.7243755383290267,
"grad_norm": 0.8856852650642395,
"learning_rate": 3.727778930498982e-06,
"loss": 1.1913,
"step": 841
},
{
"epoch": 0.7252368647717485,
"grad_norm": 0.8199148774147034,
"learning_rate": 3.706073358860851e-06,
"loss": 1.1862,
"step": 842
},
{
"epoch": 0.7260981912144703,
"grad_norm": 0.8369928002357483,
"learning_rate": 3.6844167813875888e-06,
"loss": 1.2055,
"step": 843
},
{
"epoch": 0.726959517657192,
"grad_norm": 0.8797032237052917,
"learning_rate": 3.6628093666617083e-06,
"loss": 1.1767,
"step": 844
},
{
"epoch": 0.7278208440999139,
"grad_norm": 0.8433752059936523,
"learning_rate": 3.6412512828830114e-06,
"loss": 1.1601,
"step": 845
},
{
"epoch": 0.7286821705426356,
"grad_norm": 0.8134076595306396,
"learning_rate": 3.6197426978672923e-06,
"loss": 1.1797,
"step": 846
},
{
"epoch": 0.7295434969853575,
"grad_norm": 0.8705214858055115,
"learning_rate": 3.5982837790450376e-06,
"loss": 1.1506,
"step": 847
},
{
"epoch": 0.7304048234280792,
"grad_norm": 0.9077462553977966,
"learning_rate": 3.5768746934601007e-06,
"loss": 1.1796,
"step": 848
},
{
"epoch": 0.7312661498708011,
"grad_norm": 0.8746139407157898,
"learning_rate": 3.5555156077684317e-06,
"loss": 1.1535,
"step": 849
},
{
"epoch": 0.7321274763135228,
"grad_norm": 0.9012914896011353,
"learning_rate": 3.5342066882367488e-06,
"loss": 1.2069,
"step": 850
},
{
"epoch": 0.7329888027562446,
"grad_norm": 0.8886313438415527,
"learning_rate": 3.5129481007412634e-06,
"loss": 1.1654,
"step": 851
},
{
"epoch": 0.7338501291989664,
"grad_norm": 0.8336722254753113,
"learning_rate": 3.491740010766389e-06,
"loss": 1.1773,
"step": 852
},
{
"epoch": 0.7347114556416882,
"grad_norm": 0.8749405741691589,
"learning_rate": 3.47058258340345e-06,
"loss": 1.18,
"step": 853
},
{
"epoch": 0.73557278208441,
"grad_norm": 0.8950448632240295,
"learning_rate": 3.449475983349385e-06,
"loss": 1.1257,
"step": 854
},
{
"epoch": 0.7364341085271318,
"grad_norm": 0.8262083530426025,
"learning_rate": 3.4284203749054834e-06,
"loss": 1.1863,
"step": 855
},
{
"epoch": 0.7372954349698536,
"grad_norm": 0.8869133591651917,
"learning_rate": 3.4074159219760884e-06,
"loss": 1.1659,
"step": 856
},
{
"epoch": 0.7381567614125754,
"grad_norm": 0.8032558560371399,
"learning_rate": 3.386462788067344e-06,
"loss": 1.1862,
"step": 857
},
{
"epoch": 0.7390180878552972,
"grad_norm": 0.8882765173912048,
"learning_rate": 3.3655611362858966e-06,
"loss": 1.1694,
"step": 858
},
{
"epoch": 0.739879414298019,
"grad_norm": 0.9683348536491394,
"learning_rate": 3.3447111293376468e-06,
"loss": 1.1428,
"step": 859
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.823493480682373,
"learning_rate": 3.323912929526465e-06,
"loss": 1.1393,
"step": 860
},
{
"epoch": 0.7416020671834626,
"grad_norm": 0.8513652682304382,
"learning_rate": 3.30316669875294e-06,
"loss": 1.1785,
"step": 861
},
{
"epoch": 0.7424633936261843,
"grad_norm": 0.8614821434020996,
"learning_rate": 3.282472598513119e-06,
"loss": 1.1626,
"step": 862
},
{
"epoch": 0.7433247200689062,
"grad_norm": 0.8872314095497131,
"learning_rate": 3.2618307898972413e-06,
"loss": 1.1466,
"step": 863
},
{
"epoch": 0.7441860465116279,
"grad_norm": 0.8613788485527039,
"learning_rate": 3.2412414335884866e-06,
"loss": 1.1586,
"step": 864
},
{
"epoch": 0.7450473729543498,
"grad_norm": 0.8109983205795288,
"learning_rate": 3.2207046898617365e-06,
"loss": 1.1701,
"step": 865
},
{
"epoch": 0.7459086993970715,
"grad_norm": 0.9684152603149414,
"learning_rate": 3.2002207185823155e-06,
"loss": 1.1888,
"step": 866
},
{
"epoch": 0.7467700258397932,
"grad_norm": 1.0356762409210205,
"learning_rate": 3.1797896792047422e-06,
"loss": 1.2229,
"step": 867
},
{
"epoch": 0.7476313522825151,
"grad_norm": 0.9049926996231079,
"learning_rate": 3.1594117307714977e-06,
"loss": 1.2125,
"step": 868
},
{
"epoch": 0.7484926787252368,
"grad_norm": 0.9765392541885376,
"learning_rate": 3.1390870319117838e-06,
"loss": 1.1854,
"step": 869
},
{
"epoch": 0.7493540051679587,
"grad_norm": 0.8243566155433655,
"learning_rate": 3.118815740840294e-06,
"loss": 1.1737,
"step": 870
},
{
"epoch": 0.7502153316106804,
"grad_norm": 0.8770548105239868,
"learning_rate": 3.098598015355967e-06,
"loss": 1.1927,
"step": 871
},
{
"epoch": 0.7510766580534023,
"grad_norm": 0.8610149025917053,
"learning_rate": 3.0784340128407786e-06,
"loss": 1.1686,
"step": 872
},
{
"epoch": 0.751937984496124,
"grad_norm": 1.0247671604156494,
"learning_rate": 3.0583238902584976e-06,
"loss": 1.1861,
"step": 873
},
{
"epoch": 0.7527993109388458,
"grad_norm": 0.8867828845977783,
"learning_rate": 3.038267804153472e-06,
"loss": 1.1948,
"step": 874
},
{
"epoch": 0.7536606373815676,
"grad_norm": 0.902263879776001,
"learning_rate": 3.0182659106494195e-06,
"loss": 1.1778,
"step": 875
},
{
"epoch": 0.7545219638242894,
"grad_norm": 0.9579049348831177,
"learning_rate": 2.998318365448194e-06,
"loss": 1.188,
"step": 876
},
{
"epoch": 0.7553832902670112,
"grad_norm": 0.8788950443267822,
"learning_rate": 2.9784253238285844e-06,
"loss": 1.1451,
"step": 877
},
{
"epoch": 0.756244616709733,
"grad_norm": 0.9002311825752258,
"learning_rate": 2.9585869406451083e-06,
"loss": 1.1493,
"step": 878
},
{
"epoch": 0.7571059431524548,
"grad_norm": 0.9176036715507507,
"learning_rate": 2.938803370326804e-06,
"loss": 1.1789,
"step": 879
},
{
"epoch": 0.7579672695951766,
"grad_norm": 0.87285977602005,
"learning_rate": 2.9190747668760213e-06,
"loss": 1.1633,
"step": 880
},
{
"epoch": 0.7588285960378983,
"grad_norm": 0.8504787087440491,
"learning_rate": 2.899401283867229e-06,
"loss": 1.1753,
"step": 881
},
{
"epoch": 0.7596899224806202,
"grad_norm": 0.8374565243721008,
"learning_rate": 2.8797830744458177e-06,
"loss": 1.168,
"step": 882
},
{
"epoch": 0.7605512489233419,
"grad_norm": 0.8359323143959045,
"learning_rate": 2.860220291326915e-06,
"loss": 1.1788,
"step": 883
},
{
"epoch": 0.7614125753660638,
"grad_norm": 0.8868809342384338,
"learning_rate": 2.840713086794189e-06,
"loss": 1.1907,
"step": 884
},
{
"epoch": 0.7622739018087855,
"grad_norm": 0.874671220779419,
"learning_rate": 2.8212616126986604e-06,
"loss": 1.1735,
"step": 885
},
{
"epoch": 0.7631352282515074,
"grad_norm": 0.9140809774398804,
"learning_rate": 2.801866020457521e-06,
"loss": 1.1776,
"step": 886
},
{
"epoch": 0.7639965546942291,
"grad_norm": 0.8395150303840637,
"learning_rate": 2.7825264610529703e-06,
"loss": 1.1691,
"step": 887
},
{
"epoch": 0.7648578811369509,
"grad_norm": 0.9220564961433411,
"learning_rate": 2.7632430850310175e-06,
"loss": 1.1921,
"step": 888
},
{
"epoch": 0.7657192075796727,
"grad_norm": 0.8539912104606628,
"learning_rate": 2.7440160425003236e-06,
"loss": 1.1454,
"step": 889
},
{
"epoch": 0.7665805340223945,
"grad_norm": 0.8318409323692322,
"learning_rate": 2.7248454831310335e-06,
"loss": 1.1648,
"step": 890
},
{
"epoch": 0.7674418604651163,
"grad_norm": 0.8465113639831543,
"learning_rate": 2.7057315561536e-06,
"loss": 1.1529,
"step": 891
},
{
"epoch": 0.768303186907838,
"grad_norm": 0.8205885887145996,
"learning_rate": 2.686674410357637e-06,
"loss": 1.2069,
"step": 892
},
{
"epoch": 0.7691645133505599,
"grad_norm": 0.9047953486442566,
"learning_rate": 2.667674194090748e-06,
"loss": 1.1727,
"step": 893
},
{
"epoch": 0.7700258397932817,
"grad_norm": 0.928527295589447,
"learning_rate": 2.6487310552573776e-06,
"loss": 1.1687,
"step": 894
},
{
"epoch": 0.7708871662360034,
"grad_norm": 0.9136389493942261,
"learning_rate": 2.6298451413176564e-06,
"loss": 1.1628,
"step": 895
},
{
"epoch": 0.7717484926787253,
"grad_norm": 0.8879351615905762,
"learning_rate": 2.6110165992862635e-06,
"loss": 1.1576,
"step": 896
},
{
"epoch": 0.772609819121447,
"grad_norm": 0.8699619770050049,
"learning_rate": 2.592245575731274e-06,
"loss": 1.2179,
"step": 897
},
{
"epoch": 0.7734711455641688,
"grad_norm": 0.9080535173416138,
"learning_rate": 2.5735322167730116e-06,
"loss": 1.1618,
"step": 898
},
{
"epoch": 0.7743324720068906,
"grad_norm": 0.8573684096336365,
"learning_rate": 2.5548766680829207e-06,
"loss": 1.141,
"step": 899
},
{
"epoch": 0.7751937984496124,
"grad_norm": 0.9463301301002502,
"learning_rate": 2.5362790748824363e-06,
"loss": 1.1608,
"step": 900
},
{
"epoch": 0.7760551248923342,
"grad_norm": 0.8480750918388367,
"learning_rate": 2.517739581941839e-06,
"loss": 1.1811,
"step": 901
},
{
"epoch": 0.7769164513350559,
"grad_norm": 0.9230459928512573,
"learning_rate": 2.4992583335791375e-06,
"loss": 1.1404,
"step": 902
},
{
"epoch": 0.7777777777777778,
"grad_norm": 0.8646366000175476,
"learning_rate": 2.4808354736589525e-06,
"loss": 1.1808,
"step": 903
},
{
"epoch": 0.7786391042204995,
"grad_norm": 0.8544058203697205,
"learning_rate": 2.4624711455913764e-06,
"loss": 1.1333,
"step": 904
},
{
"epoch": 0.7795004306632214,
"grad_norm": 0.8751500248908997,
"learning_rate": 2.444165492330879e-06,
"loss": 1.1674,
"step": 905
},
{
"epoch": 0.7803617571059431,
"grad_norm": 0.8781688213348389,
"learning_rate": 2.425918656375177e-06,
"loss": 1.1802,
"step": 906
},
{
"epoch": 0.781223083548665,
"grad_norm": 1.0020009279251099,
"learning_rate": 2.4077307797641357e-06,
"loss": 1.1823,
"step": 907
},
{
"epoch": 0.7820844099913867,
"grad_norm": 0.7854005098342896,
"learning_rate": 2.389602004078657e-06,
"loss": 1.1864,
"step": 908
},
{
"epoch": 0.7829457364341085,
"grad_norm": 0.8276894688606262,
"learning_rate": 2.3715324704395846e-06,
"loss": 1.1667,
"step": 909
},
{
"epoch": 0.7838070628768303,
"grad_norm": 0.8462375402450562,
"learning_rate": 2.3535223195066025e-06,
"loss": 1.1318,
"step": 910
},
{
"epoch": 0.7846683893195521,
"grad_norm": 0.8951948881149292,
"learning_rate": 2.335571691477132e-06,
"loss": 1.189,
"step": 911
},
{
"epoch": 0.7855297157622739,
"grad_norm": 0.8499693870544434,
"learning_rate": 2.3176807260852475e-06,
"loss": 1.1322,
"step": 912
},
{
"epoch": 0.7863910422049957,
"grad_norm": 0.8753166198730469,
"learning_rate": 2.2998495626005955e-06,
"loss": 1.186,
"step": 913
},
{
"epoch": 0.7872523686477175,
"grad_norm": 0.8034504652023315,
"learning_rate": 2.282078339827293e-06,
"loss": 1.174,
"step": 914
},
{
"epoch": 0.7881136950904393,
"grad_norm": 0.8719635605812073,
"learning_rate": 2.264367196102869e-06,
"loss": 1.2179,
"step": 915
},
{
"epoch": 0.788975021533161,
"grad_norm": 0.8509535193443298,
"learning_rate": 2.2467162692971655e-06,
"loss": 1.1951,
"step": 916
},
{
"epoch": 0.7898363479758829,
"grad_norm": 0.9040639996528625,
"learning_rate": 2.229125696811275e-06,
"loss": 1.2126,
"step": 917
},
{
"epoch": 0.7906976744186046,
"grad_norm": 0.9148096442222595,
"learning_rate": 2.2115956155764817e-06,
"loss": 1.1558,
"step": 918
},
{
"epoch": 0.7915590008613265,
"grad_norm": 0.8161072134971619,
"learning_rate": 2.1941261620531718e-06,
"loss": 1.1379,
"step": 919
},
{
"epoch": 0.7924203273040482,
"grad_norm": 0.8694244027137756,
"learning_rate": 2.17671747222979e-06,
"loss": 1.1576,
"step": 920
},
{
"epoch": 0.7932816537467701,
"grad_norm": 0.830352246761322,
"learning_rate": 2.1593696816217667e-06,
"loss": 1.1848,
"step": 921
},
{
"epoch": 0.7941429801894918,
"grad_norm": 0.8742688298225403,
"learning_rate": 2.142082925270489e-06,
"loss": 1.1939,
"step": 922
},
{
"epoch": 0.7950043066322137,
"grad_norm": 0.8883207440376282,
"learning_rate": 2.1248573377422155e-06,
"loss": 1.1894,
"step": 923
},
{
"epoch": 0.7958656330749354,
"grad_norm": 0.9029132127761841,
"learning_rate": 2.107693053127049e-06,
"loss": 1.1283,
"step": 924
},
{
"epoch": 0.7967269595176572,
"grad_norm": 0.9062528014183044,
"learning_rate": 2.090590205037888e-06,
"loss": 1.2179,
"step": 925
},
{
"epoch": 0.797588285960379,
"grad_norm": 0.8248071670532227,
"learning_rate": 2.0735489266093923e-06,
"loss": 1.1756,
"step": 926
},
{
"epoch": 0.7984496124031008,
"grad_norm": 0.8426958918571472,
"learning_rate": 2.056569350496933e-06,
"loss": 1.1924,
"step": 927
},
{
"epoch": 0.7993109388458226,
"grad_norm": 0.8882315158843994,
"learning_rate": 2.0396516088755804e-06,
"loss": 1.1843,
"step": 928
},
{
"epoch": 0.8001722652885443,
"grad_norm": 0.9714380502700806,
"learning_rate": 2.0227958334390506e-06,
"loss": 1.1617,
"step": 929
},
{
"epoch": 0.8010335917312662,
"grad_norm": 0.9388904571533203,
"learning_rate": 2.0060021553986974e-06,
"loss": 1.1715,
"step": 930
},
{
"epoch": 0.801894918173988,
"grad_norm": 0.9136961698532104,
"learning_rate": 1.989270705482492e-06,
"loss": 1.1402,
"step": 931
},
{
"epoch": 0.8027562446167097,
"grad_norm": 0.9584447145462036,
"learning_rate": 1.9726016139339934e-06,
"loss": 1.1393,
"step": 932
},
{
"epoch": 0.8036175710594315,
"grad_norm": 0.8596916198730469,
"learning_rate": 1.955995010511338e-06,
"loss": 1.1759,
"step": 933
},
{
"epoch": 0.8044788975021533,
"grad_norm": 0.8455019593238831,
"learning_rate": 1.9394510244862397e-06,
"loss": 1.1616,
"step": 934
},
{
"epoch": 0.8053402239448751,
"grad_norm": 0.9066017270088196,
"learning_rate": 1.9229697846429773e-06,
"loss": 1.1546,
"step": 935
},
{
"epoch": 0.8062015503875969,
"grad_norm": 0.8392831683158875,
"learning_rate": 1.9065514192773848e-06,
"loss": 1.1998,
"step": 936
},
{
"epoch": 0.8070628768303187,
"grad_norm": 0.9439470767974854,
"learning_rate": 1.8901960561958588e-06,
"loss": 1.1673,
"step": 937
},
{
"epoch": 0.8079242032730405,
"grad_norm": 0.8691911697387695,
"learning_rate": 1.8739038227143658e-06,
"loss": 1.1632,
"step": 938
},
{
"epoch": 0.8087855297157622,
"grad_norm": 0.8843985199928284,
"learning_rate": 1.8576748456574512e-06,
"loss": 1.1743,
"step": 939
},
{
"epoch": 0.8096468561584841,
"grad_norm": 0.8459395170211792,
"learning_rate": 1.8415092513572498e-06,
"loss": 1.1606,
"step": 940
},
{
"epoch": 0.8105081826012058,
"grad_norm": 0.8578802943229675,
"learning_rate": 1.8254071656524997e-06,
"loss": 1.1418,
"step": 941
},
{
"epoch": 0.8113695090439277,
"grad_norm": 0.8318899869918823,
"learning_rate": 1.8093687138875648e-06,
"loss": 1.1623,
"step": 942
},
{
"epoch": 0.8122308354866494,
"grad_norm": 0.9500533938407898,
"learning_rate": 1.7933940209114597e-06,
"loss": 1.1926,
"step": 943
},
{
"epoch": 0.8130921619293713,
"grad_norm": 0.8835731148719788,
"learning_rate": 1.7774832110768847e-06,
"loss": 1.1874,
"step": 944
},
{
"epoch": 0.813953488372093,
"grad_norm": 0.8308874368667603,
"learning_rate": 1.7616364082392446e-06,
"loss": 1.1762,
"step": 945
},
{
"epoch": 0.8148148148148148,
"grad_norm": 0.9012060761451721,
"learning_rate": 1.745853735755687e-06,
"loss": 1.1723,
"step": 946
},
{
"epoch": 0.8156761412575366,
"grad_norm": 0.8090865015983582,
"learning_rate": 1.7301353164841562e-06,
"loss": 1.1905,
"step": 947
},
{
"epoch": 0.8165374677002584,
"grad_norm": 0.9133245348930359,
"learning_rate": 1.7144812727824233e-06,
"loss": 1.1314,
"step": 948
},
{
"epoch": 0.8173987941429802,
"grad_norm": 0.817896842956543,
"learning_rate": 1.6988917265071337e-06,
"loss": 1.2059,
"step": 949
},
{
"epoch": 0.818260120585702,
"grad_norm": 0.8479551076889038,
"learning_rate": 1.6833667990128622e-06,
"loss": 1.1256,
"step": 950
},
{
"epoch": 0.8191214470284238,
"grad_norm": 0.9293598532676697,
"learning_rate": 1.6679066111511677e-06,
"loss": 1.1371,
"step": 951
},
{
"epoch": 0.8199827734711456,
"grad_norm": 0.8253700733184814,
"learning_rate": 1.6525112832696576e-06,
"loss": 1.1672,
"step": 952
},
{
"epoch": 0.8208440999138673,
"grad_norm": 0.8608076572418213,
"learning_rate": 1.6371809352110446e-06,
"loss": 1.1383,
"step": 953
},
{
"epoch": 0.8217054263565892,
"grad_norm": 0.9392169117927551,
"learning_rate": 1.6219156863122121e-06,
"loss": 1.1867,
"step": 954
},
{
"epoch": 0.8225667527993109,
"grad_norm": 0.8883967399597168,
"learning_rate": 1.6067156554032893e-06,
"loss": 1.2018,
"step": 955
},
{
"epoch": 0.8234280792420328,
"grad_norm": 0.8919565677642822,
"learning_rate": 1.5915809608067245e-06,
"loss": 1.1873,
"step": 956
},
{
"epoch": 0.8242894056847545,
"grad_norm": 0.8182902932167053,
"learning_rate": 1.5765117203363723e-06,
"loss": 1.1613,
"step": 957
},
{
"epoch": 0.8251507321274764,
"grad_norm": 0.8579580783843994,
"learning_rate": 1.5615080512965563e-06,
"loss": 1.1883,
"step": 958
},
{
"epoch": 0.8260120585701981,
"grad_norm": 0.8626570105552673,
"learning_rate": 1.5465700704811825e-06,
"loss": 1.171,
"step": 959
},
{
"epoch": 0.8268733850129198,
"grad_norm": 0.8572131395339966,
"learning_rate": 1.5316978941728045e-06,
"loss": 1.1429,
"step": 960
},
{
"epoch": 0.8277347114556417,
"grad_norm": 0.8980193734169006,
"learning_rate": 1.5168916381417387e-06,
"loss": 1.1485,
"step": 961
},
{
"epoch": 0.8285960378983634,
"grad_norm": 0.9097509384155273,
"learning_rate": 1.5021514176451514e-06,
"loss": 1.1834,
"step": 962
},
{
"epoch": 0.8294573643410853,
"grad_norm": 1.002036213874817,
"learning_rate": 1.487477347426164e-06,
"loss": 1.1611,
"step": 963
},
{
"epoch": 0.830318690783807,
"grad_norm": 0.967788815498352,
"learning_rate": 1.4728695417129579e-06,
"loss": 1.1471,
"step": 964
},
{
"epoch": 0.8311800172265289,
"grad_norm": 0.9008656740188599,
"learning_rate": 1.458328114217894e-06,
"loss": 1.1956,
"step": 965
},
{
"epoch": 0.8320413436692506,
"grad_norm": 0.927203893661499,
"learning_rate": 1.4438531781366226e-06,
"loss": 1.1646,
"step": 966
},
{
"epoch": 0.8329026701119724,
"grad_norm": 0.877873957157135,
"learning_rate": 1.4294448461471943e-06,
"loss": 1.1799,
"step": 967
},
{
"epoch": 0.8337639965546942,
"grad_norm": 0.8068222999572754,
"learning_rate": 1.4151032304091928e-06,
"loss": 1.1723,
"step": 968
},
{
"epoch": 0.834625322997416,
"grad_norm": 0.8391594290733337,
"learning_rate": 1.400828442562857e-06,
"loss": 1.1462,
"step": 969
},
{
"epoch": 0.8354866494401378,
"grad_norm": 0.8191964030265808,
"learning_rate": 1.3866205937282195e-06,
"loss": 1.1757,
"step": 970
},
{
"epoch": 0.8363479758828596,
"grad_norm": 0.8616624474525452,
"learning_rate": 1.372479794504229e-06,
"loss": 1.1719,
"step": 971
},
{
"epoch": 0.8372093023255814,
"grad_norm": 0.9123625755310059,
"learning_rate": 1.3584061549679017e-06,
"loss": 1.1638,
"step": 972
},
{
"epoch": 0.8380706287683032,
"grad_norm": 0.8329593539237976,
"learning_rate": 1.3443997846734535e-06,
"loss": 1.1267,
"step": 973
},
{
"epoch": 0.8389319552110249,
"grad_norm": 0.9073025584220886,
"learning_rate": 1.330460792651459e-06,
"loss": 1.1523,
"step": 974
},
{
"epoch": 0.8397932816537468,
"grad_norm": 0.8376272916793823,
"learning_rate": 1.31658928740799e-06,
"loss": 1.1472,
"step": 975
},
{
"epoch": 0.8406546080964685,
"grad_norm": 0.8478926420211792,
"learning_rate": 1.3027853769237808e-06,
"loss": 1.1705,
"step": 976
},
{
"epoch": 0.8415159345391904,
"grad_norm": 0.9134843945503235,
"learning_rate": 1.2890491686533812e-06,
"loss": 1.1835,
"step": 977
},
{
"epoch": 0.8423772609819121,
"grad_norm": 0.8817895650863647,
"learning_rate": 1.2753807695243258e-06,
"loss": 1.1549,
"step": 978
},
{
"epoch": 0.843238587424634,
"grad_norm": 0.9673689603805542,
"learning_rate": 1.2617802859363016e-06,
"loss": 1.1211,
"step": 979
},
{
"epoch": 0.8440999138673557,
"grad_norm": 0.8391812443733215,
"learning_rate": 1.2482478237603102e-06,
"loss": 1.1272,
"step": 980
},
{
"epoch": 0.8449612403100775,
"grad_norm": 0.9365954399108887,
"learning_rate": 1.2347834883378518e-06,
"loss": 1.1997,
"step": 981
},
{
"epoch": 0.8458225667527993,
"grad_norm": 0.8548864722251892,
"learning_rate": 1.2213873844801049e-06,
"loss": 1.1804,
"step": 982
},
{
"epoch": 0.8466838931955211,
"grad_norm": 0.7988507151603699,
"learning_rate": 1.2080596164671098e-06,
"loss": 1.1528,
"step": 983
},
{
"epoch": 0.8475452196382429,
"grad_norm": 0.8993740677833557,
"learning_rate": 1.1948002880469601e-06,
"loss": 1.1818,
"step": 984
},
{
"epoch": 0.8484065460809647,
"grad_norm": 1.0622564554214478,
"learning_rate": 1.1816095024349828e-06,
"loss": 1.2045,
"step": 985
},
{
"epoch": 0.8492678725236865,
"grad_norm": 0.9041065573692322,
"learning_rate": 1.1684873623129457e-06,
"loss": 1.1534,
"step": 986
},
{
"epoch": 0.8501291989664083,
"grad_norm": 0.8370366096496582,
"learning_rate": 1.1554339698282623e-06,
"loss": 1.1714,
"step": 987
},
{
"epoch": 0.85099052540913,
"grad_norm": 0.8588913679122925,
"learning_rate": 1.1424494265931829e-06,
"loss": 1.1604,
"step": 988
},
{
"epoch": 0.8518518518518519,
"grad_norm": 0.9080969095230103,
"learning_rate": 1.1295338336840113e-06,
"loss": 1.1827,
"step": 989
},
{
"epoch": 0.8527131782945736,
"grad_norm": 0.8849515318870544,
"learning_rate": 1.1166872916403226e-06,
"loss": 1.2205,
"step": 990
},
{
"epoch": 0.8535745047372955,
"grad_norm": 0.8305906653404236,
"learning_rate": 1.1039099004641684e-06,
"loss": 1.165,
"step": 991
},
{
"epoch": 0.8544358311800172,
"grad_norm": 0.8653746843338013,
"learning_rate": 1.0912017596193115e-06,
"loss": 1.1975,
"step": 992
},
{
"epoch": 0.8552971576227391,
"grad_norm": 0.9006616473197937,
"learning_rate": 1.0785629680304433e-06,
"loss": 1.1512,
"step": 993
},
{
"epoch": 0.8561584840654608,
"grad_norm": 0.8347445726394653,
"learning_rate": 1.065993624082411e-06,
"loss": 1.162,
"step": 994
},
{
"epoch": 0.8570198105081827,
"grad_norm": 0.8577432632446289,
"learning_rate": 1.053493825619467e-06,
"loss": 1.1691,
"step": 995
},
{
"epoch": 0.8578811369509044,
"grad_norm": 0.8580631613731384,
"learning_rate": 1.0410636699444855e-06,
"loss": 1.1569,
"step": 996
},
{
"epoch": 0.8587424633936261,
"grad_norm": 0.8457480669021606,
"learning_rate": 1.0287032538182262e-06,
"loss": 1.1585,
"step": 997
},
{
"epoch": 0.859603789836348,
"grad_norm": 0.8660425543785095,
"learning_rate": 1.0164126734585667e-06,
"loss": 1.1755,
"step": 998
},
{
"epoch": 0.8604651162790697,
"grad_norm": 0.8238218426704407,
"learning_rate": 1.0041920245397552e-06,
"loss": 1.1735,
"step": 999
},
{
"epoch": 0.8613264427217916,
"grad_norm": 0.8318968415260315,
"learning_rate": 9.920414021916747e-07,
"loss": 1.1299,
"step": 1000
},
{
"epoch": 0.8621877691645133,
"grad_norm": 0.8238946199417114,
"learning_rate": 9.79960900999094e-07,
"loss": 1.1459,
"step": 1001
},
{
"epoch": 0.8630490956072352,
"grad_norm": 0.9450215101242065,
"learning_rate": 9.67950615000931e-07,
"loss": 1.1746,
"step": 1002
},
{
"epoch": 0.8639104220499569,
"grad_norm": 0.8457410335540771,
"learning_rate": 9.560106376895305e-07,
"loss": 1.1797,
"step": 1003
},
{
"epoch": 0.8647717484926787,
"grad_norm": 0.8758255243301392,
"learning_rate": 9.441410620099201e-07,
"loss": 1.1639,
"step": 1004
},
{
"epoch": 0.8656330749354005,
"grad_norm": 0.9426845908164978,
"learning_rate": 9.32341980359105e-07,
"loss": 1.1763,
"step": 1005
},
{
"epoch": 0.8664944013781223,
"grad_norm": 0.7973933219909668,
"learning_rate": 9.206134845853343e-07,
"loss": 1.172,
"step": 1006
},
{
"epoch": 0.8673557278208441,
"grad_norm": 0.8713229298591614,
"learning_rate": 9.089556659873921e-07,
"loss": 1.1684,
"step": 1007
},
{
"epoch": 0.8682170542635659,
"grad_norm": 0.8608222603797913,
"learning_rate": 8.973686153138872e-07,
"loss": 1.1564,
"step": 1008
},
{
"epoch": 0.8690783807062877,
"grad_norm": 0.8522865176200867,
"learning_rate": 8.858524227625498e-07,
"loss": 1.1644,
"step": 1009
},
{
"epoch": 0.8699397071490095,
"grad_norm": 0.8249273896217346,
"learning_rate": 8.744071779795171e-07,
"loss": 1.1637,
"step": 1010
},
{
"epoch": 0.8708010335917312,
"grad_norm": 0.9221320152282715,
"learning_rate": 8.630329700586481e-07,
"loss": 1.1803,
"step": 1011
},
{
"epoch": 0.8716623600344531,
"grad_norm": 0.8558515906333923,
"learning_rate": 8.517298875408253e-07,
"loss": 1.1779,
"step": 1012
},
{
"epoch": 0.8725236864771748,
"grad_norm": 0.8073273301124573,
"learning_rate": 8.40498018413266e-07,
"loss": 1.1561,
"step": 1013
},
{
"epoch": 0.8733850129198967,
"grad_norm": 0.7956110835075378,
"learning_rate": 8.293374501088358e-07,
"loss": 1.1593,
"step": 1014
},
{
"epoch": 0.8742463393626184,
"grad_norm": 0.8590007424354553,
"learning_rate": 8.182482695053728e-07,
"loss": 1.1656,
"step": 1015
},
{
"epoch": 0.8751076658053403,
"grad_norm": 0.867030143737793,
"learning_rate": 8.072305629250033e-07,
"loss": 1.1469,
"step": 1016
},
{
"epoch": 0.875968992248062,
"grad_norm": 0.8188326358795166,
"learning_rate": 7.962844161334748e-07,
"loss": 1.1654,
"step": 1017
},
{
"epoch": 0.8768303186907838,
"grad_norm": 0.8768230676651001,
"learning_rate": 7.854099143394933e-07,
"loss": 1.1844,
"step": 1018
},
{
"epoch": 0.8776916451335056,
"grad_norm": 0.840861976146698,
"learning_rate": 7.746071421940482e-07,
"loss": 1.1787,
"step": 1019
},
{
"epoch": 0.8785529715762274,
"grad_norm": 0.8956863880157471,
"learning_rate": 7.638761837897612e-07,
"loss": 1.1662,
"step": 1020
},
{
"epoch": 0.8794142980189492,
"grad_norm": 0.867888867855072,
"learning_rate": 7.532171226602336e-07,
"loss": 1.1606,
"step": 1021
},
{
"epoch": 0.880275624461671,
"grad_norm": 0.8646805286407471,
"learning_rate": 7.426300417793919e-07,
"loss": 1.1673,
"step": 1022
},
{
"epoch": 0.8811369509043928,
"grad_norm": 0.8361663818359375,
"learning_rate": 7.321150235608398e-07,
"loss": 1.1581,
"step": 1023
},
{
"epoch": 0.8819982773471146,
"grad_norm": 0.8646005988121033,
"learning_rate": 7.21672149857221e-07,
"loss": 1.2194,
"step": 1024
},
{
"epoch": 0.8828596037898363,
"grad_norm": 0.829197883605957,
"learning_rate": 7.113015019595793e-07,
"loss": 1.1772,
"step": 1025
},
{
"epoch": 0.8837209302325582,
"grad_norm": 0.8020138144493103,
"learning_rate": 7.010031605967316e-07,
"loss": 1.1722,
"step": 1026
},
{
"epoch": 0.8845822566752799,
"grad_norm": 0.9305247664451599,
"learning_rate": 6.907772059346285e-07,
"loss": 1.1753,
"step": 1027
},
{
"epoch": 0.8854435831180018,
"grad_norm": 0.8150957226753235,
"learning_rate": 6.806237175757457e-07,
"loss": 1.1573,
"step": 1028
},
{
"epoch": 0.8863049095607235,
"grad_norm": 0.8378347158432007,
"learning_rate": 6.705427745584469e-07,
"loss": 1.1683,
"step": 1029
},
{
"epoch": 0.8871662360034454,
"grad_norm": 0.8095634579658508,
"learning_rate": 6.605344553563775e-07,
"loss": 1.1681,
"step": 1030
},
{
"epoch": 0.8880275624461671,
"grad_norm": 0.8543710708618164,
"learning_rate": 6.505988378778616e-07,
"loss": 1.1875,
"step": 1031
},
{
"epoch": 0.8888888888888888,
"grad_norm": 0.9036997556686401,
"learning_rate": 6.407359994652773e-07,
"loss": 1.218,
"step": 1032
},
{
"epoch": 0.8897502153316107,
"grad_norm": 0.8567870855331421,
"learning_rate": 6.309460168944692e-07,
"loss": 1.1726,
"step": 1033
},
{
"epoch": 0.8906115417743324,
"grad_norm": 0.827688455581665,
"learning_rate": 6.212289663741477e-07,
"loss": 1.181,
"step": 1034
},
{
"epoch": 0.8914728682170543,
"grad_norm": 0.8630082011222839,
"learning_rate": 6.11584923545292e-07,
"loss": 1.1624,
"step": 1035
},
{
"epoch": 0.892334194659776,
"grad_norm": 0.8756182789802551,
"learning_rate": 6.020139634805622e-07,
"loss": 1.164,
"step": 1036
},
{
"epoch": 0.8931955211024979,
"grad_norm": 0.8342011570930481,
"learning_rate": 5.925161606837182e-07,
"loss": 1.1552,
"step": 1037
},
{
"epoch": 0.8940568475452196,
"grad_norm": 0.8205728530883789,
"learning_rate": 5.830915890890366e-07,
"loss": 1.1652,
"step": 1038
},
{
"epoch": 0.8949181739879414,
"grad_norm": 0.8547372221946716,
"learning_rate": 5.737403220607374e-07,
"loss": 1.1837,
"step": 1039
},
{
"epoch": 0.8957795004306632,
"grad_norm": 0.8426522612571716,
"learning_rate": 5.644624323924108e-07,
"loss": 1.2164,
"step": 1040
},
{
"epoch": 0.896640826873385,
"grad_norm": 0.8302751183509827,
"learning_rate": 5.552579923064516e-07,
"loss": 1.1782,
"step": 1041
},
{
"epoch": 0.8975021533161068,
"grad_norm": 0.8574025630950928,
"learning_rate": 5.461270734534973e-07,
"loss": 1.1599,
"step": 1042
},
{
"epoch": 0.8983634797588286,
"grad_norm": 0.8473777770996094,
"learning_rate": 5.370697469118713e-07,
"loss": 1.1562,
"step": 1043
},
{
"epoch": 0.8992248062015504,
"grad_norm": 0.8828932046890259,
"learning_rate": 5.28086083187025e-07,
"loss": 1.1839,
"step": 1044
},
{
"epoch": 0.9000861326442722,
"grad_norm": 0.9018280506134033,
"learning_rate": 5.191761522109939e-07,
"loss": 1.1884,
"step": 1045
},
{
"epoch": 0.9009474590869939,
"grad_norm": 0.8722638487815857,
"learning_rate": 5.10340023341851e-07,
"loss": 1.1788,
"step": 1046
},
{
"epoch": 0.9018087855297158,
"grad_norm": 0.8644421696662903,
"learning_rate": 5.015777653631693e-07,
"loss": 1.1753,
"step": 1047
},
{
"epoch": 0.9026701119724375,
"grad_norm": 0.892296552658081,
"learning_rate": 4.928894464834843e-07,
"loss": 1.1831,
"step": 1048
},
{
"epoch": 0.9035314384151594,
"grad_norm": 0.8241122364997864,
"learning_rate": 4.84275134335761e-07,
"loss": 1.1732,
"step": 1049
},
{
"epoch": 0.9043927648578811,
"grad_norm": 0.8443476557731628,
"learning_rate": 4.757348959768704e-07,
"loss": 1.2135,
"step": 1050
},
{
"epoch": 0.905254091300603,
"grad_norm": 0.8274143934249878,
"learning_rate": 4.67268797887066e-07,
"loss": 1.1765,
"step": 1051
},
{
"epoch": 0.9061154177433247,
"grad_norm": 0.8572297096252441,
"learning_rate": 4.5887690596946975e-07,
"loss": 1.1602,
"step": 1052
},
{
"epoch": 0.9069767441860465,
"grad_norm": 0.7972581386566162,
"learning_rate": 4.5055928554955665e-07,
"loss": 1.1749,
"step": 1053
},
{
"epoch": 0.9078380706287683,
"grad_norm": 0.8930532932281494,
"learning_rate": 4.4231600137464305e-07,
"loss": 1.16,
"step": 1054
},
{
"epoch": 0.9086993970714901,
"grad_norm": 0.866405189037323,
"learning_rate": 4.341471176133838e-07,
"loss": 1.1627,
"step": 1055
},
{
"epoch": 0.9095607235142119,
"grad_norm": 0.8310356736183167,
"learning_rate": 4.2605269785528037e-07,
"loss": 1.1672,
"step": 1056
},
{
"epoch": 0.9104220499569337,
"grad_norm": 0.8165515661239624,
"learning_rate": 4.1803280511017564e-07,
"loss": 1.2008,
"step": 1057
},
{
"epoch": 0.9112833763996555,
"grad_norm": 0.9229612946510315,
"learning_rate": 4.100875018077688e-07,
"loss": 1.1724,
"step": 1058
},
{
"epoch": 0.9121447028423773,
"grad_norm": 0.8320847749710083,
"learning_rate": 4.0221684979712906e-07,
"loss": 1.167,
"step": 1059
},
{
"epoch": 0.9130060292850991,
"grad_norm": 0.8543427586555481,
"learning_rate": 3.9442091034621156e-07,
"loss": 1.1767,
"step": 1060
},
{
"epoch": 0.9138673557278209,
"grad_norm": 0.9095268249511719,
"learning_rate": 3.8669974414138553e-07,
"loss": 1.1781,
"step": 1061
},
{
"epoch": 0.9147286821705426,
"grad_norm": 0.8602164387702942,
"learning_rate": 3.7905341128695484e-07,
"loss": 1.1204,
"step": 1062
},
{
"epoch": 0.9155900086132644,
"grad_norm": 0.8160679936408997,
"learning_rate": 3.7148197130469574e-07,
"loss": 1.1729,
"step": 1063
},
{
"epoch": 0.9164513350559862,
"grad_norm": 0.8283565640449524,
"learning_rate": 3.639854831333911e-07,
"loss": 1.1956,
"step": 1064
},
{
"epoch": 0.917312661498708,
"grad_norm": 0.8678607940673828,
"learning_rate": 3.5656400512837365e-07,
"loss": 1.1625,
"step": 1065
},
{
"epoch": 0.9181739879414298,
"grad_norm": 0.9065176844596863,
"learning_rate": 3.4921759506106876e-07,
"loss": 1.1178,
"step": 1066
},
{
"epoch": 0.9190353143841516,
"grad_norm": 0.8343356251716614,
"learning_rate": 3.4194631011854827e-07,
"loss": 1.1498,
"step": 1067
},
{
"epoch": 0.9198966408268734,
"grad_norm": 0.840149998664856,
"learning_rate": 3.347502069030795e-07,
"loss": 1.148,
"step": 1068
},
{
"epoch": 0.9207579672695951,
"grad_norm": 1.0363847017288208,
"learning_rate": 3.2762934143169333e-07,
"loss": 1.1507,
"step": 1069
},
{
"epoch": 0.921619293712317,
"grad_norm": 0.8863590955734253,
"learning_rate": 3.205837691357405e-07,
"loss": 1.1516,
"step": 1070
},
{
"epoch": 0.9224806201550387,
"grad_norm": 0.8128695487976074,
"learning_rate": 3.136135448604594e-07,
"loss": 1.1545,
"step": 1071
},
{
"epoch": 0.9233419465977606,
"grad_norm": 0.793180525302887,
"learning_rate": 3.067187228645618e-07,
"loss": 1.1439,
"step": 1072
},
{
"epoch": 0.9242032730404823,
"grad_norm": 0.890590488910675,
"learning_rate": 2.9989935681979165e-07,
"loss": 1.1357,
"step": 1073
},
{
"epoch": 0.9250645994832042,
"grad_norm": 0.840506374835968,
"learning_rate": 2.931554998105235e-07,
"loss": 1.1369,
"step": 1074
},
{
"epoch": 0.9259259259259259,
"grad_norm": 0.9699150919914246,
"learning_rate": 2.8648720433334e-07,
"loss": 1.1735,
"step": 1075
},
{
"epoch": 0.9267872523686477,
"grad_norm": 0.9378847479820251,
"learning_rate": 2.798945222966265e-07,
"loss": 1.1793,
"step": 1076
},
{
"epoch": 0.9276485788113695,
"grad_norm": 0.9439303278923035,
"learning_rate": 2.733775050201626e-07,
"loss": 1.145,
"step": 1077
},
{
"epoch": 0.9285099052540913,
"grad_norm": 0.9288724660873413,
"learning_rate": 2.6693620323473556e-07,
"loss": 1.1739,
"step": 1078
},
{
"epoch": 0.9293712316968131,
"grad_norm": 0.8163039088249207,
"learning_rate": 2.605706670817276e-07,
"loss": 1.1643,
"step": 1079
},
{
"epoch": 0.9302325581395349,
"grad_norm": 0.824459433555603,
"learning_rate": 2.5428094611273825e-07,
"loss": 1.1689,
"step": 1080
},
{
"epoch": 0.9310938845822567,
"grad_norm": 0.8641778826713562,
"learning_rate": 2.480670892891934e-07,
"loss": 1.1534,
"step": 1081
},
{
"epoch": 0.9319552110249785,
"grad_norm": 0.8537282943725586,
"learning_rate": 2.419291449819683e-07,
"loss": 1.195,
"step": 1082
},
{
"epoch": 0.9328165374677002,
"grad_norm": 0.9170420169830322,
"learning_rate": 2.358671609710017e-07,
"loss": 1.1937,
"step": 1083
},
{
"epoch": 0.9336778639104221,
"grad_norm": 0.973427951335907,
"learning_rate": 2.298811844449389e-07,
"loss": 1.1538,
"step": 1084
},
{
"epoch": 0.9345391903531438,
"grad_norm": 0.9759657382965088,
"learning_rate": 2.2397126200074837e-07,
"loss": 1.171,
"step": 1085
},
{
"epoch": 0.9354005167958657,
"grad_norm": 0.8713492751121521,
"learning_rate": 2.1813743964336998e-07,
"loss": 1.1599,
"step": 1086
},
{
"epoch": 0.9362618432385874,
"grad_norm": 0.8439943790435791,
"learning_rate": 2.1237976278535522e-07,
"loss": 1.1923,
"step": 1087
},
{
"epoch": 0.9371231696813093,
"grad_norm": 0.8235443234443665,
"learning_rate": 2.066982762465075e-07,
"loss": 1.207,
"step": 1088
},
{
"epoch": 0.937984496124031,
"grad_norm": 0.8441301584243774,
"learning_rate": 2.0109302425354139e-07,
"loss": 1.1855,
"step": 1089
},
{
"epoch": 0.9388458225667528,
"grad_norm": 0.8680086731910706,
"learning_rate": 1.9556405043973158e-07,
"loss": 1.1622,
"step": 1090
},
{
"epoch": 0.9397071490094746,
"grad_norm": 1.0325438976287842,
"learning_rate": 1.901113978445801e-07,
"loss": 1.1902,
"step": 1091
},
{
"epoch": 0.9405684754521964,
"grad_norm": 0.908032238483429,
"learning_rate": 1.8473510891347412e-07,
"loss": 1.1998,
"step": 1092
},
{
"epoch": 0.9414298018949182,
"grad_norm": 0.8270619511604309,
"learning_rate": 1.794352254973597e-07,
"loss": 1.1663,
"step": 1093
},
{
"epoch": 0.94229112833764,
"grad_norm": 0.8700262308120728,
"learning_rate": 1.742117888524153e-07,
"loss": 1.1732,
"step": 1094
},
{
"epoch": 0.9431524547803618,
"grad_norm": 0.8367083668708801,
"learning_rate": 1.6906483963973207e-07,
"loss": 1.1595,
"step": 1095
},
{
"epoch": 0.9440137812230835,
"grad_norm": 0.9255692958831787,
"learning_rate": 1.6399441792499305e-07,
"loss": 1.1828,
"step": 1096
},
{
"epoch": 0.9448751076658053,
"grad_norm": 0.8711833357810974,
"learning_rate": 1.590005631781666e-07,
"loss": 1.189,
"step": 1097
},
{
"epoch": 0.9457364341085271,
"grad_norm": 0.8770515322685242,
"learning_rate": 1.5408331427319345e-07,
"loss": 1.1903,
"step": 1098
},
{
"epoch": 0.9465977605512489,
"grad_norm": 0.8291750550270081,
"learning_rate": 1.4924270948769027e-07,
"loss": 1.1713,
"step": 1099
},
{
"epoch": 0.9474590869939707,
"grad_norm": 0.9927818179130554,
"learning_rate": 1.4447878650264867e-07,
"loss": 1.2139,
"step": 1100
},
{
"epoch": 0.9483204134366925,
"grad_norm": 0.8271976709365845,
"learning_rate": 1.3979158240213787e-07,
"loss": 1.1788,
"step": 1101
},
{
"epoch": 0.9491817398794143,
"grad_norm": 0.8746073842048645,
"learning_rate": 1.3518113367302356e-07,
"loss": 1.149,
"step": 1102
},
{
"epoch": 0.9500430663221361,
"grad_norm": 0.8062018752098083,
"learning_rate": 1.3064747620468053e-07,
"loss": 1.1709,
"step": 1103
},
{
"epoch": 0.9509043927648578,
"grad_norm": 0.8557183146476746,
"learning_rate": 1.2619064528871185e-07,
"loss": 1.1872,
"step": 1104
},
{
"epoch": 0.9517657192075797,
"grad_norm": 0.8311978578567505,
"learning_rate": 1.218106756186743e-07,
"loss": 1.1717,
"step": 1105
},
{
"epoch": 0.9526270456503014,
"grad_norm": 0.8271847367286682,
"learning_rate": 1.1750760128981131e-07,
"loss": 1.1682,
"step": 1106
},
{
"epoch": 0.9534883720930233,
"grad_norm": 0.8393845558166504,
"learning_rate": 1.132814557987838e-07,
"loss": 1.1971,
"step": 1107
},
{
"epoch": 0.954349698535745,
"grad_norm": 0.8846034407615662,
"learning_rate": 1.0913227204341292e-07,
"loss": 1.1751,
"step": 1108
},
{
"epoch": 0.9552110249784669,
"grad_norm": 0.7971451282501221,
"learning_rate": 1.0506008232242348e-07,
"loss": 1.1872,
"step": 1109
},
{
"epoch": 0.9560723514211886,
"grad_norm": 0.8897345066070557,
"learning_rate": 1.010649183351875e-07,
"loss": 1.1562,
"step": 1110
},
{
"epoch": 0.9569336778639104,
"grad_norm": 0.8979521989822388,
"learning_rate": 9.714681118148329e-08,
"loss": 1.1412,
"step": 1111
},
{
"epoch": 0.9577950043066322,
"grad_norm": 0.836390495300293,
"learning_rate": 9.330579136125117e-08,
"loss": 1.1585,
"step": 1112
},
{
"epoch": 0.958656330749354,
"grad_norm": 0.8353316187858582,
"learning_rate": 8.95418887743571e-08,
"loss": 1.1722,
"step": 1113
},
{
"epoch": 0.9595176571920758,
"grad_norm": 0.9047684073448181,
"learning_rate": 8.58551327203594e-08,
"loss": 1.1509,
"step": 1114
},
{
"epoch": 0.9603789836347976,
"grad_norm": 0.8520369529724121,
"learning_rate": 8.224555189827565e-08,
"loss": 1.1899,
"step": 1115
},
{
"epoch": 0.9612403100775194,
"grad_norm": 0.8386793732643127,
"learning_rate": 7.871317440637072e-08,
"loss": 1.1716,
"step": 1116
},
{
"epoch": 0.9621016365202412,
"grad_norm": 0.819486677646637,
"learning_rate": 7.525802774192791e-08,
"loss": 1.1616,
"step": 1117
},
{
"epoch": 0.9629629629629629,
"grad_norm": 0.8136334419250488,
"learning_rate": 7.188013880103817e-08,
"loss": 1.1653,
"step": 1118
},
{
"epoch": 0.9638242894056848,
"grad_norm": 0.8472459316253662,
"learning_rate": 6.857953387839012e-08,
"loss": 1.1666,
"step": 1119
},
{
"epoch": 0.9646856158484065,
"grad_norm": 0.924691915512085,
"learning_rate": 6.535623866706698e-08,
"loss": 1.1448,
"step": 1120
},
{
"epoch": 0.9655469422911284,
"grad_norm": 0.7984655499458313,
"learning_rate": 6.22102782583478e-08,
"loss": 1.1922,
"step": 1121
},
{
"epoch": 0.9664082687338501,
"grad_norm": 0.9025313854217529,
"learning_rate": 5.9141677141506536e-08,
"loss": 1.1607,
"step": 1122
},
{
"epoch": 0.967269595176572,
"grad_norm": 0.8492075204849243,
"learning_rate": 5.61504592036255e-08,
"loss": 1.1702,
"step": 1123
},
{
"epoch": 0.9681309216192937,
"grad_norm": 0.8583032488822937,
"learning_rate": 5.323664772941217e-08,
"loss": 1.1547,
"step": 1124
},
{
"epoch": 0.9689922480620154,
"grad_norm": 0.8614605665206909,
"learning_rate": 5.0400265401009395e-08,
"loss": 1.2079,
"step": 1125
},
{
"epoch": 0.9698535745047373,
"grad_norm": 0.7994558215141296,
"learning_rate": 4.76413342978288e-08,
"loss": 1.1677,
"step": 1126
},
{
"epoch": 0.970714900947459,
"grad_norm": 0.8343400955200195,
"learning_rate": 4.4959875896370965e-08,
"loss": 1.1701,
"step": 1127
},
{
"epoch": 0.9715762273901809,
"grad_norm": 0.8843151330947876,
"learning_rate": 4.2355911070062205e-08,
"loss": 1.1443,
"step": 1128
},
{
"epoch": 0.9724375538329026,
"grad_norm": 0.8479021191596985,
"learning_rate": 3.982946008909139e-08,
"loss": 1.1963,
"step": 1129
},
{
"epoch": 0.9732988802756245,
"grad_norm": 0.834740936756134,
"learning_rate": 3.738054262025226e-08,
"loss": 1.1788,
"step": 1130
},
{
"epoch": 0.9741602067183462,
"grad_norm": 0.8662601709365845,
"learning_rate": 3.500917772679025e-08,
"loss": 1.1637,
"step": 1131
},
{
"epoch": 0.9750215331610681,
"grad_norm": 1.2002208232879639,
"learning_rate": 3.271538386825257e-08,
"loss": 1.1841,
"step": 1132
},
{
"epoch": 0.9758828596037898,
"grad_norm": 0.8810811638832092,
"learning_rate": 3.049917890034837e-08,
"loss": 1.1845,
"step": 1133
},
{
"epoch": 0.9767441860465116,
"grad_norm": 0.8231048583984375,
"learning_rate": 2.8360580074804355e-08,
"loss": 1.1378,
"step": 1134
},
{
"epoch": 0.9776055124892334,
"grad_norm": 0.8419777750968933,
"learning_rate": 2.6299604039237147e-08,
"loss": 1.1737,
"step": 1135
},
{
"epoch": 0.9784668389319552,
"grad_norm": 0.8421429991722107,
"learning_rate": 2.43162668370156e-08,
"loss": 1.1403,
"step": 1136
},
{
"epoch": 0.979328165374677,
"grad_norm": 0.8512549996376038,
"learning_rate": 2.2410583907142012e-08,
"loss": 1.1771,
"step": 1137
},
{
"epoch": 0.9801894918173988,
"grad_norm": 0.8299239873886108,
"learning_rate": 2.0582570084132224e-08,
"loss": 1.1864,
"step": 1138
},
{
"epoch": 0.9810508182601206,
"grad_norm": 0.8365635871887207,
"learning_rate": 1.883223959789571e-08,
"loss": 1.1988,
"step": 1139
},
{
"epoch": 0.9819121447028424,
"grad_norm": 0.8790571689605713,
"learning_rate": 1.7159606073627875e-08,
"loss": 1.1613,
"step": 1140
},
{
"epoch": 0.9827734711455641,
"grad_norm": 0.8469464778900146,
"learning_rate": 1.5564682531702402e-08,
"loss": 1.1488,
"step": 1141
},
{
"epoch": 0.983634797588286,
"grad_norm": 0.9134259223937988,
"learning_rate": 1.4047481387573503e-08,
"loss": 1.1662,
"step": 1142
},
{
"epoch": 0.9844961240310077,
"grad_norm": 0.8976234793663025,
"learning_rate": 1.2608014451672701e-08,
"loss": 1.2034,
"step": 1143
},
{
"epoch": 0.9853574504737296,
"grad_norm": 1.0268605947494507,
"learning_rate": 1.1246292929325552e-08,
"loss": 1.1368,
"step": 1144
},
{
"epoch": 0.9862187769164513,
"grad_norm": 0.8144389986991882,
"learning_rate": 9.96232742065506e-09,
"loss": 1.1789,
"step": 1145
},
{
"epoch": 0.9870801033591732,
"grad_norm": 0.9007599353790283,
"learning_rate": 8.756127920505065e-09,
"loss": 1.1993,
"step": 1146
},
{
"epoch": 0.9879414298018949,
"grad_norm": 0.8165833950042725,
"learning_rate": 7.627703818363642e-09,
"loss": 1.17,
"step": 1147
},
{
"epoch": 0.9888027562446167,
"grad_norm": 0.9031330347061157,
"learning_rate": 6.577063898285385e-09,
"loss": 1.1961,
"step": 1148
},
{
"epoch": 0.9896640826873385,
"grad_norm": 0.8834061026573181,
"learning_rate": 5.604216338824797e-09,
"loss": 1.2096,
"step": 1149
},
{
"epoch": 0.9905254091300603,
"grad_norm": 0.8041283488273621,
"learning_rate": 4.7091687129718896e-09,
"loss": 1.1761,
"step": 1150
},
{
"epoch": 0.9913867355727821,
"grad_norm": 0.9003199338912964,
"learning_rate": 3.891927988098898e-09,
"loss": 1.206,
"step": 1151
},
{
"epoch": 0.9922480620155039,
"grad_norm": 0.82999587059021,
"learning_rate": 3.1525005258969953e-09,
"loss": 1.1214,
"step": 1152
},
{
"epoch": 0.9931093884582257,
"grad_norm": 0.8493615984916687,
"learning_rate": 2.490892082331886e-09,
"loss": 1.2059,
"step": 1153
},
{
"epoch": 0.9939707149009475,
"grad_norm": 0.8477335572242737,
"learning_rate": 1.907107807600506e-09,
"loss": 1.1827,
"step": 1154
},
{
"epoch": 0.9948320413436692,
"grad_norm": 0.8850772380828857,
"learning_rate": 1.4011522460866122e-09,
"loss": 1.1666,
"step": 1155
},
{
"epoch": 0.9956933677863911,
"grad_norm": 0.8809195756912231,
"learning_rate": 9.730293363297006e-10,
"loss": 1.1649,
"step": 1156
},
{
"epoch": 0.9965546942291128,
"grad_norm": 0.891930103302002,
"learning_rate": 6.227424109883639e-10,
"loss": 1.1778,
"step": 1157
},
{
"epoch": 0.9974160206718347,
"grad_norm": 0.8007426261901855,
"learning_rate": 3.502941968225315e-10,
"loss": 1.1696,
"step": 1158
},
{
"epoch": 0.9982773471145564,
"grad_norm": 0.8914885520935059,
"learning_rate": 1.5568681466682223e-10,
"loss": 1.1262,
"step": 1159
},
{
"epoch": 0.9991386735572783,
"grad_norm": 0.8544787168502808,
"learning_rate": 3.8921779411671414e-11,
"loss": 1.1751,
"step": 1160
},
{
"epoch": 1.0,
"grad_norm": 0.9441701769828796,
"learning_rate": 0.0,
"loss": 1.1453,
"step": 1161
},
{
"epoch": 1.0,
"step": 1161,
"total_flos": 2.598284398506174e+19,
"train_loss": 1.285096526043257,
"train_runtime": 25375.9482,
"train_samples_per_second": 35.131,
"train_steps_per_second": 0.046
}
],
"logging_steps": 1.0,
"max_steps": 1161,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 3000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.598284398506174e+19,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}