Quasar-1 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.49438202247191,
"eval_steps": 500,
"global_step": 400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.011235955056179775,
"grad_norm": 3.1517910957336426,
"learning_rate": 2.2222222222222224e-07,
"loss": 0.4045,
"step": 1
},
{
"epoch": 0.02247191011235955,
"grad_norm": 3.8104264736175537,
"learning_rate": 4.444444444444445e-07,
"loss": 0.4326,
"step": 2
},
{
"epoch": 0.033707865168539325,
"grad_norm": 3.410100221633911,
"learning_rate": 6.666666666666667e-07,
"loss": 0.4196,
"step": 3
},
{
"epoch": 0.0449438202247191,
"grad_norm": 3.2662692070007324,
"learning_rate": 8.88888888888889e-07,
"loss": 0.3744,
"step": 4
},
{
"epoch": 0.056179775280898875,
"grad_norm": 3.052116632461548,
"learning_rate": 1.111111111111111e-06,
"loss": 0.4275,
"step": 5
},
{
"epoch": 0.06741573033707865,
"grad_norm": 2.697885513305664,
"learning_rate": 1.3333333333333334e-06,
"loss": 0.3805,
"step": 6
},
{
"epoch": 0.07865168539325842,
"grad_norm": 2.9053075313568115,
"learning_rate": 1.5555555555555558e-06,
"loss": 0.3949,
"step": 7
},
{
"epoch": 0.0898876404494382,
"grad_norm": 2.0928916931152344,
"learning_rate": 1.777777777777778e-06,
"loss": 0.3856,
"step": 8
},
{
"epoch": 0.10112359550561797,
"grad_norm": 1.903058409690857,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.3749,
"step": 9
},
{
"epoch": 0.11235955056179775,
"grad_norm": 1.6796547174453735,
"learning_rate": 2.222222222222222e-06,
"loss": 0.3811,
"step": 10
},
{
"epoch": 0.12359550561797752,
"grad_norm": 1.761483907699585,
"learning_rate": 2.4444444444444447e-06,
"loss": 0.3902,
"step": 11
},
{
"epoch": 0.1348314606741573,
"grad_norm": 1.6522550582885742,
"learning_rate": 2.666666666666667e-06,
"loss": 0.3334,
"step": 12
},
{
"epoch": 0.14606741573033707,
"grad_norm": 1.5196235179901123,
"learning_rate": 2.888888888888889e-06,
"loss": 0.3483,
"step": 13
},
{
"epoch": 0.15730337078651685,
"grad_norm": 1.3063578605651855,
"learning_rate": 3.1111111111111116e-06,
"loss": 0.318,
"step": 14
},
{
"epoch": 0.16853932584269662,
"grad_norm": 1.255723237991333,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.3313,
"step": 15
},
{
"epoch": 0.1797752808988764,
"grad_norm": 1.020233392715454,
"learning_rate": 3.555555555555556e-06,
"loss": 0.2873,
"step": 16
},
{
"epoch": 0.19101123595505617,
"grad_norm": 1.083106279373169,
"learning_rate": 3.777777777777778e-06,
"loss": 0.318,
"step": 17
},
{
"epoch": 0.20224719101123595,
"grad_norm": 0.828940749168396,
"learning_rate": 4.000000000000001e-06,
"loss": 0.2989,
"step": 18
},
{
"epoch": 0.21348314606741572,
"grad_norm": 0.7749581336975098,
"learning_rate": 4.222222222222223e-06,
"loss": 0.3038,
"step": 19
},
{
"epoch": 0.2247191011235955,
"grad_norm": 0.7293257117271423,
"learning_rate": 4.444444444444444e-06,
"loss": 0.2886,
"step": 20
},
{
"epoch": 0.23595505617977527,
"grad_norm": 0.8117908835411072,
"learning_rate": 4.666666666666667e-06,
"loss": 0.3169,
"step": 21
},
{
"epoch": 0.24719101123595505,
"grad_norm": 0.7527502179145813,
"learning_rate": 4.888888888888889e-06,
"loss": 0.2727,
"step": 22
},
{
"epoch": 0.25842696629213485,
"grad_norm": 0.8296247124671936,
"learning_rate": 5.1111111111111115e-06,
"loss": 0.2934,
"step": 23
},
{
"epoch": 0.2696629213483146,
"grad_norm": 0.773894727230072,
"learning_rate": 5.333333333333334e-06,
"loss": 0.262,
"step": 24
},
{
"epoch": 0.2808988764044944,
"grad_norm": 0.8243966698646545,
"learning_rate": 5.555555555555557e-06,
"loss": 0.2648,
"step": 25
},
{
"epoch": 0.29213483146067415,
"grad_norm": 0.7368326783180237,
"learning_rate": 5.777777777777778e-06,
"loss": 0.3076,
"step": 26
},
{
"epoch": 0.30337078651685395,
"grad_norm": 0.7896155714988708,
"learning_rate": 6e-06,
"loss": 0.3105,
"step": 27
},
{
"epoch": 0.3146067415730337,
"grad_norm": 0.7478722333908081,
"learning_rate": 6.222222222222223e-06,
"loss": 0.311,
"step": 28
},
{
"epoch": 0.3258426966292135,
"grad_norm": 0.6260741949081421,
"learning_rate": 6.444444444444445e-06,
"loss": 0.2695,
"step": 29
},
{
"epoch": 0.33707865168539325,
"grad_norm": 0.7037319540977478,
"learning_rate": 6.666666666666667e-06,
"loss": 0.2642,
"step": 30
},
{
"epoch": 0.34831460674157305,
"grad_norm": 0.64321368932724,
"learning_rate": 6.88888888888889e-06,
"loss": 0.2843,
"step": 31
},
{
"epoch": 0.3595505617977528,
"grad_norm": 0.699616014957428,
"learning_rate": 7.111111111111112e-06,
"loss": 0.2844,
"step": 32
},
{
"epoch": 0.3707865168539326,
"grad_norm": 0.641768217086792,
"learning_rate": 7.333333333333333e-06,
"loss": 0.2978,
"step": 33
},
{
"epoch": 0.38202247191011235,
"grad_norm": 0.6910679936408997,
"learning_rate": 7.555555555555556e-06,
"loss": 0.2671,
"step": 34
},
{
"epoch": 0.39325842696629215,
"grad_norm": 0.5495407581329346,
"learning_rate": 7.77777777777778e-06,
"loss": 0.2497,
"step": 35
},
{
"epoch": 0.4044943820224719,
"grad_norm": 0.5832553505897522,
"learning_rate": 8.000000000000001e-06,
"loss": 0.2556,
"step": 36
},
{
"epoch": 0.4157303370786517,
"grad_norm": 0.5854374170303345,
"learning_rate": 8.222222222222222e-06,
"loss": 0.2596,
"step": 37
},
{
"epoch": 0.42696629213483145,
"grad_norm": 0.5847079753875732,
"learning_rate": 8.444444444444446e-06,
"loss": 0.2666,
"step": 38
},
{
"epoch": 0.43820224719101125,
"grad_norm": 0.5612176060676575,
"learning_rate": 8.666666666666668e-06,
"loss": 0.231,
"step": 39
},
{
"epoch": 0.449438202247191,
"grad_norm": 0.5835954546928406,
"learning_rate": 8.888888888888888e-06,
"loss": 0.2748,
"step": 40
},
{
"epoch": 0.4606741573033708,
"grad_norm": 0.6874232888221741,
"learning_rate": 9.111111111111112e-06,
"loss": 0.294,
"step": 41
},
{
"epoch": 0.47191011235955055,
"grad_norm": 0.5141873955726624,
"learning_rate": 9.333333333333334e-06,
"loss": 0.2565,
"step": 42
},
{
"epoch": 0.48314606741573035,
"grad_norm": 0.5754961371421814,
"learning_rate": 9.555555555555556e-06,
"loss": 0.248,
"step": 43
},
{
"epoch": 0.4943820224719101,
"grad_norm": 0.6071674823760986,
"learning_rate": 9.777777777777779e-06,
"loss": 0.2864,
"step": 44
},
{
"epoch": 0.5056179775280899,
"grad_norm": 0.5854678153991699,
"learning_rate": 1e-05,
"loss": 0.2873,
"step": 45
},
{
"epoch": 0.5168539325842697,
"grad_norm": 0.5334392189979553,
"learning_rate": 9.99984578822395e-06,
"loss": 0.2508,
"step": 46
},
{
"epoch": 0.5280898876404494,
"grad_norm": 0.6400332450866699,
"learning_rate": 9.999383162408303e-06,
"loss": 0.2412,
"step": 47
},
{
"epoch": 0.5393258426966292,
"grad_norm": 0.6003167033195496,
"learning_rate": 9.998612151090004e-06,
"loss": 0.2551,
"step": 48
},
{
"epoch": 0.550561797752809,
"grad_norm": 0.5445740222930908,
"learning_rate": 9.997532801828659e-06,
"loss": 0.2519,
"step": 49
},
{
"epoch": 0.5617977528089888,
"grad_norm": 0.563345730304718,
"learning_rate": 9.996145181203616e-06,
"loss": 0.2639,
"step": 50
},
{
"epoch": 0.5730337078651685,
"grad_norm": 0.623471200466156,
"learning_rate": 9.994449374809851e-06,
"loss": 0.2351,
"step": 51
},
{
"epoch": 0.5842696629213483,
"grad_norm": 0.5658097863197327,
"learning_rate": 9.992445487252692e-06,
"loss": 0.2528,
"step": 52
},
{
"epoch": 0.5955056179775281,
"grad_norm": 0.5382084250450134,
"learning_rate": 9.990133642141359e-06,
"loss": 0.2433,
"step": 53
},
{
"epoch": 0.6067415730337079,
"grad_norm": 0.5690349340438843,
"learning_rate": 9.987513982081352e-06,
"loss": 0.2658,
"step": 54
},
{
"epoch": 0.6179775280898876,
"grad_norm": 0.5317168235778809,
"learning_rate": 9.984586668665641e-06,
"loss": 0.2267,
"step": 55
},
{
"epoch": 0.6292134831460674,
"grad_norm": 0.5717993974685669,
"learning_rate": 9.981351882464706e-06,
"loss": 0.2767,
"step": 56
},
{
"epoch": 0.6404494382022472,
"grad_norm": 0.5172840356826782,
"learning_rate": 9.9778098230154e-06,
"loss": 0.2374,
"step": 57
},
{
"epoch": 0.651685393258427,
"grad_norm": 0.6190145611763,
"learning_rate": 9.973960708808633e-06,
"loss": 0.286,
"step": 58
},
{
"epoch": 0.6629213483146067,
"grad_norm": 0.589586079120636,
"learning_rate": 9.9698047772759e-06,
"loss": 0.3017,
"step": 59
},
{
"epoch": 0.6741573033707865,
"grad_norm": 0.4945083260536194,
"learning_rate": 9.965342284774633e-06,
"loss": 0.2324,
"step": 60
},
{
"epoch": 0.6853932584269663,
"grad_norm": 0.5352405905723572,
"learning_rate": 9.960573506572391e-06,
"loss": 0.2714,
"step": 61
},
{
"epoch": 0.6966292134831461,
"grad_norm": 0.5440102219581604,
"learning_rate": 9.955498736829876e-06,
"loss": 0.2758,
"step": 62
},
{
"epoch": 0.7078651685393258,
"grad_norm": 0.5334731340408325,
"learning_rate": 9.95011828858279e-06,
"loss": 0.2648,
"step": 63
},
{
"epoch": 0.7191011235955056,
"grad_norm": 0.506775975227356,
"learning_rate": 9.944432493722525e-06,
"loss": 0.2445,
"step": 64
},
{
"epoch": 0.7303370786516854,
"grad_norm": 0.5548930168151855,
"learning_rate": 9.938441702975689e-06,
"loss": 0.2597,
"step": 65
},
{
"epoch": 0.7415730337078652,
"grad_norm": 0.5340548753738403,
"learning_rate": 9.932146285882478e-06,
"loss": 0.2499,
"step": 66
},
{
"epoch": 0.7528089887640449,
"grad_norm": 0.6233317255973816,
"learning_rate": 9.92554663077387e-06,
"loss": 0.2964,
"step": 67
},
{
"epoch": 0.7640449438202247,
"grad_norm": 0.5946013331413269,
"learning_rate": 9.918643144747681e-06,
"loss": 0.2892,
"step": 68
},
{
"epoch": 0.7752808988764045,
"grad_norm": 0.5025975704193115,
"learning_rate": 9.911436253643445e-06,
"loss": 0.2569,
"step": 69
},
{
"epoch": 0.7865168539325843,
"grad_norm": 0.5510412454605103,
"learning_rate": 9.903926402016153e-06,
"loss": 0.2298,
"step": 70
},
{
"epoch": 0.797752808988764,
"grad_norm": 0.5809914469718933,
"learning_rate": 9.89611405310883e-06,
"loss": 0.2564,
"step": 71
},
{
"epoch": 0.8089887640449438,
"grad_norm": 0.670417845249176,
"learning_rate": 9.887999688823955e-06,
"loss": 0.2906,
"step": 72
},
{
"epoch": 0.8202247191011236,
"grad_norm": 0.5335014462471008,
"learning_rate": 9.879583809693737e-06,
"loss": 0.2452,
"step": 73
},
{
"epoch": 0.8314606741573034,
"grad_norm": 0.5259139537811279,
"learning_rate": 9.870866934849248e-06,
"loss": 0.2807,
"step": 74
},
{
"epoch": 0.8426966292134831,
"grad_norm": 0.5847296714782715,
"learning_rate": 9.861849601988384e-06,
"loss": 0.2384,
"step": 75
},
{
"epoch": 0.8539325842696629,
"grad_norm": 0.549369752407074,
"learning_rate": 9.852532367342712e-06,
"loss": 0.245,
"step": 76
},
{
"epoch": 0.8651685393258427,
"grad_norm": 0.5300970673561096,
"learning_rate": 9.842915805643156e-06,
"loss": 0.2581,
"step": 77
},
{
"epoch": 0.8764044943820225,
"grad_norm": 0.5090511441230774,
"learning_rate": 9.833000510084537e-06,
"loss": 0.2508,
"step": 78
},
{
"epoch": 0.8876404494382022,
"grad_norm": 0.5674286484718323,
"learning_rate": 9.822787092288991e-06,
"loss": 0.242,
"step": 79
},
{
"epoch": 0.898876404494382,
"grad_norm": 0.5166727900505066,
"learning_rate": 9.812276182268236e-06,
"loss": 0.2441,
"step": 80
},
{
"epoch": 0.9101123595505618,
"grad_norm": 0.45713502168655396,
"learning_rate": 9.801468428384716e-06,
"loss": 0.2431,
"step": 81
},
{
"epoch": 0.9213483146067416,
"grad_norm": 0.5643267631530762,
"learning_rate": 9.790364497311597e-06,
"loss": 0.2433,
"step": 82
},
{
"epoch": 0.9325842696629213,
"grad_norm": 0.5387106537818909,
"learning_rate": 9.778965073991652e-06,
"loss": 0.2838,
"step": 83
},
{
"epoch": 0.9438202247191011,
"grad_norm": 0.4815730154514313,
"learning_rate": 9.767270861595006e-06,
"loss": 0.2499,
"step": 84
},
{
"epoch": 0.9550561797752809,
"grad_norm": 0.6593579053878784,
"learning_rate": 9.755282581475769e-06,
"loss": 0.2873,
"step": 85
},
{
"epoch": 0.9662921348314607,
"grad_norm": 0.579657793045044,
"learning_rate": 9.743000973127523e-06,
"loss": 0.2539,
"step": 86
},
{
"epoch": 0.9775280898876404,
"grad_norm": 0.5057744979858398,
"learning_rate": 9.730426794137727e-06,
"loss": 0.2628,
"step": 87
},
{
"epoch": 0.9887640449438202,
"grad_norm": 0.595583438873291,
"learning_rate": 9.717560820140968e-06,
"loss": 0.2391,
"step": 88
},
{
"epoch": 1.0,
"grad_norm": 0.5546755194664001,
"learning_rate": 9.704403844771128e-06,
"loss": 0.2439,
"step": 89
},
{
"epoch": 1.0112359550561798,
"grad_norm": 0.6112611889839172,
"learning_rate": 9.690956679612422e-06,
"loss": 0.2126,
"step": 90
},
{
"epoch": 1.0224719101123596,
"grad_norm": 0.5053119659423828,
"learning_rate": 9.677220154149338e-06,
"loss": 0.195,
"step": 91
},
{
"epoch": 1.0337078651685394,
"grad_norm": 0.48375797271728516,
"learning_rate": 9.663195115715472e-06,
"loss": 0.2014,
"step": 92
},
{
"epoch": 1.0449438202247192,
"grad_norm": 0.4847674071788788,
"learning_rate": 9.648882429441258e-06,
"loss": 0.2242,
"step": 93
},
{
"epoch": 1.0561797752808988,
"grad_norm": 0.5149394869804382,
"learning_rate": 9.634282978200605e-06,
"loss": 0.2434,
"step": 94
},
{
"epoch": 1.0674157303370786,
"grad_norm": 0.5693023800849915,
"learning_rate": 9.619397662556434e-06,
"loss": 0.1967,
"step": 95
},
{
"epoch": 1.0786516853932584,
"grad_norm": 0.5208962559700012,
"learning_rate": 9.604227400705134e-06,
"loss": 0.2029,
"step": 96
},
{
"epoch": 1.0898876404494382,
"grad_norm": 0.46080583333969116,
"learning_rate": 9.588773128419907e-06,
"loss": 0.1977,
"step": 97
},
{
"epoch": 1.101123595505618,
"grad_norm": 0.46797895431518555,
"learning_rate": 9.57303579899307e-06,
"loss": 0.1958,
"step": 98
},
{
"epoch": 1.1123595505617978,
"grad_norm": 0.5413700938224792,
"learning_rate": 9.557016383177226e-06,
"loss": 0.1997,
"step": 99
},
{
"epoch": 1.1235955056179776,
"grad_norm": 0.5698537826538086,
"learning_rate": 9.540715869125407e-06,
"loss": 0.2097,
"step": 100
},
{
"epoch": 1.1348314606741572,
"grad_norm": 0.5729172825813293,
"learning_rate": 9.524135262330098e-06,
"loss": 0.191,
"step": 101
},
{
"epoch": 1.146067415730337,
"grad_norm": 0.47994568943977356,
"learning_rate": 9.507275585561228e-06,
"loss": 0.2,
"step": 102
},
{
"epoch": 1.1573033707865168,
"grad_norm": 0.4811231195926666,
"learning_rate": 9.490137878803078e-06,
"loss": 0.2024,
"step": 103
},
{
"epoch": 1.1685393258426966,
"grad_norm": 0.5804319977760315,
"learning_rate": 9.472723199190126e-06,
"loss": 0.2213,
"step": 104
},
{
"epoch": 1.1797752808988764,
"grad_norm": 0.5076270699501038,
"learning_rate": 9.45503262094184e-06,
"loss": 0.1958,
"step": 105
},
{
"epoch": 1.1910112359550562,
"grad_norm": 0.5329607725143433,
"learning_rate": 9.437067235296418e-06,
"loss": 0.1977,
"step": 106
},
{
"epoch": 1.202247191011236,
"grad_norm": 0.4917041063308716,
"learning_rate": 9.418828150443469e-06,
"loss": 0.2078,
"step": 107
},
{
"epoch": 1.2134831460674158,
"grad_norm": 0.4627450406551361,
"learning_rate": 9.40031649145566e-06,
"loss": 0.186,
"step": 108
},
{
"epoch": 1.2247191011235956,
"grad_norm": 0.4185917377471924,
"learning_rate": 9.381533400219319e-06,
"loss": 0.1792,
"step": 109
},
{
"epoch": 1.2359550561797752,
"grad_norm": 0.5121137499809265,
"learning_rate": 9.362480035363987e-06,
"loss": 0.2222,
"step": 110
},
{
"epoch": 1.247191011235955,
"grad_norm": 0.5793914198875427,
"learning_rate": 9.343157572190957e-06,
"loss": 0.2305,
"step": 111
},
{
"epoch": 1.2584269662921348,
"grad_norm": 0.5325943231582642,
"learning_rate": 9.323567202600777e-06,
"loss": 0.2148,
"step": 112
},
{
"epoch": 1.2696629213483146,
"grad_norm": 0.42034098505973816,
"learning_rate": 9.30371013501972e-06,
"loss": 0.178,
"step": 113
},
{
"epoch": 1.2808988764044944,
"grad_norm": 0.43560945987701416,
"learning_rate": 9.28358759432525e-06,
"loss": 0.1882,
"step": 114
},
{
"epoch": 1.2921348314606742,
"grad_norm": 0.4617180824279785,
"learning_rate": 9.263200821770462e-06,
"loss": 0.1825,
"step": 115
},
{
"epoch": 1.303370786516854,
"grad_norm": 0.5678491592407227,
"learning_rate": 9.242551074907519e-06,
"loss": 0.2219,
"step": 116
},
{
"epoch": 1.3146067415730336,
"grad_norm": 0.48582446575164795,
"learning_rate": 9.221639627510076e-06,
"loss": 0.1835,
"step": 117
},
{
"epoch": 1.3258426966292136,
"grad_norm": 0.44229453802108765,
"learning_rate": 9.20046776949471e-06,
"loss": 0.1699,
"step": 118
},
{
"epoch": 1.3370786516853932,
"grad_norm": 0.4907465875148773,
"learning_rate": 9.179036806841352e-06,
"loss": 0.2226,
"step": 119
},
{
"epoch": 1.348314606741573,
"grad_norm": 0.4677446782588959,
"learning_rate": 9.157348061512728e-06,
"loss": 0.1924,
"step": 120
},
{
"epoch": 1.3595505617977528,
"grad_norm": 0.491872102022171,
"learning_rate": 9.13540287137281e-06,
"loss": 0.2151,
"step": 121
},
{
"epoch": 1.3707865168539326,
"grad_norm": 0.4544215798377991,
"learning_rate": 9.1132025901043e-06,
"loss": 0.1898,
"step": 122
},
{
"epoch": 1.3820224719101124,
"grad_norm": 0.4714052677154541,
"learning_rate": 9.090748587125118e-06,
"loss": 0.2279,
"step": 123
},
{
"epoch": 1.3932584269662922,
"grad_norm": 0.4766599237918854,
"learning_rate": 9.068042247503937e-06,
"loss": 0.2151,
"step": 124
},
{
"epoch": 1.404494382022472,
"grad_norm": 0.47084319591522217,
"learning_rate": 9.045084971874738e-06,
"loss": 0.1948,
"step": 125
},
{
"epoch": 1.4157303370786516,
"grad_norm": 0.4725306034088135,
"learning_rate": 9.021878176350422e-06,
"loss": 0.2025,
"step": 126
},
{
"epoch": 1.4269662921348314,
"grad_norm": 0.4789423942565918,
"learning_rate": 8.998423292435455e-06,
"loss": 0.1997,
"step": 127
},
{
"epoch": 1.4382022471910112,
"grad_norm": 0.4451747238636017,
"learning_rate": 8.97472176693755e-06,
"loss": 0.1687,
"step": 128
},
{
"epoch": 1.449438202247191,
"grad_norm": 0.5205036401748657,
"learning_rate": 8.950775061878453e-06,
"loss": 0.2318,
"step": 129
},
{
"epoch": 1.4606741573033708,
"grad_norm": 0.4217935800552368,
"learning_rate": 8.926584654403725e-06,
"loss": 0.1644,
"step": 130
},
{
"epoch": 1.4719101123595506,
"grad_norm": 0.5485341548919678,
"learning_rate": 8.902152036691649e-06,
"loss": 0.209,
"step": 131
},
{
"epoch": 1.4831460674157304,
"grad_norm": 0.48224788904190063,
"learning_rate": 8.877478715861173e-06,
"loss": 0.2065,
"step": 132
},
{
"epoch": 1.49438202247191,
"grad_norm": 0.5133256316184998,
"learning_rate": 8.852566213878947e-06,
"loss": 0.1999,
"step": 133
},
{
"epoch": 1.50561797752809,
"grad_norm": 0.5534752011299133,
"learning_rate": 8.827416067465442e-06,
"loss": 0.2067,
"step": 134
},
{
"epoch": 1.5168539325842696,
"grad_norm": 0.49837929010391235,
"learning_rate": 8.802029828000157e-06,
"loss": 0.1921,
"step": 135
},
{
"epoch": 1.5280898876404494,
"grad_norm": 0.45644959807395935,
"learning_rate": 8.776409061425919e-06,
"loss": 0.1966,
"step": 136
},
{
"epoch": 1.5393258426966292,
"grad_norm": 0.5083698630332947,
"learning_rate": 8.750555348152299e-06,
"loss": 0.2129,
"step": 137
},
{
"epoch": 1.550561797752809,
"grad_norm": 0.45993131399154663,
"learning_rate": 8.72447028295811e-06,
"loss": 0.2094,
"step": 138
},
{
"epoch": 1.5617977528089888,
"grad_norm": 0.5535938143730164,
"learning_rate": 8.69815547489305e-06,
"loss": 0.2375,
"step": 139
},
{
"epoch": 1.5730337078651684,
"grad_norm": 0.4734334349632263,
"learning_rate": 8.671612547178428e-06,
"loss": 0.1985,
"step": 140
},
{
"epoch": 1.5842696629213484,
"grad_norm": 0.47312143445014954,
"learning_rate": 8.644843137107058e-06,
"loss": 0.1891,
"step": 141
},
{
"epoch": 1.595505617977528,
"grad_norm": 0.44169965386390686,
"learning_rate": 8.617848895942246e-06,
"loss": 0.189,
"step": 142
},
{
"epoch": 1.606741573033708,
"grad_norm": 0.48934826254844666,
"learning_rate": 8.590631488815945e-06,
"loss": 0.2177,
"step": 143
},
{
"epoch": 1.6179775280898876,
"grad_norm": 0.5859401822090149,
"learning_rate": 8.563192594626027e-06,
"loss": 0.2244,
"step": 144
},
{
"epoch": 1.6292134831460674,
"grad_norm": 0.5161330103874207,
"learning_rate": 8.535533905932739e-06,
"loss": 0.2016,
"step": 145
},
{
"epoch": 1.6404494382022472,
"grad_norm": 0.5324240922927856,
"learning_rate": 8.50765712885428e-06,
"loss": 0.2082,
"step": 146
},
{
"epoch": 1.651685393258427,
"grad_norm": 0.5002866983413696,
"learning_rate": 8.479563982961572e-06,
"loss": 0.2072,
"step": 147
},
{
"epoch": 1.6629213483146068,
"grad_norm": 0.5366979241371155,
"learning_rate": 8.451256201172186e-06,
"loss": 0.2255,
"step": 148
},
{
"epoch": 1.6741573033707864,
"grad_norm": 0.5217868685722351,
"learning_rate": 8.422735529643445e-06,
"loss": 0.1992,
"step": 149
},
{
"epoch": 1.6853932584269664,
"grad_norm": 0.4968906342983246,
"learning_rate": 8.39400372766471e-06,
"loss": 0.1907,
"step": 150
},
{
"epoch": 1.696629213483146,
"grad_norm": 0.46640124917030334,
"learning_rate": 8.365062567548868e-06,
"loss": 0.2149,
"step": 151
},
{
"epoch": 1.7078651685393258,
"grad_norm": 0.4526020288467407,
"learning_rate": 8.335913834522999e-06,
"loss": 0.1977,
"step": 152
},
{
"epoch": 1.7191011235955056,
"grad_norm": 0.5012973546981812,
"learning_rate": 8.30655932661826e-06,
"loss": 0.2005,
"step": 153
},
{
"epoch": 1.7303370786516854,
"grad_norm": 0.49397850036621094,
"learning_rate": 8.27700085455897e-06,
"loss": 0.2307,
"step": 154
},
{
"epoch": 1.7415730337078652,
"grad_norm": 0.4788162410259247,
"learning_rate": 8.247240241650918e-06,
"loss": 0.189,
"step": 155
},
{
"epoch": 1.7528089887640448,
"grad_norm": 0.5018126368522644,
"learning_rate": 8.217279323668895e-06,
"loss": 0.2237,
"step": 156
},
{
"epoch": 1.7640449438202248,
"grad_norm": 0.5071457028388977,
"learning_rate": 8.18711994874345e-06,
"loss": 0.2058,
"step": 157
},
{
"epoch": 1.7752808988764044,
"grad_norm": 0.4464392066001892,
"learning_rate": 8.15676397724689e-06,
"loss": 0.2093,
"step": 158
},
{
"epoch": 1.7865168539325844,
"grad_norm": 0.6191111207008362,
"learning_rate": 8.126213281678527e-06,
"loss": 0.2286,
"step": 159
},
{
"epoch": 1.797752808988764,
"grad_norm": 0.4710521399974823,
"learning_rate": 8.095469746549172e-06,
"loss": 0.2043,
"step": 160
},
{
"epoch": 1.8089887640449438,
"grad_norm": 0.47517672181129456,
"learning_rate": 8.064535268264883e-06,
"loss": 0.2182,
"step": 161
},
{
"epoch": 1.8202247191011236,
"grad_norm": 0.48008614778518677,
"learning_rate": 8.033411755009999e-06,
"loss": 0.2221,
"step": 162
},
{
"epoch": 1.8314606741573034,
"grad_norm": 0.4443986415863037,
"learning_rate": 8.002101126629422e-06,
"loss": 0.1951,
"step": 163
},
{
"epoch": 1.8426966292134832,
"grad_norm": 0.4856914281845093,
"learning_rate": 7.970605314510194e-06,
"loss": 0.1888,
"step": 164
},
{
"epoch": 1.8539325842696628,
"grad_norm": 0.4712364077568054,
"learning_rate": 7.938926261462366e-06,
"loss": 0.1907,
"step": 165
},
{
"epoch": 1.8651685393258428,
"grad_norm": 0.486104279756546,
"learning_rate": 7.907065921599153e-06,
"loss": 0.2124,
"step": 166
},
{
"epoch": 1.8764044943820224,
"grad_norm": 0.5030559301376343,
"learning_rate": 7.875026260216395e-06,
"loss": 0.2129,
"step": 167
},
{
"epoch": 1.8876404494382022,
"grad_norm": 0.5313302278518677,
"learning_rate": 7.842809253671321e-06,
"loss": 0.2208,
"step": 168
},
{
"epoch": 1.898876404494382,
"grad_norm": 0.4822382926940918,
"learning_rate": 7.810416889260653e-06,
"loss": 0.2106,
"step": 169
},
{
"epoch": 1.9101123595505618,
"grad_norm": 0.5109752416610718,
"learning_rate": 7.777851165098012e-06,
"loss": 0.2203,
"step": 170
},
{
"epoch": 1.9213483146067416,
"grad_norm": 0.49171018600463867,
"learning_rate": 7.74511408999066e-06,
"loss": 0.2057,
"step": 171
},
{
"epoch": 1.9325842696629212,
"grad_norm": 0.4889599680900574,
"learning_rate": 7.712207683315595e-06,
"loss": 0.1964,
"step": 172
},
{
"epoch": 1.9438202247191012,
"grad_norm": 0.48753100633621216,
"learning_rate": 7.679133974894984e-06,
"loss": 0.2192,
"step": 173
},
{
"epoch": 1.9550561797752808,
"grad_norm": 0.47565633058547974,
"learning_rate": 7.645895004870953e-06,
"loss": 0.2104,
"step": 174
},
{
"epoch": 1.9662921348314608,
"grad_norm": 0.49253159761428833,
"learning_rate": 7.612492823579744e-06,
"loss": 0.1932,
"step": 175
},
{
"epoch": 1.9775280898876404,
"grad_norm": 0.46585404872894287,
"learning_rate": 7.5789294914252376e-06,
"loss": 0.2148,
"step": 176
},
{
"epoch": 1.9887640449438202,
"grad_norm": 0.4452122449874878,
"learning_rate": 7.545207078751858e-06,
"loss": 0.1978,
"step": 177
},
{
"epoch": 2.0,
"grad_norm": 0.503316342830658,
"learning_rate": 7.511327665716863e-06,
"loss": 0.1975,
"step": 178
},
{
"epoch": 2.0112359550561796,
"grad_norm": 0.5127564668655396,
"learning_rate": 7.477293342162038e-06,
"loss": 0.1344,
"step": 179
},
{
"epoch": 2.0224719101123596,
"grad_norm": 0.46806156635284424,
"learning_rate": 7.443106207484776e-06,
"loss": 0.1438,
"step": 180
},
{
"epoch": 2.033707865168539,
"grad_norm": 0.4583097994327545,
"learning_rate": 7.408768370508577e-06,
"loss": 0.1383,
"step": 181
},
{
"epoch": 2.044943820224719,
"grad_norm": 0.41952410340309143,
"learning_rate": 7.3742819493529725e-06,
"loss": 0.1247,
"step": 182
},
{
"epoch": 2.056179775280899,
"grad_norm": 0.6133317947387695,
"learning_rate": 7.3396490713028674e-06,
"loss": 0.1415,
"step": 183
},
{
"epoch": 2.067415730337079,
"grad_norm": 0.5251442193984985,
"learning_rate": 7.304871872677313e-06,
"loss": 0.1214,
"step": 184
},
{
"epoch": 2.0786516853932584,
"grad_norm": 0.4410766065120697,
"learning_rate": 7.269952498697734e-06,
"loss": 0.1281,
"step": 185
},
{
"epoch": 2.0898876404494384,
"grad_norm": 0.4335516393184662,
"learning_rate": 7.2348931033556065e-06,
"loss": 0.1389,
"step": 186
},
{
"epoch": 2.101123595505618,
"grad_norm": 0.6064698100090027,
"learning_rate": 7.199695849279576e-06,
"loss": 0.1848,
"step": 187
},
{
"epoch": 2.1123595505617976,
"grad_norm": 0.45688655972480774,
"learning_rate": 7.164362907602072e-06,
"loss": 0.1286,
"step": 188
},
{
"epoch": 2.1235955056179776,
"grad_norm": 0.4466540515422821,
"learning_rate": 7.128896457825364e-06,
"loss": 0.1317,
"step": 189
},
{
"epoch": 2.134831460674157,
"grad_norm": 0.4052949547767639,
"learning_rate": 7.093298687687141e-06,
"loss": 0.1149,
"step": 190
},
{
"epoch": 2.146067415730337,
"grad_norm": 0.48267531394958496,
"learning_rate": 7.057571793025545e-06,
"loss": 0.1427,
"step": 191
},
{
"epoch": 2.157303370786517,
"grad_norm": 0.4643622934818268,
"learning_rate": 7.021717977643726e-06,
"loss": 0.1324,
"step": 192
},
{
"epoch": 2.168539325842697,
"grad_norm": 0.5360526442527771,
"learning_rate": 6.985739453173903e-06,
"loss": 0.1501,
"step": 193
},
{
"epoch": 2.1797752808988764,
"grad_norm": 0.4406614899635315,
"learning_rate": 6.949638438940942e-06,
"loss": 0.1331,
"step": 194
},
{
"epoch": 2.191011235955056,
"grad_norm": 0.46032071113586426,
"learning_rate": 6.913417161825449e-06,
"loss": 0.1367,
"step": 195
},
{
"epoch": 2.202247191011236,
"grad_norm": 0.4472227990627289,
"learning_rate": 6.877077856126416e-06,
"loss": 0.1562,
"step": 196
},
{
"epoch": 2.2134831460674156,
"grad_norm": 0.45712995529174805,
"learning_rate": 6.840622763423391e-06,
"loss": 0.1276,
"step": 197
},
{
"epoch": 2.2247191011235956,
"grad_norm": 0.43871307373046875,
"learning_rate": 6.804054132438209e-06,
"loss": 0.1413,
"step": 198
},
{
"epoch": 2.235955056179775,
"grad_norm": 0.43786612153053284,
"learning_rate": 6.767374218896286e-06,
"loss": 0.1405,
"step": 199
},
{
"epoch": 2.247191011235955,
"grad_norm": 0.4727621376514435,
"learning_rate": 6.730585285387465e-06,
"loss": 0.1415,
"step": 200
},
{
"epoch": 2.258426966292135,
"grad_norm": 0.4680100679397583,
"learning_rate": 6.693689601226458e-06,
"loss": 0.1238,
"step": 201
},
{
"epoch": 2.2696629213483144,
"grad_norm": 0.4155004918575287,
"learning_rate": 6.656689442312855e-06,
"loss": 0.133,
"step": 202
},
{
"epoch": 2.2808988764044944,
"grad_norm": 0.45196136832237244,
"learning_rate": 6.619587090990748e-06,
"loss": 0.1355,
"step": 203
},
{
"epoch": 2.292134831460674,
"grad_norm": 0.3976186513900757,
"learning_rate": 6.582384835907931e-06,
"loss": 0.1155,
"step": 204
},
{
"epoch": 2.303370786516854,
"grad_norm": 0.39685821533203125,
"learning_rate": 6.545084971874738e-06,
"loss": 0.1413,
"step": 205
},
{
"epoch": 2.3146067415730336,
"grad_norm": 0.44746285676956177,
"learning_rate": 6.507689799722479e-06,
"loss": 0.1323,
"step": 206
},
{
"epoch": 2.3258426966292136,
"grad_norm": 0.45192277431488037,
"learning_rate": 6.47020162616152e-06,
"loss": 0.1244,
"step": 207
},
{
"epoch": 2.337078651685393,
"grad_norm": 0.367249071598053,
"learning_rate": 6.432622763638993e-06,
"loss": 0.1111,
"step": 208
},
{
"epoch": 2.348314606741573,
"grad_norm": 0.42031487822532654,
"learning_rate": 6.3949555301961474e-06,
"loss": 0.13,
"step": 209
},
{
"epoch": 2.359550561797753,
"grad_norm": 0.4596504271030426,
"learning_rate": 6.3572022493253715e-06,
"loss": 0.1328,
"step": 210
},
{
"epoch": 2.370786516853933,
"grad_norm": 0.45772069692611694,
"learning_rate": 6.3193652498268656e-06,
"loss": 0.136,
"step": 211
},
{
"epoch": 2.3820224719101124,
"grad_norm": 0.40773630142211914,
"learning_rate": 6.281446865664984e-06,
"loss": 0.1124,
"step": 212
},
{
"epoch": 2.393258426966292,
"grad_norm": 0.5169959664344788,
"learning_rate": 6.243449435824276e-06,
"loss": 0.1576,
"step": 213
},
{
"epoch": 2.404494382022472,
"grad_norm": 0.41499051451683044,
"learning_rate": 6.205375304165194e-06,
"loss": 0.1261,
"step": 214
},
{
"epoch": 2.4157303370786516,
"grad_norm": 0.41324612498283386,
"learning_rate": 6.1672268192795285e-06,
"loss": 0.1175,
"step": 215
},
{
"epoch": 2.4269662921348316,
"grad_norm": 0.467218816280365,
"learning_rate": 6.1290063343455196e-06,
"loss": 0.14,
"step": 216
},
{
"epoch": 2.438202247191011,
"grad_norm": 0.41227272152900696,
"learning_rate": 6.090716206982714e-06,
"loss": 0.1274,
"step": 217
},
{
"epoch": 2.449438202247191,
"grad_norm": 0.40290334820747375,
"learning_rate": 6.052358799106528e-06,
"loss": 0.1429,
"step": 218
},
{
"epoch": 2.460674157303371,
"grad_norm": 0.41341057419776917,
"learning_rate": 6.013936476782563e-06,
"loss": 0.1166,
"step": 219
},
{
"epoch": 2.4719101123595504,
"grad_norm": 0.4011130630970001,
"learning_rate": 5.975451610080643e-06,
"loss": 0.1278,
"step": 220
},
{
"epoch": 2.4831460674157304,
"grad_norm": 0.47281768918037415,
"learning_rate": 5.936906572928625e-06,
"loss": 0.1359,
"step": 221
},
{
"epoch": 2.49438202247191,
"grad_norm": 0.4146575629711151,
"learning_rate": 5.898303742965964e-06,
"loss": 0.1427,
"step": 222
},
{
"epoch": 2.50561797752809,
"grad_norm": 0.4124404191970825,
"learning_rate": 5.859645501397048e-06,
"loss": 0.1274,
"step": 223
},
{
"epoch": 2.5168539325842696,
"grad_norm": 0.3924229145050049,
"learning_rate": 5.820934232844315e-06,
"loss": 0.1296,
"step": 224
},
{
"epoch": 2.5280898876404496,
"grad_norm": 0.40923216938972473,
"learning_rate": 5.782172325201155e-06,
"loss": 0.1206,
"step": 225
},
{
"epoch": 2.539325842696629,
"grad_norm": 0.4435917139053345,
"learning_rate": 5.743362169484617e-06,
"loss": 0.1227,
"step": 226
},
{
"epoch": 2.550561797752809,
"grad_norm": 0.4076763689517975,
"learning_rate": 5.704506159687914e-06,
"loss": 0.1237,
"step": 227
},
{
"epoch": 2.561797752808989,
"grad_norm": 0.4328089654445648,
"learning_rate": 5.665606692632762e-06,
"loss": 0.1429,
"step": 228
},
{
"epoch": 2.5730337078651684,
"grad_norm": 0.4347201883792877,
"learning_rate": 5.626666167821522e-06,
"loss": 0.1389,
"step": 229
},
{
"epoch": 2.5842696629213484,
"grad_norm": 0.4432125389575958,
"learning_rate": 5.587686987289189e-06,
"loss": 0.1348,
"step": 230
},
{
"epoch": 2.595505617977528,
"grad_norm": 0.4269976317882538,
"learning_rate": 5.548671555455226e-06,
"loss": 0.1288,
"step": 231
},
{
"epoch": 2.606741573033708,
"grad_norm": 0.4509507119655609,
"learning_rate": 5.50962227897525e-06,
"loss": 0.1439,
"step": 232
},
{
"epoch": 2.6179775280898876,
"grad_norm": 0.48018571734428406,
"learning_rate": 5.470541566592573e-06,
"loss": 0.1472,
"step": 233
},
{
"epoch": 2.629213483146067,
"grad_norm": 0.42168352007865906,
"learning_rate": 5.431431828989618e-06,
"loss": 0.1365,
"step": 234
},
{
"epoch": 2.640449438202247,
"grad_norm": 0.4211483895778656,
"learning_rate": 5.392295478639226e-06,
"loss": 0.1362,
"step": 235
},
{
"epoch": 2.6516853932584272,
"grad_norm": 0.39289364218711853,
"learning_rate": 5.353134929655834e-06,
"loss": 0.1283,
"step": 236
},
{
"epoch": 2.662921348314607,
"grad_norm": 0.4453902840614319,
"learning_rate": 5.3139525976465675e-06,
"loss": 0.1359,
"step": 237
},
{
"epoch": 2.6741573033707864,
"grad_norm": 0.4308948516845703,
"learning_rate": 5.27475089956223e-06,
"loss": 0.1349,
"step": 238
},
{
"epoch": 2.6853932584269664,
"grad_norm": 0.41243335604667664,
"learning_rate": 5.235532253548213e-06,
"loss": 0.133,
"step": 239
},
{
"epoch": 2.696629213483146,
"grad_norm": 0.4549100995063782,
"learning_rate": 5.1962990787953436e-06,
"loss": 0.1345,
"step": 240
},
{
"epoch": 2.7078651685393256,
"grad_norm": 0.40238845348358154,
"learning_rate": 5.157053795390642e-06,
"loss": 0.1124,
"step": 241
},
{
"epoch": 2.7191011235955056,
"grad_norm": 0.4336741864681244,
"learning_rate": 5.117798824168052e-06,
"loss": 0.1291,
"step": 242
},
{
"epoch": 2.7303370786516856,
"grad_norm": 0.4185507595539093,
"learning_rate": 5.078536586559104e-06,
"loss": 0.1446,
"step": 243
},
{
"epoch": 2.741573033707865,
"grad_norm": 0.4253235161304474,
"learning_rate": 5.039269504443557e-06,
"loss": 0.1446,
"step": 244
},
{
"epoch": 2.752808988764045,
"grad_norm": 0.4910286068916321,
"learning_rate": 5e-06,
"loss": 0.1451,
"step": 245
},
{
"epoch": 2.764044943820225,
"grad_norm": 0.4122835397720337,
"learning_rate": 4.9607304955564456e-06,
"loss": 0.1125,
"step": 246
},
{
"epoch": 2.7752808988764044,
"grad_norm": 0.41325777769088745,
"learning_rate": 4.921463413440898e-06,
"loss": 0.1324,
"step": 247
},
{
"epoch": 2.7865168539325844,
"grad_norm": 0.43836483359336853,
"learning_rate": 4.88220117583195e-06,
"loss": 0.1474,
"step": 248
},
{
"epoch": 2.797752808988764,
"grad_norm": 0.3876822590827942,
"learning_rate": 4.842946204609359e-06,
"loss": 0.1362,
"step": 249
},
{
"epoch": 2.808988764044944,
"grad_norm": 0.44368717074394226,
"learning_rate": 4.803700921204659e-06,
"loss": 0.1511,
"step": 250
},
{
"epoch": 2.8202247191011236,
"grad_norm": 0.43904393911361694,
"learning_rate": 4.7644677464517874e-06,
"loss": 0.1285,
"step": 251
},
{
"epoch": 2.831460674157303,
"grad_norm": 0.43354493379592896,
"learning_rate": 4.725249100437773e-06,
"loss": 0.1237,
"step": 252
},
{
"epoch": 2.842696629213483,
"grad_norm": 0.4428333044052124,
"learning_rate": 4.686047402353433e-06,
"loss": 0.1338,
"step": 253
},
{
"epoch": 2.853932584269663,
"grad_norm": 0.4285905063152313,
"learning_rate": 4.646865070344168e-06,
"loss": 0.1382,
"step": 254
},
{
"epoch": 2.865168539325843,
"grad_norm": 0.44894492626190186,
"learning_rate": 4.6077045213607765e-06,
"loss": 0.14,
"step": 255
},
{
"epoch": 2.8764044943820224,
"grad_norm": 0.4365105628967285,
"learning_rate": 4.568568171010384e-06,
"loss": 0.1254,
"step": 256
},
{
"epoch": 2.8876404494382024,
"grad_norm": 0.4431806802749634,
"learning_rate": 4.529458433407429e-06,
"loss": 0.1389,
"step": 257
},
{
"epoch": 2.898876404494382,
"grad_norm": 0.4079169034957886,
"learning_rate": 4.490377721024751e-06,
"loss": 0.1129,
"step": 258
},
{
"epoch": 2.9101123595505616,
"grad_norm": 0.38153865933418274,
"learning_rate": 4.451328444544774e-06,
"loss": 0.1341,
"step": 259
},
{
"epoch": 2.9213483146067416,
"grad_norm": 0.4263458847999573,
"learning_rate": 4.4123130127108125e-06,
"loss": 0.146,
"step": 260
},
{
"epoch": 2.932584269662921,
"grad_norm": 0.41687625646591187,
"learning_rate": 4.373333832178478e-06,
"loss": 0.1394,
"step": 261
},
{
"epoch": 2.943820224719101,
"grad_norm": 0.40404847264289856,
"learning_rate": 4.3343933073672395e-06,
"loss": 0.1179,
"step": 262
},
{
"epoch": 2.955056179775281,
"grad_norm": 0.3679511547088623,
"learning_rate": 4.295493840312087e-06,
"loss": 0.1073,
"step": 263
},
{
"epoch": 2.966292134831461,
"grad_norm": 0.46775326132774353,
"learning_rate": 4.256637830515385e-06,
"loss": 0.1409,
"step": 264
},
{
"epoch": 2.9775280898876404,
"grad_norm": 0.4219604730606079,
"learning_rate": 4.217827674798845e-06,
"loss": 0.1252,
"step": 265
},
{
"epoch": 2.98876404494382,
"grad_norm": 0.4047105610370636,
"learning_rate": 4.179065767155686e-06,
"loss": 0.1349,
"step": 266
},
{
"epoch": 3.0,
"grad_norm": 0.4388047456741333,
"learning_rate": 4.140354498602952e-06,
"loss": 0.1283,
"step": 267
},
{
"epoch": 3.0112359550561796,
"grad_norm": 0.5455737709999084,
"learning_rate": 4.1016962570340375e-06,
"loss": 0.0822,
"step": 268
},
{
"epoch": 3.0224719101123596,
"grad_norm": 0.5572327375411987,
"learning_rate": 4.063093427071376e-06,
"loss": 0.0927,
"step": 269
},
{
"epoch": 3.033707865168539,
"grad_norm": 0.46223390102386475,
"learning_rate": 4.02454838991936e-06,
"loss": 0.0884,
"step": 270
},
{
"epoch": 3.044943820224719,
"grad_norm": 0.4062277376651764,
"learning_rate": 3.986063523217439e-06,
"loss": 0.0819,
"step": 271
},
{
"epoch": 3.056179775280899,
"grad_norm": 0.4413997232913971,
"learning_rate": 3.947641200893473e-06,
"loss": 0.0712,
"step": 272
},
{
"epoch": 3.067415730337079,
"grad_norm": 0.7831788659095764,
"learning_rate": 3.909283793017289e-06,
"loss": 0.0899,
"step": 273
},
{
"epoch": 3.0786516853932584,
"grad_norm": 0.6694844365119934,
"learning_rate": 3.870993665654482e-06,
"loss": 0.0899,
"step": 274
},
{
"epoch": 3.0898876404494384,
"grad_norm": 0.44210654497146606,
"learning_rate": 3.832773180720475e-06,
"loss": 0.0809,
"step": 275
},
{
"epoch": 3.101123595505618,
"grad_norm": 0.37950965762138367,
"learning_rate": 3.7946246958348077e-06,
"loss": 0.0742,
"step": 276
},
{
"epoch": 3.1123595505617976,
"grad_norm": 0.3797522783279419,
"learning_rate": 3.756550564175727e-06,
"loss": 0.0689,
"step": 277
},
{
"epoch": 3.1235955056179776,
"grad_norm": 0.39495426416397095,
"learning_rate": 3.7185531343350167e-06,
"loss": 0.0724,
"step": 278
},
{
"epoch": 3.134831460674157,
"grad_norm": 0.466620534658432,
"learning_rate": 3.680634750173137e-06,
"loss": 0.0867,
"step": 279
},
{
"epoch": 3.146067415730337,
"grad_norm": 0.445311576128006,
"learning_rate": 3.6427977506746293e-06,
"loss": 0.0705,
"step": 280
},
{
"epoch": 3.157303370786517,
"grad_norm": 0.41297537088394165,
"learning_rate": 3.6050444698038547e-06,
"loss": 0.0852,
"step": 281
},
{
"epoch": 3.168539325842697,
"grad_norm": 0.3536671996116638,
"learning_rate": 3.5673772363610083e-06,
"loss": 0.0639,
"step": 282
},
{
"epoch": 3.1797752808988764,
"grad_norm": 0.39393407106399536,
"learning_rate": 3.5297983738384813e-06,
"loss": 0.0753,
"step": 283
},
{
"epoch": 3.191011235955056,
"grad_norm": 0.4105747938156128,
"learning_rate": 3.492310200277522e-06,
"loss": 0.0704,
"step": 284
},
{
"epoch": 3.202247191011236,
"grad_norm": 0.4793269634246826,
"learning_rate": 3.4549150281252635e-06,
"loss": 0.0757,
"step": 285
},
{
"epoch": 3.2134831460674156,
"grad_norm": 0.4588974118232727,
"learning_rate": 3.4176151640920696e-06,
"loss": 0.0869,
"step": 286
},
{
"epoch": 3.2247191011235956,
"grad_norm": 0.48513558506965637,
"learning_rate": 3.3804129090092542e-06,
"loss": 0.078,
"step": 287
},
{
"epoch": 3.235955056179775,
"grad_norm": 0.38135215640068054,
"learning_rate": 3.3433105576871448e-06,
"loss": 0.0754,
"step": 288
},
{
"epoch": 3.247191011235955,
"grad_norm": 0.40388548374176025,
"learning_rate": 3.3063103987735433e-06,
"loss": 0.0713,
"step": 289
},
{
"epoch": 3.258426966292135,
"grad_norm": 0.3911704123020172,
"learning_rate": 3.269414714612534e-06,
"loss": 0.0761,
"step": 290
},
{
"epoch": 3.2696629213483144,
"grad_norm": 0.4082418978214264,
"learning_rate": 3.2326257811037154e-06,
"loss": 0.0724,
"step": 291
},
{
"epoch": 3.2808988764044944,
"grad_norm": 0.4108784794807434,
"learning_rate": 3.195945867561791e-06,
"loss": 0.0759,
"step": 292
},
{
"epoch": 3.292134831460674,
"grad_norm": 0.3417165279388428,
"learning_rate": 3.1593772365766107e-06,
"loss": 0.0661,
"step": 293
},
{
"epoch": 3.303370786516854,
"grad_norm": 0.3870854675769806,
"learning_rate": 3.122922143873584e-06,
"loss": 0.0797,
"step": 294
},
{
"epoch": 3.3146067415730336,
"grad_norm": 0.4093579947948456,
"learning_rate": 3.0865828381745515e-06,
"loss": 0.0774,
"step": 295
},
{
"epoch": 3.3258426966292136,
"grad_norm": 0.3988387882709503,
"learning_rate": 3.0503615610590605e-06,
"loss": 0.084,
"step": 296
},
{
"epoch": 3.337078651685393,
"grad_norm": 0.3742392957210541,
"learning_rate": 3.0142605468260976e-06,
"loss": 0.0775,
"step": 297
},
{
"epoch": 3.348314606741573,
"grad_norm": 0.39700236916542053,
"learning_rate": 2.9782820223562758e-06,
"loss": 0.0721,
"step": 298
},
{
"epoch": 3.359550561797753,
"grad_norm": 0.4066769778728485,
"learning_rate": 2.9424282069744564e-06,
"loss": 0.0809,
"step": 299
},
{
"epoch": 3.370786516853933,
"grad_norm": 0.37812596559524536,
"learning_rate": 2.906701312312861e-06,
"loss": 0.0763,
"step": 300
},
{
"epoch": 3.3820224719101124,
"grad_norm": 0.4282001852989197,
"learning_rate": 2.871103542174637e-06,
"loss": 0.0769,
"step": 301
},
{
"epoch": 3.393258426966292,
"grad_norm": 0.4477894604206085,
"learning_rate": 2.8356370923979326e-06,
"loss": 0.0859,
"step": 302
},
{
"epoch": 3.404494382022472,
"grad_norm": 0.36554381251335144,
"learning_rate": 2.800304150720424e-06,
"loss": 0.075,
"step": 303
},
{
"epoch": 3.4157303370786516,
"grad_norm": 0.398842990398407,
"learning_rate": 2.765106896644395e-06,
"loss": 0.0832,
"step": 304
},
{
"epoch": 3.4269662921348316,
"grad_norm": 0.36652693152427673,
"learning_rate": 2.7300475013022666e-06,
"loss": 0.0661,
"step": 305
},
{
"epoch": 3.438202247191011,
"grad_norm": 0.3799467384815216,
"learning_rate": 2.6951281273226894e-06,
"loss": 0.0756,
"step": 306
},
{
"epoch": 3.449438202247191,
"grad_norm": 0.39734625816345215,
"learning_rate": 2.6603509286971342e-06,
"loss": 0.0795,
"step": 307
},
{
"epoch": 3.460674157303371,
"grad_norm": 0.36823272705078125,
"learning_rate": 2.6257180506470283e-06,
"loss": 0.0771,
"step": 308
},
{
"epoch": 3.4719101123595504,
"grad_norm": 0.4019593298435211,
"learning_rate": 2.5912316294914232e-06,
"loss": 0.0847,
"step": 309
},
{
"epoch": 3.4831460674157304,
"grad_norm": 0.3722269833087921,
"learning_rate": 2.5568937925152272e-06,
"loss": 0.0786,
"step": 310
},
{
"epoch": 3.49438202247191,
"grad_norm": 0.4078711271286011,
"learning_rate": 2.5227066578379624e-06,
"loss": 0.0872,
"step": 311
},
{
"epoch": 3.50561797752809,
"grad_norm": 0.3710861802101135,
"learning_rate": 2.4886723342831375e-06,
"loss": 0.0863,
"step": 312
},
{
"epoch": 3.5168539325842696,
"grad_norm": 0.36387649178504944,
"learning_rate": 2.4547929212481436e-06,
"loss": 0.0741,
"step": 313
},
{
"epoch": 3.5280898876404496,
"grad_norm": 0.33584874868392944,
"learning_rate": 2.4210705085747633e-06,
"loss": 0.0808,
"step": 314
},
{
"epoch": 3.539325842696629,
"grad_norm": 0.3632005453109741,
"learning_rate": 2.387507176420256e-06,
"loss": 0.0639,
"step": 315
},
{
"epoch": 3.550561797752809,
"grad_norm": 0.3687782883644104,
"learning_rate": 2.354104995129048e-06,
"loss": 0.0746,
"step": 316
},
{
"epoch": 3.561797752808989,
"grad_norm": 0.3989294469356537,
"learning_rate": 2.320866025105016e-06,
"loss": 0.0653,
"step": 317
},
{
"epoch": 3.5730337078651684,
"grad_norm": 0.33881452679634094,
"learning_rate": 2.2877923166844073e-06,
"loss": 0.0732,
"step": 318
},
{
"epoch": 3.5842696629213484,
"grad_norm": 0.3820963203907013,
"learning_rate": 2.254885910009341e-06,
"loss": 0.0653,
"step": 319
},
{
"epoch": 3.595505617977528,
"grad_norm": 0.42072489857673645,
"learning_rate": 2.2221488349019903e-06,
"loss": 0.0815,
"step": 320
},
{
"epoch": 3.606741573033708,
"grad_norm": 0.3743472993373871,
"learning_rate": 2.1895831107393485e-06,
"loss": 0.076,
"step": 321
},
{
"epoch": 3.6179775280898876,
"grad_norm": 0.3808266818523407,
"learning_rate": 2.15719074632868e-06,
"loss": 0.0705,
"step": 322
},
{
"epoch": 3.629213483146067,
"grad_norm": 0.4201536774635315,
"learning_rate": 2.124973739783609e-06,
"loss": 0.0859,
"step": 323
},
{
"epoch": 3.640449438202247,
"grad_norm": 0.38096362352371216,
"learning_rate": 2.0929340784008474e-06,
"loss": 0.074,
"step": 324
},
{
"epoch": 3.6516853932584272,
"grad_norm": 0.39911383390426636,
"learning_rate": 2.061073738537635e-06,
"loss": 0.0805,
"step": 325
},
{
"epoch": 3.662921348314607,
"grad_norm": 0.32940348982810974,
"learning_rate": 2.029394685489808e-06,
"loss": 0.0616,
"step": 326
},
{
"epoch": 3.6741573033707864,
"grad_norm": 0.3778252601623535,
"learning_rate": 1.9978988733705807e-06,
"loss": 0.0872,
"step": 327
},
{
"epoch": 3.6853932584269664,
"grad_norm": 0.3902676999568939,
"learning_rate": 1.9665882449900024e-06,
"loss": 0.0656,
"step": 328
},
{
"epoch": 3.696629213483146,
"grad_norm": 0.3568686544895172,
"learning_rate": 1.9354647317351187e-06,
"loss": 0.0631,
"step": 329
},
{
"epoch": 3.7078651685393256,
"grad_norm": 0.38821014761924744,
"learning_rate": 1.9045302534508298e-06,
"loss": 0.0737,
"step": 330
},
{
"epoch": 3.7191011235955056,
"grad_norm": 0.3225777745246887,
"learning_rate": 1.873786718321476e-06,
"loss": 0.0652,
"step": 331
},
{
"epoch": 3.7303370786516856,
"grad_norm": 0.33293595910072327,
"learning_rate": 1.8432360227531116e-06,
"loss": 0.0667,
"step": 332
},
{
"epoch": 3.741573033707865,
"grad_norm": 0.38181042671203613,
"learning_rate": 1.8128800512565514e-06,
"loss": 0.0766,
"step": 333
},
{
"epoch": 3.752808988764045,
"grad_norm": 0.35045260190963745,
"learning_rate": 1.7827206763311055e-06,
"loss": 0.0721,
"step": 334
},
{
"epoch": 3.764044943820225,
"grad_norm": 0.3418102562427521,
"learning_rate": 1.7527597583490825e-06,
"loss": 0.0818,
"step": 335
},
{
"epoch": 3.7752808988764044,
"grad_norm": 0.33109530806541443,
"learning_rate": 1.722999145441031e-06,
"loss": 0.064,
"step": 336
},
{
"epoch": 3.7865168539325844,
"grad_norm": 0.37899285554885864,
"learning_rate": 1.6934406733817417e-06,
"loss": 0.0739,
"step": 337
},
{
"epoch": 3.797752808988764,
"grad_norm": 0.3409174084663391,
"learning_rate": 1.6640861654770007e-06,
"loss": 0.0666,
"step": 338
},
{
"epoch": 3.808988764044944,
"grad_norm": 0.3983001708984375,
"learning_rate": 1.6349374324511347e-06,
"loss": 0.0817,
"step": 339
},
{
"epoch": 3.8202247191011236,
"grad_norm": 0.36114805936813354,
"learning_rate": 1.6059962723352912e-06,
"loss": 0.074,
"step": 340
},
{
"epoch": 3.831460674157303,
"grad_norm": 0.39180055260658264,
"learning_rate": 1.5772644703565564e-06,
"loss": 0.0776,
"step": 341
},
{
"epoch": 3.842696629213483,
"grad_norm": 0.37725961208343506,
"learning_rate": 1.5487437988278141e-06,
"loss": 0.0741,
"step": 342
},
{
"epoch": 3.853932584269663,
"grad_norm": 0.3951829671859741,
"learning_rate": 1.5204360170384286e-06,
"loss": 0.0751,
"step": 343
},
{
"epoch": 3.865168539325843,
"grad_norm": 0.41868656873703003,
"learning_rate": 1.4923428711457217e-06,
"loss": 0.0727,
"step": 344
},
{
"epoch": 3.8764044943820224,
"grad_norm": 0.35519301891326904,
"learning_rate": 1.4644660940672628e-06,
"loss": 0.0738,
"step": 345
},
{
"epoch": 3.8876404494382024,
"grad_norm": 0.41053545475006104,
"learning_rate": 1.4368074053739733e-06,
"loss": 0.089,
"step": 346
},
{
"epoch": 3.898876404494382,
"grad_norm": 0.4047039747238159,
"learning_rate": 1.4093685111840567e-06,
"loss": 0.0659,
"step": 347
},
{
"epoch": 3.9101123595505616,
"grad_norm": 0.37193068861961365,
"learning_rate": 1.382151104057754e-06,
"loss": 0.0718,
"step": 348
},
{
"epoch": 3.9213483146067416,
"grad_norm": 0.3433876037597656,
"learning_rate": 1.3551568628929434e-06,
"loss": 0.0608,
"step": 349
},
{
"epoch": 3.932584269662921,
"grad_norm": 0.3564971685409546,
"learning_rate": 1.3283874528215735e-06,
"loss": 0.0713,
"step": 350
},
{
"epoch": 3.943820224719101,
"grad_norm": 0.3533005118370056,
"learning_rate": 1.301844525106951e-06,
"loss": 0.0722,
"step": 351
},
{
"epoch": 3.955056179775281,
"grad_norm": 0.4084896147251129,
"learning_rate": 1.2755297170418913e-06,
"loss": 0.0759,
"step": 352
},
{
"epoch": 3.966292134831461,
"grad_norm": 0.3923269510269165,
"learning_rate": 1.2494446518477022e-06,
"loss": 0.0907,
"step": 353
},
{
"epoch": 3.9775280898876404,
"grad_norm": 0.3590816259384155,
"learning_rate": 1.2235909385740825e-06,
"loss": 0.0785,
"step": 354
},
{
"epoch": 3.98876404494382,
"grad_norm": 0.4006705582141876,
"learning_rate": 1.1979701719998454e-06,
"loss": 0.0826,
"step": 355
},
{
"epoch": 4.0,
"grad_norm": 0.3407679498195648,
"learning_rate": 1.1725839325345601e-06,
"loss": 0.0709,
"step": 356
},
{
"epoch": 4.01123595505618,
"grad_norm": 0.4902256727218628,
"learning_rate": 1.1474337861210543e-06,
"loss": 0.0404,
"step": 357
},
{
"epoch": 4.022471910112359,
"grad_norm": 0.4681622087955475,
"learning_rate": 1.1225212841388282e-06,
"loss": 0.0441,
"step": 358
},
{
"epoch": 4.033707865168539,
"grad_norm": 0.47430193424224854,
"learning_rate": 1.097847963308351e-06,
"loss": 0.0503,
"step": 359
},
{
"epoch": 4.044943820224719,
"grad_norm": 0.4806967079639435,
"learning_rate": 1.0734153455962765e-06,
"loss": 0.0491,
"step": 360
},
{
"epoch": 4.056179775280899,
"grad_norm": 0.4137580990791321,
"learning_rate": 1.049224938121548e-06,
"loss": 0.0501,
"step": 361
},
{
"epoch": 4.067415730337078,
"grad_norm": 0.36525774002075195,
"learning_rate": 1.02527823306245e-06,
"loss": 0.0395,
"step": 362
},
{
"epoch": 4.078651685393258,
"grad_norm": 0.3174494206905365,
"learning_rate": 1.0015767075645472e-06,
"loss": 0.0328,
"step": 363
},
{
"epoch": 4.089887640449438,
"grad_norm": 0.31036630272865295,
"learning_rate": 9.781218236495776e-07,
"loss": 0.042,
"step": 364
},
{
"epoch": 4.101123595505618,
"grad_norm": 0.32368966937065125,
"learning_rate": 9.549150281252633e-07,
"loss": 0.0498,
"step": 365
},
{
"epoch": 4.112359550561798,
"grad_norm": 0.3594004511833191,
"learning_rate": 9.319577524960655e-07,
"loss": 0.0477,
"step": 366
},
{
"epoch": 4.123595505617978,
"grad_norm": 0.3772434890270233,
"learning_rate": 9.09251412874882e-07,
"loss": 0.0379,
"step": 367
},
{
"epoch": 4.134831460674158,
"grad_norm": 0.47112375497817993,
"learning_rate": 8.867974098957016e-07,
"loss": 0.0385,
"step": 368
},
{
"epoch": 4.146067415730337,
"grad_norm": 0.41787606477737427,
"learning_rate": 8.645971286271903e-07,
"loss": 0.0377,
"step": 369
},
{
"epoch": 4.157303370786517,
"grad_norm": 0.39339321851730347,
"learning_rate": 8.426519384872733e-07,
"loss": 0.0384,
"step": 370
},
{
"epoch": 4.168539325842697,
"grad_norm": 0.37326088547706604,
"learning_rate": 8.209631931586499e-07,
"loss": 0.044,
"step": 371
},
{
"epoch": 4.179775280898877,
"grad_norm": 0.3865525424480438,
"learning_rate": 7.995322305052905e-07,
"loss": 0.0501,
"step": 372
},
{
"epoch": 4.191011235955056,
"grad_norm": 0.3712690770626068,
"learning_rate": 7.783603724899258e-07,
"loss": 0.0402,
"step": 373
},
{
"epoch": 4.202247191011236,
"grad_norm": 0.3322262465953827,
"learning_rate": 7.574489250924821e-07,
"loss": 0.037,
"step": 374
},
{
"epoch": 4.213483146067416,
"grad_norm": 0.32212916016578674,
"learning_rate": 7.367991782295392e-07,
"loss": 0.0369,
"step": 375
},
{
"epoch": 4.224719101123595,
"grad_norm": 0.32061728835105896,
"learning_rate": 7.164124056747523e-07,
"loss": 0.0342,
"step": 376
},
{
"epoch": 4.235955056179775,
"grad_norm": 0.303558349609375,
"learning_rate": 6.962898649802824e-07,
"loss": 0.0418,
"step": 377
},
{
"epoch": 4.247191011235955,
"grad_norm": 0.32052311301231384,
"learning_rate": 6.764327973992252e-07,
"loss": 0.0442,
"step": 378
},
{
"epoch": 4.258426966292135,
"grad_norm": 0.31100547313690186,
"learning_rate": 6.568424278090446e-07,
"loss": 0.0385,
"step": 379
},
{
"epoch": 4.269662921348314,
"grad_norm": 0.32819902896881104,
"learning_rate": 6.375199646360142e-07,
"loss": 0.0506,
"step": 380
},
{
"epoch": 4.280898876404494,
"grad_norm": 0.33846989274024963,
"learning_rate": 6.184665997806832e-07,
"loss": 0.0348,
"step": 381
},
{
"epoch": 4.292134831460674,
"grad_norm": 0.31254467368125916,
"learning_rate": 5.996835085443403e-07,
"loss": 0.0421,
"step": 382
},
{
"epoch": 4.303370786516854,
"grad_norm": 0.3004555106163025,
"learning_rate": 5.811718495565327e-07,
"loss": 0.036,
"step": 383
},
{
"epoch": 4.314606741573034,
"grad_norm": 0.3517463803291321,
"learning_rate": 5.629327647035843e-07,
"loss": 0.0427,
"step": 384
},
{
"epoch": 4.325842696629214,
"grad_norm": 0.29264354705810547,
"learning_rate": 5.449673790581611e-07,
"loss": 0.0367,
"step": 385
},
{
"epoch": 4.337078651685394,
"grad_norm": 0.26809367537498474,
"learning_rate": 5.27276800809875e-07,
"loss": 0.0419,
"step": 386
},
{
"epoch": 4.348314606741573,
"grad_norm": 0.3042603135108948,
"learning_rate": 5.098621211969224e-07,
"loss": 0.0426,
"step": 387
},
{
"epoch": 4.359550561797753,
"grad_norm": 0.3065764904022217,
"learning_rate": 4.92724414438771e-07,
"loss": 0.0377,
"step": 388
},
{
"epoch": 4.370786516853933,
"grad_norm": 0.3297325074672699,
"learning_rate": 4.758647376699033e-07,
"loss": 0.0414,
"step": 389
},
{
"epoch": 4.382022471910112,
"grad_norm": 0.30828678607940674,
"learning_rate": 4.5928413087459325e-07,
"loss": 0.04,
"step": 390
},
{
"epoch": 4.393258426966292,
"grad_norm": 0.3171032965183258,
"learning_rate": 4.4298361682277355e-07,
"loss": 0.0396,
"step": 391
},
{
"epoch": 4.404494382022472,
"grad_norm": 0.2922931909561157,
"learning_rate": 4.269642010069319e-07,
"loss": 0.0373,
"step": 392
},
{
"epoch": 4.415730337078652,
"grad_norm": 0.30198633670806885,
"learning_rate": 4.112268715800943e-07,
"loss": 0.0454,
"step": 393
},
{
"epoch": 4.426966292134831,
"grad_norm": 0.2677709758281708,
"learning_rate": 3.957725992948691e-07,
"loss": 0.0378,
"step": 394
},
{
"epoch": 4.438202247191011,
"grad_norm": 0.3185364305973053,
"learning_rate": 3.8060233744356634e-07,
"loss": 0.0484,
"step": 395
},
{
"epoch": 4.449438202247191,
"grad_norm": 0.33234983682632446,
"learning_rate": 3.6571702179939604e-07,
"loss": 0.0534,
"step": 396
},
{
"epoch": 4.460674157303371,
"grad_norm": 0.2978450059890747,
"learning_rate": 3.511175705587433e-07,
"loss": 0.041,
"step": 397
},
{
"epoch": 4.47191011235955,
"grad_norm": 0.30524784326553345,
"learning_rate": 3.3680488428453005e-07,
"loss": 0.0455,
"step": 398
},
{
"epoch": 4.48314606741573,
"grad_norm": 0.31283247470855713,
"learning_rate": 3.227798458506637e-07,
"loss": 0.0493,
"step": 399
},
{
"epoch": 4.49438202247191,
"grad_norm": 0.3062666356563568,
"learning_rate": 3.0904332038757977e-07,
"loss": 0.045,
"step": 400
}
],
"logging_steps": 1,
"max_steps": 445,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 144825701957632.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}