{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9011703511053316,
"eval_steps": 500,
"global_step": 1386,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 0.0303955078125,
"learning_rate": 4.2553191489361704e-07,
"loss": 1.11,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 0.0294189453125,
"learning_rate": 8.510638297872341e-07,
"loss": 0.9825,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 0.028076171875,
"learning_rate": 1.276595744680851e-06,
"loss": 1.0375,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 0.036865234375,
"learning_rate": 1.7021276595744682e-06,
"loss": 1.042,
"step": 4
},
{
"epoch": 0.0,
"grad_norm": 0.0311279296875,
"learning_rate": 2.1276595744680853e-06,
"loss": 0.9769,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 0.036865234375,
"learning_rate": 2.553191489361702e-06,
"loss": 0.9316,
"step": 6
},
{
"epoch": 0.0,
"grad_norm": 0.0299072265625,
"learning_rate": 2.978723404255319e-06,
"loss": 1.0077,
"step": 7
},
{
"epoch": 0.01,
"grad_norm": 0.03369140625,
"learning_rate": 3.4042553191489363e-06,
"loss": 1.0346,
"step": 8
},
{
"epoch": 0.01,
"grad_norm": 0.0301513671875,
"learning_rate": 3.8297872340425535e-06,
"loss": 1.0193,
"step": 9
},
{
"epoch": 0.01,
"grad_norm": 0.0267333984375,
"learning_rate": 4.255319148936171e-06,
"loss": 1.0297,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 0.0291748046875,
"learning_rate": 4.680851063829788e-06,
"loss": 1.0868,
"step": 11
},
{
"epoch": 0.01,
"grad_norm": 0.0296630859375,
"learning_rate": 5.106382978723404e-06,
"loss": 1.0641,
"step": 12
},
{
"epoch": 0.01,
"grad_norm": 0.028564453125,
"learning_rate": 5.531914893617022e-06,
"loss": 0.9389,
"step": 13
},
{
"epoch": 0.01,
"grad_norm": 0.03515625,
"learning_rate": 5.957446808510638e-06,
"loss": 1.033,
"step": 14
},
{
"epoch": 0.01,
"grad_norm": 0.0283203125,
"learning_rate": 6.382978723404256e-06,
"loss": 1.0216,
"step": 15
},
{
"epoch": 0.01,
"grad_norm": 0.0322265625,
"learning_rate": 6.808510638297873e-06,
"loss": 1.1086,
"step": 16
},
{
"epoch": 0.01,
"grad_norm": 0.0322265625,
"learning_rate": 7.234042553191491e-06,
"loss": 1.0577,
"step": 17
},
{
"epoch": 0.01,
"grad_norm": 0.030517578125,
"learning_rate": 7.659574468085107e-06,
"loss": 1.0733,
"step": 18
},
{
"epoch": 0.01,
"grad_norm": 0.0303955078125,
"learning_rate": 8.085106382978723e-06,
"loss": 0.9865,
"step": 19
},
{
"epoch": 0.01,
"grad_norm": 0.0291748046875,
"learning_rate": 8.510638297872341e-06,
"loss": 1.0125,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 0.034423828125,
"learning_rate": 8.936170212765958e-06,
"loss": 1.1245,
"step": 21
},
{
"epoch": 0.01,
"grad_norm": 0.0260009765625,
"learning_rate": 9.361702127659576e-06,
"loss": 1.0024,
"step": 22
},
{
"epoch": 0.01,
"grad_norm": 0.0306396484375,
"learning_rate": 9.787234042553192e-06,
"loss": 1.0131,
"step": 23
},
{
"epoch": 0.02,
"grad_norm": 0.033447265625,
"learning_rate": 1.0212765957446808e-05,
"loss": 1.0171,
"step": 24
},
{
"epoch": 0.02,
"grad_norm": 0.033203125,
"learning_rate": 1.0638297872340426e-05,
"loss": 0.9613,
"step": 25
},
{
"epoch": 0.02,
"grad_norm": 0.03857421875,
"learning_rate": 1.1063829787234044e-05,
"loss": 1.1312,
"step": 26
},
{
"epoch": 0.02,
"grad_norm": 0.036865234375,
"learning_rate": 1.1489361702127662e-05,
"loss": 1.0187,
"step": 27
},
{
"epoch": 0.02,
"grad_norm": 0.03515625,
"learning_rate": 1.1914893617021277e-05,
"loss": 0.9934,
"step": 28
},
{
"epoch": 0.02,
"grad_norm": 0.036376953125,
"learning_rate": 1.2340425531914895e-05,
"loss": 1.0872,
"step": 29
},
{
"epoch": 0.02,
"grad_norm": 0.035888671875,
"learning_rate": 1.2765957446808513e-05,
"loss": 0.9591,
"step": 30
},
{
"epoch": 0.02,
"grad_norm": 0.033203125,
"learning_rate": 1.3191489361702127e-05,
"loss": 0.9589,
"step": 31
},
{
"epoch": 0.02,
"grad_norm": 0.040771484375,
"learning_rate": 1.3617021276595745e-05,
"loss": 1.0093,
"step": 32
},
{
"epoch": 0.02,
"grad_norm": 0.0400390625,
"learning_rate": 1.4042553191489363e-05,
"loss": 1.0195,
"step": 33
},
{
"epoch": 0.02,
"grad_norm": 0.038330078125,
"learning_rate": 1.4468085106382981e-05,
"loss": 0.8936,
"step": 34
},
{
"epoch": 0.02,
"grad_norm": 0.04443359375,
"learning_rate": 1.4893617021276596e-05,
"loss": 0.9958,
"step": 35
},
{
"epoch": 0.02,
"grad_norm": 0.045654296875,
"learning_rate": 1.5319148936170214e-05,
"loss": 0.9279,
"step": 36
},
{
"epoch": 0.02,
"grad_norm": 0.03662109375,
"learning_rate": 1.5744680851063832e-05,
"loss": 1.0153,
"step": 37
},
{
"epoch": 0.02,
"grad_norm": 0.04638671875,
"learning_rate": 1.6170212765957446e-05,
"loss": 0.9862,
"step": 38
},
{
"epoch": 0.03,
"grad_norm": 0.042236328125,
"learning_rate": 1.6595744680851064e-05,
"loss": 1.0962,
"step": 39
},
{
"epoch": 0.03,
"grad_norm": 0.040771484375,
"learning_rate": 1.7021276595744682e-05,
"loss": 0.956,
"step": 40
},
{
"epoch": 0.03,
"grad_norm": 0.035888671875,
"learning_rate": 1.74468085106383e-05,
"loss": 1.0559,
"step": 41
},
{
"epoch": 0.03,
"grad_norm": 0.04736328125,
"learning_rate": 1.7872340425531915e-05,
"loss": 1.0014,
"step": 42
},
{
"epoch": 0.03,
"grad_norm": 0.051025390625,
"learning_rate": 1.8297872340425533e-05,
"loss": 1.0252,
"step": 43
},
{
"epoch": 0.03,
"grad_norm": 0.04931640625,
"learning_rate": 1.872340425531915e-05,
"loss": 0.9541,
"step": 44
},
{
"epoch": 0.03,
"grad_norm": 0.0556640625,
"learning_rate": 1.914893617021277e-05,
"loss": 0.9603,
"step": 45
},
{
"epoch": 0.03,
"grad_norm": 0.0419921875,
"learning_rate": 1.9574468085106384e-05,
"loss": 1.0601,
"step": 46
},
{
"epoch": 0.03,
"grad_norm": 0.0478515625,
"learning_rate": 2e-05,
"loss": 0.9919,
"step": 47
},
{
"epoch": 0.03,
"grad_norm": 0.047119140625,
"learning_rate": 1.9999977801976743e-05,
"loss": 1.0247,
"step": 48
},
{
"epoch": 0.03,
"grad_norm": 0.048095703125,
"learning_rate": 1.999991120800551e-05,
"loss": 0.9936,
"step": 49
},
{
"epoch": 0.03,
"grad_norm": 0.05419921875,
"learning_rate": 1.9999800218381958e-05,
"loss": 1.0315,
"step": 50
},
{
"epoch": 0.03,
"grad_norm": 0.0478515625,
"learning_rate": 1.9999644833598836e-05,
"loss": 0.9392,
"step": 51
},
{
"epoch": 0.03,
"grad_norm": 0.0546875,
"learning_rate": 1.9999445054345993e-05,
"loss": 1.0716,
"step": 52
},
{
"epoch": 0.03,
"grad_norm": 0.05224609375,
"learning_rate": 1.9999200881510366e-05,
"loss": 0.9724,
"step": 53
},
{
"epoch": 0.04,
"grad_norm": 0.04736328125,
"learning_rate": 1.999891231617599e-05,
"loss": 0.9966,
"step": 54
},
{
"epoch": 0.04,
"grad_norm": 0.049072265625,
"learning_rate": 1.9998579359623977e-05,
"loss": 0.969,
"step": 55
},
{
"epoch": 0.04,
"grad_norm": 0.051513671875,
"learning_rate": 1.9998202013332525e-05,
"loss": 0.972,
"step": 56
},
{
"epoch": 0.04,
"grad_norm": 0.043701171875,
"learning_rate": 1.99977802789769e-05,
"loss": 0.9705,
"step": 57
},
{
"epoch": 0.04,
"grad_norm": 0.044189453125,
"learning_rate": 1.999731415842944e-05,
"loss": 1.002,
"step": 58
},
{
"epoch": 0.04,
"grad_norm": 0.039794921875,
"learning_rate": 1.9996803653759534e-05,
"loss": 0.9508,
"step": 59
},
{
"epoch": 0.04,
"grad_norm": 0.03759765625,
"learning_rate": 1.9996248767233616e-05,
"loss": 0.9232,
"step": 60
},
{
"epoch": 0.04,
"grad_norm": 0.0390625,
"learning_rate": 1.9995649501315172e-05,
"loss": 1.0054,
"step": 61
},
{
"epoch": 0.04,
"grad_norm": 0.034423828125,
"learning_rate": 1.9995005858664696e-05,
"loss": 0.9685,
"step": 62
},
{
"epoch": 0.04,
"grad_norm": 0.03369140625,
"learning_rate": 1.9994317842139715e-05,
"loss": 0.9313,
"step": 63
},
{
"epoch": 0.04,
"grad_norm": 0.0311279296875,
"learning_rate": 1.9993585454794748e-05,
"loss": 0.9463,
"step": 64
},
{
"epoch": 0.04,
"grad_norm": 0.0311279296875,
"learning_rate": 1.9992808699881303e-05,
"loss": 0.9049,
"step": 65
},
{
"epoch": 0.04,
"grad_norm": 0.0322265625,
"learning_rate": 1.999198758084787e-05,
"loss": 0.9088,
"step": 66
},
{
"epoch": 0.04,
"grad_norm": 0.033203125,
"learning_rate": 1.9991122101339885e-05,
"loss": 0.9369,
"step": 67
},
{
"epoch": 0.04,
"grad_norm": 0.0478515625,
"learning_rate": 1.9990212265199738e-05,
"loss": 0.9902,
"step": 68
},
{
"epoch": 0.04,
"grad_norm": 0.03466796875,
"learning_rate": 1.9989258076466743e-05,
"loss": 0.9569,
"step": 69
},
{
"epoch": 0.05,
"grad_norm": 0.042724609375,
"learning_rate": 1.998825953937712e-05,
"loss": 0.9779,
"step": 70
},
{
"epoch": 0.05,
"grad_norm": 0.0380859375,
"learning_rate": 1.9987216658363983e-05,
"loss": 0.9505,
"step": 71
},
{
"epoch": 0.05,
"grad_norm": 0.036376953125,
"learning_rate": 1.9986129438057306e-05,
"loss": 0.9374,
"step": 72
},
{
"epoch": 0.05,
"grad_norm": 0.0361328125,
"learning_rate": 1.998499788328392e-05,
"loss": 1.0086,
"step": 73
},
{
"epoch": 0.05,
"grad_norm": 0.034912109375,
"learning_rate": 1.9983821999067478e-05,
"loss": 1.046,
"step": 74
},
{
"epoch": 0.05,
"grad_norm": 0.031005859375,
"learning_rate": 1.998260179062844e-05,
"loss": 0.9375,
"step": 75
},
{
"epoch": 0.05,
"grad_norm": 0.032958984375,
"learning_rate": 1.9981337263384057e-05,
"loss": 0.9514,
"step": 76
},
{
"epoch": 0.05,
"grad_norm": 0.031982421875,
"learning_rate": 1.9980028422948323e-05,
"loss": 0.8629,
"step": 77
},
{
"epoch": 0.05,
"grad_norm": 0.03125,
"learning_rate": 1.9978675275131975e-05,
"loss": 0.933,
"step": 78
},
{
"epoch": 0.05,
"grad_norm": 0.0299072265625,
"learning_rate": 1.9977277825942453e-05,
"loss": 0.9408,
"step": 79
},
{
"epoch": 0.05,
"grad_norm": 0.031494140625,
"learning_rate": 1.997583608158388e-05,
"loss": 1.0041,
"step": 80
},
{
"epoch": 0.05,
"grad_norm": 0.031982421875,
"learning_rate": 1.997435004845703e-05,
"loss": 0.9605,
"step": 81
},
{
"epoch": 0.05,
"grad_norm": 0.03271484375,
"learning_rate": 1.99728197331593e-05,
"loss": 0.9256,
"step": 82
},
{
"epoch": 0.05,
"grad_norm": 0.034912109375,
"learning_rate": 1.9971245142484693e-05,
"loss": 1.0026,
"step": 83
},
{
"epoch": 0.05,
"grad_norm": 0.031494140625,
"learning_rate": 1.996962628342376e-05,
"loss": 0.9789,
"step": 84
},
{
"epoch": 0.06,
"grad_norm": 0.0341796875,
"learning_rate": 1.99679631631636e-05,
"loss": 0.9437,
"step": 85
},
{
"epoch": 0.06,
"grad_norm": 0.030517578125,
"learning_rate": 1.996625578908781e-05,
"loss": 0.9487,
"step": 86
},
{
"epoch": 0.06,
"grad_norm": 0.033447265625,
"learning_rate": 1.9964504168776454e-05,
"loss": 0.9645,
"step": 87
},
{
"epoch": 0.06,
"grad_norm": 0.03271484375,
"learning_rate": 1.9962708310006032e-05,
"loss": 0.9967,
"step": 88
},
{
"epoch": 0.06,
"grad_norm": 0.0296630859375,
"learning_rate": 1.996086822074945e-05,
"loss": 1.0195,
"step": 89
},
{
"epoch": 0.06,
"grad_norm": 0.030517578125,
"learning_rate": 1.9958983909175977e-05,
"loss": 0.8769,
"step": 90
},
{
"epoch": 0.06,
"grad_norm": 0.031494140625,
"learning_rate": 1.995705538365121e-05,
"loss": 0.8407,
"step": 91
},
{
"epoch": 0.06,
"grad_norm": 0.033203125,
"learning_rate": 1.995508265273704e-05,
"loss": 0.9368,
"step": 92
},
{
"epoch": 0.06,
"grad_norm": 0.031982421875,
"learning_rate": 1.9953065725191613e-05,
"loss": 0.9308,
"step": 93
},
{
"epoch": 0.06,
"grad_norm": 0.03076171875,
"learning_rate": 1.9951004609969286e-05,
"loss": 0.9235,
"step": 94
},
{
"epoch": 0.06,
"grad_norm": 0.032958984375,
"learning_rate": 1.9948899316220603e-05,
"loss": 0.9008,
"step": 95
},
{
"epoch": 0.06,
"grad_norm": 0.03173828125,
"learning_rate": 1.9946749853292233e-05,
"loss": 0.9735,
"step": 96
},
{
"epoch": 0.06,
"grad_norm": 0.033447265625,
"learning_rate": 1.994455623072694e-05,
"loss": 0.9328,
"step": 97
},
{
"epoch": 0.06,
"grad_norm": 0.033203125,
"learning_rate": 1.994231845826354e-05,
"loss": 0.8967,
"step": 98
},
{
"epoch": 0.06,
"grad_norm": 0.03173828125,
"learning_rate": 1.994003654583686e-05,
"loss": 0.8363,
"step": 99
},
{
"epoch": 0.07,
"grad_norm": 0.033447265625,
"learning_rate": 1.993771050357769e-05,
"loss": 0.9072,
"step": 100
},
{
"epoch": 0.07,
"grad_norm": 0.03369140625,
"learning_rate": 1.9935340341812737e-05,
"loss": 0.9502,
"step": 101
},
{
"epoch": 0.07,
"grad_norm": 0.03271484375,
"learning_rate": 1.993292607106458e-05,
"loss": 0.8794,
"step": 102
},
{
"epoch": 0.07,
"grad_norm": 0.03466796875,
"learning_rate": 1.9930467702051632e-05,
"loss": 0.9601,
"step": 103
},
{
"epoch": 0.07,
"grad_norm": 0.0341796875,
"learning_rate": 1.9927965245688073e-05,
"loss": 0.9099,
"step": 104
},
{
"epoch": 0.07,
"grad_norm": 0.033935546875,
"learning_rate": 1.9925418713083824e-05,
"loss": 0.929,
"step": 105
},
{
"epoch": 0.07,
"grad_norm": 0.033447265625,
"learning_rate": 1.992282811554448e-05,
"loss": 0.9046,
"step": 106
},
{
"epoch": 0.07,
"grad_norm": 0.031005859375,
"learning_rate": 1.9920193464571277e-05,
"loss": 0.9393,
"step": 107
},
{
"epoch": 0.07,
"grad_norm": 0.03515625,
"learning_rate": 1.9917514771861015e-05,
"loss": 0.9933,
"step": 108
},
{
"epoch": 0.07,
"grad_norm": 0.035400390625,
"learning_rate": 1.9914792049306034e-05,
"loss": 0.8865,
"step": 109
},
{
"epoch": 0.07,
"grad_norm": 0.032958984375,
"learning_rate": 1.9912025308994146e-05,
"loss": 0.9158,
"step": 110
},
{
"epoch": 0.07,
"grad_norm": 0.035888671875,
"learning_rate": 1.990921456320859e-05,
"loss": 0.9143,
"step": 111
},
{
"epoch": 0.07,
"grad_norm": 0.03369140625,
"learning_rate": 1.9906359824427953e-05,
"loss": 0.9707,
"step": 112
},
{
"epoch": 0.07,
"grad_norm": 0.0341796875,
"learning_rate": 1.9903461105326155e-05,
"loss": 0.8894,
"step": 113
},
{
"epoch": 0.07,
"grad_norm": 0.0341796875,
"learning_rate": 1.9900518418772364e-05,
"loss": 0.966,
"step": 114
},
{
"epoch": 0.07,
"grad_norm": 0.035888671875,
"learning_rate": 1.989753177783094e-05,
"loss": 0.9201,
"step": 115
},
{
"epoch": 0.08,
"grad_norm": 0.0341796875,
"learning_rate": 1.9894501195761393e-05,
"loss": 0.9299,
"step": 116
},
{
"epoch": 0.08,
"grad_norm": 0.03369140625,
"learning_rate": 1.9891426686018308e-05,
"loss": 0.8812,
"step": 117
},
{
"epoch": 0.08,
"grad_norm": 0.035888671875,
"learning_rate": 1.9888308262251286e-05,
"loss": 0.9995,
"step": 118
},
{
"epoch": 0.08,
"grad_norm": 0.031982421875,
"learning_rate": 1.9885145938304905e-05,
"loss": 0.8804,
"step": 119
},
{
"epoch": 0.08,
"grad_norm": 0.03955078125,
"learning_rate": 1.988193972821863e-05,
"loss": 0.9021,
"step": 120
},
{
"epoch": 0.08,
"grad_norm": 0.039306640625,
"learning_rate": 1.987868964622676e-05,
"loss": 0.8066,
"step": 121
},
{
"epoch": 0.08,
"grad_norm": 0.0361328125,
"learning_rate": 1.9875395706758388e-05,
"loss": 0.909,
"step": 122
},
{
"epoch": 0.08,
"grad_norm": 0.037109375,
"learning_rate": 1.987205792443729e-05,
"loss": 0.8611,
"step": 123
},
{
"epoch": 0.08,
"grad_norm": 0.037841796875,
"learning_rate": 1.9868676314081907e-05,
"loss": 0.9249,
"step": 124
},
{
"epoch": 0.08,
"grad_norm": 0.0322265625,
"learning_rate": 1.986525089070525e-05,
"loss": 0.837,
"step": 125
},
{
"epoch": 0.08,
"grad_norm": 0.03515625,
"learning_rate": 1.986178166951484e-05,
"loss": 0.8653,
"step": 126
},
{
"epoch": 0.08,
"grad_norm": 0.03662109375,
"learning_rate": 1.9858268665912653e-05,
"loss": 0.9011,
"step": 127
},
{
"epoch": 0.08,
"grad_norm": 0.035888671875,
"learning_rate": 1.9854711895495034e-05,
"loss": 0.9942,
"step": 128
},
{
"epoch": 0.08,
"grad_norm": 0.032958984375,
"learning_rate": 1.985111137405264e-05,
"loss": 0.9303,
"step": 129
},
{
"epoch": 0.08,
"grad_norm": 0.03369140625,
"learning_rate": 1.9847467117570364e-05,
"loss": 0.9206,
"step": 130
},
{
"epoch": 0.09,
"grad_norm": 0.033935546875,
"learning_rate": 1.9843779142227258e-05,
"loss": 0.8366,
"step": 131
},
{
"epoch": 0.09,
"grad_norm": 0.0380859375,
"learning_rate": 1.9840047464396477e-05,
"loss": 0.8988,
"step": 132
},
{
"epoch": 0.09,
"grad_norm": 0.06005859375,
"learning_rate": 1.98362721006452e-05,
"loss": 0.9719,
"step": 133
},
{
"epoch": 0.09,
"grad_norm": 0.03466796875,
"learning_rate": 1.983245306773454e-05,
"loss": 0.9629,
"step": 134
},
{
"epoch": 0.09,
"grad_norm": 0.0380859375,
"learning_rate": 1.98285903826195e-05,
"loss": 0.8384,
"step": 135
},
{
"epoch": 0.09,
"grad_norm": 0.034423828125,
"learning_rate": 1.9824684062448876e-05,
"loss": 0.8031,
"step": 136
},
{
"epoch": 0.09,
"grad_norm": 0.036376953125,
"learning_rate": 1.982073412456518e-05,
"loss": 0.8623,
"step": 137
},
{
"epoch": 0.09,
"grad_norm": 0.033935546875,
"learning_rate": 1.981674058650458e-05,
"loss": 0.8357,
"step": 138
},
{
"epoch": 0.09,
"grad_norm": 0.036376953125,
"learning_rate": 1.98127034659968e-05,
"loss": 0.9306,
"step": 139
},
{
"epoch": 0.09,
"grad_norm": 0.03564453125,
"learning_rate": 1.9808622780965064e-05,
"loss": 0.9464,
"step": 140
},
{
"epoch": 0.09,
"grad_norm": 0.033935546875,
"learning_rate": 1.9804498549526e-05,
"loss": 0.9146,
"step": 141
},
{
"epoch": 0.09,
"grad_norm": 0.034912109375,
"learning_rate": 1.980033078998956e-05,
"loss": 0.8999,
"step": 142
},
{
"epoch": 0.09,
"grad_norm": 0.03564453125,
"learning_rate": 1.9796119520858957e-05,
"loss": 0.9932,
"step": 143
},
{
"epoch": 0.09,
"grad_norm": 0.035888671875,
"learning_rate": 1.9791864760830554e-05,
"loss": 0.8976,
"step": 144
},
{
"epoch": 0.09,
"grad_norm": 0.03369140625,
"learning_rate": 1.9787566528793806e-05,
"loss": 0.9024,
"step": 145
},
{
"epoch": 0.09,
"grad_norm": 0.033447265625,
"learning_rate": 1.9783224843831162e-05,
"loss": 0.8262,
"step": 146
},
{
"epoch": 0.1,
"grad_norm": 0.036376953125,
"learning_rate": 1.977883972521799e-05,
"loss": 0.9491,
"step": 147
},
{
"epoch": 0.1,
"grad_norm": 0.0361328125,
"learning_rate": 1.9774411192422486e-05,
"loss": 0.9347,
"step": 148
},
{
"epoch": 0.1,
"grad_norm": 0.0390625,
"learning_rate": 1.9769939265105573e-05,
"loss": 0.8401,
"step": 149
},
{
"epoch": 0.1,
"grad_norm": 0.03466796875,
"learning_rate": 1.976542396312085e-05,
"loss": 0.8949,
"step": 150
},
{
"epoch": 0.1,
"grad_norm": 0.03369140625,
"learning_rate": 1.976086530651447e-05,
"loss": 0.8675,
"step": 151
},
{
"epoch": 0.1,
"grad_norm": 0.0322265625,
"learning_rate": 1.975626331552507e-05,
"loss": 0.8617,
"step": 152
},
{
"epoch": 0.1,
"grad_norm": 0.034423828125,
"learning_rate": 1.9751618010583665e-05,
"loss": 0.8374,
"step": 153
},
{
"epoch": 0.1,
"grad_norm": 0.036865234375,
"learning_rate": 1.974692941231357e-05,
"loss": 0.8396,
"step": 154
},
{
"epoch": 0.1,
"grad_norm": 0.034912109375,
"learning_rate": 1.974219754153032e-05,
"loss": 0.9553,
"step": 155
},
{
"epoch": 0.1,
"grad_norm": 0.0341796875,
"learning_rate": 1.9737422419241538e-05,
"loss": 0.8821,
"step": 156
},
{
"epoch": 0.1,
"grad_norm": 0.03466796875,
"learning_rate": 1.9732604066646882e-05,
"loss": 0.8778,
"step": 157
},
{
"epoch": 0.1,
"grad_norm": 0.03955078125,
"learning_rate": 1.9727742505137936e-05,
"loss": 0.8552,
"step": 158
},
{
"epoch": 0.1,
"grad_norm": 0.03515625,
"learning_rate": 1.9722837756298112e-05,
"loss": 0.9358,
"step": 159
},
{
"epoch": 0.1,
"grad_norm": 0.03466796875,
"learning_rate": 1.9717889841902553e-05,
"loss": 0.9171,
"step": 160
},
{
"epoch": 0.1,
"grad_norm": 0.033447265625,
"learning_rate": 1.971289878391804e-05,
"loss": 0.8395,
"step": 161
},
{
"epoch": 0.11,
"grad_norm": 0.035400390625,
"learning_rate": 1.97078646045029e-05,
"loss": 0.8955,
"step": 162
},
{
"epoch": 0.11,
"grad_norm": 0.0419921875,
"learning_rate": 1.9702787326006906e-05,
"loss": 0.8192,
"step": 163
},
{
"epoch": 0.11,
"grad_norm": 0.037353515625,
"learning_rate": 1.9697666970971153e-05,
"loss": 0.8264,
"step": 164
},
{
"epoch": 0.11,
"grad_norm": 0.03857421875,
"learning_rate": 1.9692503562128004e-05,
"loss": 0.9093,
"step": 165
},
{
"epoch": 0.11,
"grad_norm": 0.044189453125,
"learning_rate": 1.9687297122400952e-05,
"loss": 0.9446,
"step": 166
},
{
"epoch": 0.11,
"grad_norm": 0.03466796875,
"learning_rate": 1.9682047674904527e-05,
"loss": 0.8802,
"step": 167
},
{
"epoch": 0.11,
"grad_norm": 0.03564453125,
"learning_rate": 1.9676755242944202e-05,
"loss": 0.9152,
"step": 168
},
{
"epoch": 0.11,
"grad_norm": 0.032958984375,
"learning_rate": 1.9671419850016283e-05,
"loss": 0.8396,
"step": 169
},
{
"epoch": 0.11,
"grad_norm": 0.0341796875,
"learning_rate": 1.9666041519807802e-05,
"loss": 0.7976,
"step": 170
},
{
"epoch": 0.11,
"grad_norm": 0.036376953125,
"learning_rate": 1.966062027619643e-05,
"loss": 0.8979,
"step": 171
},
{
"epoch": 0.11,
"grad_norm": 0.032470703125,
"learning_rate": 1.9655156143250328e-05,
"loss": 0.8632,
"step": 172
},
{
"epoch": 0.11,
"grad_norm": 0.0390625,
"learning_rate": 1.96496491452281e-05,
"loss": 0.9456,
"step": 173
},
{
"epoch": 0.11,
"grad_norm": 0.0361328125,
"learning_rate": 1.9644099306578636e-05,
"loss": 0.837,
"step": 174
},
{
"epoch": 0.11,
"grad_norm": 0.031982421875,
"learning_rate": 1.9638506651941024e-05,
"loss": 0.7911,
"step": 175
},
{
"epoch": 0.11,
"grad_norm": 0.03564453125,
"learning_rate": 1.963287120614444e-05,
"loss": 0.8926,
"step": 176
},
{
"epoch": 0.12,
"grad_norm": 0.0341796875,
"learning_rate": 1.9627192994208038e-05,
"loss": 0.8054,
"step": 177
},
{
"epoch": 0.12,
"grad_norm": 0.034423828125,
"learning_rate": 1.962147204134083e-05,
"loss": 0.9226,
"step": 178
},
{
"epoch": 0.12,
"grad_norm": 0.035400390625,
"learning_rate": 1.9615708372941588e-05,
"loss": 0.8987,
"step": 179
},
{
"epoch": 0.12,
"grad_norm": 0.040283203125,
"learning_rate": 1.960990201459872e-05,
"loss": 0.8729,
"step": 180
},
{
"epoch": 0.12,
"grad_norm": 0.039306640625,
"learning_rate": 1.960405299209016e-05,
"loss": 0.9454,
"step": 181
},
{
"epoch": 0.12,
"grad_norm": 0.035888671875,
"learning_rate": 1.9598161331383258e-05,
"loss": 0.9157,
"step": 182
},
{
"epoch": 0.12,
"grad_norm": 0.03857421875,
"learning_rate": 1.9592227058634655e-05,
"loss": 0.8724,
"step": 183
},
{
"epoch": 0.12,
"grad_norm": 0.0361328125,
"learning_rate": 1.958625020019018e-05,
"loss": 0.8446,
"step": 184
},
{
"epoch": 0.12,
"grad_norm": 0.0341796875,
"learning_rate": 1.9580230782584722e-05,
"loss": 0.8441,
"step": 185
},
{
"epoch": 0.12,
"grad_norm": 0.037841796875,
"learning_rate": 1.957416883254211e-05,
"loss": 0.9078,
"step": 186
},
{
"epoch": 0.12,
"grad_norm": 0.037109375,
"learning_rate": 1.9568064376975013e-05,
"loss": 0.9075,
"step": 187
},
{
"epoch": 0.12,
"grad_norm": 0.036376953125,
"learning_rate": 1.956191744298479e-05,
"loss": 0.8932,
"step": 188
},
{
"epoch": 0.12,
"grad_norm": 0.03466796875,
"learning_rate": 1.955572805786141e-05,
"loss": 0.8577,
"step": 189
},
{
"epoch": 0.12,
"grad_norm": 0.03564453125,
"learning_rate": 1.9549496249083288e-05,
"loss": 0.8257,
"step": 190
},
{
"epoch": 0.12,
"grad_norm": 0.03369140625,
"learning_rate": 1.954322204431719e-05,
"loss": 0.7848,
"step": 191
},
{
"epoch": 0.12,
"grad_norm": 0.037353515625,
"learning_rate": 1.953690547141811e-05,
"loss": 0.8617,
"step": 192
},
{
"epoch": 0.13,
"grad_norm": 0.034912109375,
"learning_rate": 1.953054655842913e-05,
"loss": 0.7992,
"step": 193
},
{
"epoch": 0.13,
"grad_norm": 0.03515625,
"learning_rate": 1.9524145333581315e-05,
"loss": 0.8101,
"step": 194
},
{
"epoch": 0.13,
"grad_norm": 0.038330078125,
"learning_rate": 1.951770182529357e-05,
"loss": 0.8669,
"step": 195
},
{
"epoch": 0.13,
"grad_norm": 0.03564453125,
"learning_rate": 1.951121606217252e-05,
"loss": 0.8589,
"step": 196
},
{
"epoch": 0.13,
"grad_norm": 0.036376953125,
"learning_rate": 1.9504688073012397e-05,
"loss": 0.9205,
"step": 197
},
{
"epoch": 0.13,
"grad_norm": 0.039794921875,
"learning_rate": 1.9498117886794885e-05,
"loss": 0.9052,
"step": 198
},
{
"epoch": 0.13,
"grad_norm": 0.037109375,
"learning_rate": 1.9491505532689017e-05,
"loss": 0.8167,
"step": 199
},
{
"epoch": 0.13,
"grad_norm": 0.03662109375,
"learning_rate": 1.948485104005103e-05,
"loss": 0.9358,
"step": 200
},
{
"epoch": 0.13,
"grad_norm": 0.03759765625,
"learning_rate": 1.947815443842424e-05,
"loss": 0.8639,
"step": 201
},
{
"epoch": 0.13,
"grad_norm": 0.034423828125,
"learning_rate": 1.9471415757538918e-05,
"loss": 0.8684,
"step": 202
},
{
"epoch": 0.13,
"grad_norm": 0.032470703125,
"learning_rate": 1.946463502731213e-05,
"loss": 0.7762,
"step": 203
},
{
"epoch": 0.13,
"grad_norm": 0.034912109375,
"learning_rate": 1.9457812277847645e-05,
"loss": 0.8664,
"step": 204
},
{
"epoch": 0.13,
"grad_norm": 0.038330078125,
"learning_rate": 1.945094753943577e-05,
"loss": 0.9964,
"step": 205
},
{
"epoch": 0.13,
"grad_norm": 0.037353515625,
"learning_rate": 1.944404084255324e-05,
"loss": 0.8768,
"step": 206
},
{
"epoch": 0.13,
"grad_norm": 0.0380859375,
"learning_rate": 1.9437092217863043e-05,
"loss": 0.8999,
"step": 207
},
{
"epoch": 0.14,
"grad_norm": 0.036376953125,
"learning_rate": 1.9430101696214335e-05,
"loss": 0.8437,
"step": 208
},
{
"epoch": 0.14,
"grad_norm": 0.037841796875,
"learning_rate": 1.9423069308642267e-05,
"loss": 0.8273,
"step": 209
},
{
"epoch": 0.14,
"grad_norm": 0.04052734375,
"learning_rate": 1.9415995086367858e-05,
"loss": 0.9275,
"step": 210
},
{
"epoch": 0.14,
"grad_norm": 0.036865234375,
"learning_rate": 1.940887906079786e-05,
"loss": 0.8938,
"step": 211
},
{
"epoch": 0.14,
"grad_norm": 0.033203125,
"learning_rate": 1.9401721263524616e-05,
"loss": 0.8414,
"step": 212
},
{
"epoch": 0.14,
"grad_norm": 0.037109375,
"learning_rate": 1.9394521726325907e-05,
"loss": 0.9055,
"step": 213
},
{
"epoch": 0.14,
"grad_norm": 0.042236328125,
"learning_rate": 1.938728048116484e-05,
"loss": 0.9002,
"step": 214
},
{
"epoch": 0.14,
"grad_norm": 0.034912109375,
"learning_rate": 1.9379997560189677e-05,
"loss": 0.8598,
"step": 215
},
{
"epoch": 0.14,
"grad_norm": 0.038330078125,
"learning_rate": 1.9372672995733706e-05,
"loss": 0.8557,
"step": 216
},
{
"epoch": 0.14,
"grad_norm": 0.036376953125,
"learning_rate": 1.9365306820315104e-05,
"loss": 0.9001,
"step": 217
},
{
"epoch": 0.14,
"grad_norm": 0.037109375,
"learning_rate": 1.9357899066636774e-05,
"loss": 0.842,
"step": 218
},
{
"epoch": 0.14,
"grad_norm": 0.036865234375,
"learning_rate": 1.935044976758621e-05,
"loss": 0.8759,
"step": 219
},
{
"epoch": 0.14,
"grad_norm": 0.03759765625,
"learning_rate": 1.9342958956235365e-05,
"loss": 0.8306,
"step": 220
},
{
"epoch": 0.14,
"grad_norm": 0.03759765625,
"learning_rate": 1.933542666584047e-05,
"loss": 0.8322,
"step": 221
},
{
"epoch": 0.14,
"grad_norm": 0.03515625,
"learning_rate": 1.9327852929841918e-05,
"loss": 0.8149,
"step": 222
},
{
"epoch": 0.14,
"grad_norm": 0.03955078125,
"learning_rate": 1.9320237781864106e-05,
"loss": 0.8458,
"step": 223
},
{
"epoch": 0.15,
"grad_norm": 0.03759765625,
"learning_rate": 1.9312581255715276e-05,
"loss": 0.84,
"step": 224
},
{
"epoch": 0.15,
"grad_norm": 0.038818359375,
"learning_rate": 1.9304883385387383e-05,
"loss": 0.8254,
"step": 225
},
{
"epoch": 0.15,
"grad_norm": 0.03466796875,
"learning_rate": 1.9297144205055925e-05,
"loss": 0.8898,
"step": 226
},
{
"epoch": 0.15,
"grad_norm": 0.037353515625,
"learning_rate": 1.9289363749079798e-05,
"loss": 0.8231,
"step": 227
},
{
"epoch": 0.15,
"grad_norm": 0.041015625,
"learning_rate": 1.928154205200116e-05,
"loss": 0.8764,
"step": 228
},
{
"epoch": 0.15,
"grad_norm": 0.037353515625,
"learning_rate": 1.9273679148545246e-05,
"loss": 0.8436,
"step": 229
},
{
"epoch": 0.15,
"grad_norm": 0.037841796875,
"learning_rate": 1.9265775073620244e-05,
"loss": 0.8622,
"step": 230
},
{
"epoch": 0.15,
"grad_norm": 0.036865234375,
"learning_rate": 1.9257829862317118e-05,
"loss": 0.8484,
"step": 231
},
{
"epoch": 0.15,
"grad_norm": 0.037841796875,
"learning_rate": 1.9249843549909467e-05,
"loss": 0.8765,
"step": 232
},
{
"epoch": 0.15,
"grad_norm": 0.03759765625,
"learning_rate": 1.9241816171853362e-05,
"loss": 0.8762,
"step": 233
},
{
"epoch": 0.15,
"grad_norm": 0.03955078125,
"learning_rate": 1.9233747763787187e-05,
"loss": 0.8716,
"step": 234
},
{
"epoch": 0.15,
"grad_norm": 0.04443359375,
"learning_rate": 1.9225638361531482e-05,
"loss": 0.8453,
"step": 235
},
{
"epoch": 0.15,
"grad_norm": 0.037109375,
"learning_rate": 1.9217488001088784e-05,
"loss": 0.7992,
"step": 236
},
{
"epoch": 0.15,
"grad_norm": 0.036865234375,
"learning_rate": 1.920929671864348e-05,
"loss": 0.9607,
"step": 237
},
{
"epoch": 0.15,
"grad_norm": 0.037841796875,
"learning_rate": 1.920106455056162e-05,
"loss": 0.8416,
"step": 238
},
{
"epoch": 0.16,
"grad_norm": 0.0380859375,
"learning_rate": 1.9192791533390778e-05,
"loss": 0.7983,
"step": 239
},
{
"epoch": 0.16,
"grad_norm": 0.040283203125,
"learning_rate": 1.9184477703859876e-05,
"loss": 0.8942,
"step": 240
},
{
"epoch": 0.16,
"grad_norm": 0.037841796875,
"learning_rate": 1.9176123098879035e-05,
"loss": 0.8849,
"step": 241
},
{
"epoch": 0.16,
"grad_norm": 0.037109375,
"learning_rate": 1.9167727755539393e-05,
"loss": 0.83,
"step": 242
},
{
"epoch": 0.16,
"grad_norm": 0.036865234375,
"learning_rate": 1.9159291711112962e-05,
"loss": 0.7999,
"step": 243
},
{
"epoch": 0.16,
"grad_norm": 0.035400390625,
"learning_rate": 1.9150815003052436e-05,
"loss": 0.8281,
"step": 244
},
{
"epoch": 0.16,
"grad_norm": 0.038818359375,
"learning_rate": 1.9142297668991053e-05,
"loss": 0.884,
"step": 245
},
{
"epoch": 0.16,
"grad_norm": 0.044189453125,
"learning_rate": 1.913373974674241e-05,
"loss": 0.8701,
"step": 246
},
{
"epoch": 0.16,
"grad_norm": 0.039306640625,
"learning_rate": 1.9125141274300293e-05,
"loss": 0.8734,
"step": 247
},
{
"epoch": 0.16,
"grad_norm": 0.03857421875,
"learning_rate": 1.9116502289838524e-05,
"loss": 0.8851,
"step": 248
},
{
"epoch": 0.16,
"grad_norm": 0.044189453125,
"learning_rate": 1.910782283171078e-05,
"loss": 0.9402,
"step": 249
},
{
"epoch": 0.16,
"grad_norm": 0.038818359375,
"learning_rate": 1.909910293845042e-05,
"loss": 0.831,
"step": 250
},
{
"epoch": 0.16,
"grad_norm": 0.038330078125,
"learning_rate": 1.909034264877032e-05,
"loss": 0.8093,
"step": 251
},
{
"epoch": 0.16,
"grad_norm": 0.039794921875,
"learning_rate": 1.9081542001562713e-05,
"loss": 0.9085,
"step": 252
},
{
"epoch": 0.16,
"grad_norm": 0.038330078125,
"learning_rate": 1.9072701035898985e-05,
"loss": 0.8466,
"step": 253
},
{
"epoch": 0.17,
"grad_norm": 0.0419921875,
"learning_rate": 1.906381979102953e-05,
"loss": 0.8938,
"step": 254
},
{
"epoch": 0.17,
"grad_norm": 0.0400390625,
"learning_rate": 1.9054898306383568e-05,
"loss": 0.8787,
"step": 255
},
{
"epoch": 0.17,
"grad_norm": 0.040283203125,
"learning_rate": 1.904593662156896e-05,
"loss": 0.882,
"step": 256
},
{
"epoch": 0.17,
"grad_norm": 0.03759765625,
"learning_rate": 1.903693477637204e-05,
"loss": 0.7803,
"step": 257
},
{
"epoch": 0.17,
"grad_norm": 0.037353515625,
"learning_rate": 1.902789281075745e-05,
"loss": 0.8078,
"step": 258
},
{
"epoch": 0.17,
"grad_norm": 0.03857421875,
"learning_rate": 1.9018810764867935e-05,
"loss": 0.8318,
"step": 259
},
{
"epoch": 0.17,
"grad_norm": 0.04150390625,
"learning_rate": 1.900968867902419e-05,
"loss": 0.8728,
"step": 260
},
{
"epoch": 0.17,
"grad_norm": 0.0390625,
"learning_rate": 1.9000526593724678e-05,
"loss": 0.836,
"step": 261
},
{
"epoch": 0.17,
"grad_norm": 0.0380859375,
"learning_rate": 1.8991324549645424e-05,
"loss": 0.9197,
"step": 262
},
{
"epoch": 0.17,
"grad_norm": 0.04150390625,
"learning_rate": 1.898208258763987e-05,
"loss": 0.7965,
"step": 263
},
{
"epoch": 0.17,
"grad_norm": 0.041015625,
"learning_rate": 1.897280074873868e-05,
"loss": 0.8078,
"step": 264
},
{
"epoch": 0.17,
"grad_norm": 0.042236328125,
"learning_rate": 1.8963479074149537e-05,
"loss": 0.9035,
"step": 265
},
{
"epoch": 0.17,
"grad_norm": 0.040771484375,
"learning_rate": 1.8954117605257e-05,
"loss": 0.8515,
"step": 266
},
{
"epoch": 0.17,
"grad_norm": 0.041015625,
"learning_rate": 1.8944716383622288e-05,
"loss": 0.8147,
"step": 267
},
{
"epoch": 0.17,
"grad_norm": 0.040771484375,
"learning_rate": 1.8935275450983102e-05,
"loss": 0.8121,
"step": 268
},
{
"epoch": 0.17,
"grad_norm": 0.04150390625,
"learning_rate": 1.8925794849253462e-05,
"loss": 0.858,
"step": 269
},
{
"epoch": 0.18,
"grad_norm": 0.044921875,
"learning_rate": 1.8916274620523482e-05,
"loss": 0.8502,
"step": 270
},
{
"epoch": 0.18,
"grad_norm": 0.04150390625,
"learning_rate": 1.8906714807059218e-05,
"loss": 0.8438,
"step": 271
},
{
"epoch": 0.18,
"grad_norm": 0.0390625,
"learning_rate": 1.889711545130246e-05,
"loss": 0.8464,
"step": 272
},
{
"epoch": 0.18,
"grad_norm": 0.05712890625,
"learning_rate": 1.8887476595870558e-05,
"loss": 0.8227,
"step": 273
},
{
"epoch": 0.18,
"grad_norm": 0.036865234375,
"learning_rate": 1.887779828355621e-05,
"loss": 0.8546,
"step": 274
},
{
"epoch": 0.18,
"grad_norm": 0.037109375,
"learning_rate": 1.8868080557327305e-05,
"loss": 0.8932,
"step": 275
},
{
"epoch": 0.18,
"grad_norm": 0.041748046875,
"learning_rate": 1.8858323460326704e-05,
"loss": 0.889,
"step": 276
},
{
"epoch": 0.18,
"grad_norm": 0.039306640625,
"learning_rate": 1.8848527035872057e-05,
"loss": 0.8174,
"step": 277
},
{
"epoch": 0.18,
"grad_norm": 0.038818359375,
"learning_rate": 1.883869132745561e-05,
"loss": 0.8183,
"step": 278
},
{
"epoch": 0.18,
"grad_norm": 0.0390625,
"learning_rate": 1.8828816378744035e-05,
"loss": 0.8924,
"step": 279
},
{
"epoch": 0.18,
"grad_norm": 0.038330078125,
"learning_rate": 1.8818902233578188e-05,
"loss": 0.7906,
"step": 280
},
{
"epoch": 0.18,
"grad_norm": 0.04248046875,
"learning_rate": 1.8808948935972965e-05,
"loss": 0.8118,
"step": 281
},
{
"epoch": 0.18,
"grad_norm": 0.03857421875,
"learning_rate": 1.8798956530117058e-05,
"loss": 0.8512,
"step": 282
},
{
"epoch": 0.18,
"grad_norm": 0.044677734375,
"learning_rate": 1.8788925060372806e-05,
"loss": 0.8224,
"step": 283
},
{
"epoch": 0.18,
"grad_norm": 0.047607421875,
"learning_rate": 1.8778854571275972e-05,
"loss": 0.8207,
"step": 284
},
{
"epoch": 0.19,
"grad_norm": 0.038330078125,
"learning_rate": 1.876874510753554e-05,
"loss": 0.8011,
"step": 285
},
{
"epoch": 0.19,
"grad_norm": 0.04296875,
"learning_rate": 1.875859671403354e-05,
"loss": 0.8132,
"step": 286
},
{
"epoch": 0.19,
"grad_norm": 0.042236328125,
"learning_rate": 1.874840943582482e-05,
"loss": 0.9056,
"step": 287
},
{
"epoch": 0.19,
"grad_norm": 0.0400390625,
"learning_rate": 1.8738183318136867e-05,
"loss": 0.8353,
"step": 288
},
{
"epoch": 0.19,
"grad_norm": 0.040771484375,
"learning_rate": 1.872791840636961e-05,
"loss": 0.7943,
"step": 289
},
{
"epoch": 0.19,
"grad_norm": 0.045654296875,
"learning_rate": 1.871761474609519e-05,
"loss": 0.8207,
"step": 290
},
{
"epoch": 0.19,
"grad_norm": 0.04345703125,
"learning_rate": 1.8707272383057785e-05,
"loss": 0.8415,
"step": 291
},
{
"epoch": 0.19,
"grad_norm": 0.04052734375,
"learning_rate": 1.8696891363173405e-05,
"loss": 0.797,
"step": 292
},
{
"epoch": 0.19,
"grad_norm": 0.046142578125,
"learning_rate": 1.8686471732529667e-05,
"loss": 0.8248,
"step": 293
},
{
"epoch": 0.19,
"grad_norm": 0.041259765625,
"learning_rate": 1.8676013537385614e-05,
"loss": 0.76,
"step": 294
},
{
"epoch": 0.19,
"grad_norm": 0.04150390625,
"learning_rate": 1.8665516824171497e-05,
"loss": 0.8362,
"step": 295
},
{
"epoch": 0.19,
"grad_norm": 0.040771484375,
"learning_rate": 1.865498163948858e-05,
"loss": 0.8093,
"step": 296
},
{
"epoch": 0.19,
"grad_norm": 0.0380859375,
"learning_rate": 1.864440803010891e-05,
"loss": 0.7735,
"step": 297
},
{
"epoch": 0.19,
"grad_norm": 0.041015625,
"learning_rate": 1.863379604297513e-05,
"loss": 0.8824,
"step": 298
},
{
"epoch": 0.19,
"grad_norm": 0.039794921875,
"learning_rate": 1.862314572520028e-05,
"loss": 0.8157,
"step": 299
},
{
"epoch": 0.2,
"grad_norm": 0.03857421875,
"learning_rate": 1.861245712406755e-05,
"loss": 0.8084,
"step": 300
},
{
"epoch": 0.2,
"grad_norm": 0.049072265625,
"learning_rate": 1.86017302870301e-05,
"loss": 0.7976,
"step": 301
},
{
"epoch": 0.2,
"grad_norm": 0.041259765625,
"learning_rate": 1.8590965261710856e-05,
"loss": 0.8406,
"step": 302
},
{
"epoch": 0.2,
"grad_norm": 0.041015625,
"learning_rate": 1.858016209590227e-05,
"loss": 0.8145,
"step": 303
},
{
"epoch": 0.2,
"grad_norm": 0.038818359375,
"learning_rate": 1.8569320837566128e-05,
"loss": 0.8142,
"step": 304
},
{
"epoch": 0.2,
"grad_norm": 0.04052734375,
"learning_rate": 1.8558441534833327e-05,
"loss": 0.8894,
"step": 305
},
{
"epoch": 0.2,
"grad_norm": 0.04296875,
"learning_rate": 1.8547524236003675e-05,
"loss": 0.8793,
"step": 306
},
{
"epoch": 0.2,
"grad_norm": 0.0380859375,
"learning_rate": 1.8536568989545662e-05,
"loss": 0.868,
"step": 307
},
{
"epoch": 0.2,
"grad_norm": 0.0419921875,
"learning_rate": 1.8525575844096243e-05,
"loss": 0.8572,
"step": 308
},
{
"epoch": 0.2,
"grad_norm": 0.04931640625,
"learning_rate": 1.8514544848460653e-05,
"loss": 0.7933,
"step": 309
},
{
"epoch": 0.2,
"grad_norm": 0.0390625,
"learning_rate": 1.8503476051612138e-05,
"loss": 0.8017,
"step": 310
},
{
"epoch": 0.2,
"grad_norm": 0.042724609375,
"learning_rate": 1.8492369502691785e-05,
"loss": 0.8317,
"step": 311
},
{
"epoch": 0.2,
"grad_norm": 0.04052734375,
"learning_rate": 1.8481225251008284e-05,
"loss": 0.8201,
"step": 312
},
{
"epoch": 0.2,
"grad_norm": 0.041748046875,
"learning_rate": 1.8470043346037698e-05,
"loss": 0.8258,
"step": 313
},
{
"epoch": 0.2,
"grad_norm": 0.0419921875,
"learning_rate": 1.8458823837423274e-05,
"loss": 0.8402,
"step": 314
},
{
"epoch": 0.2,
"grad_norm": 0.044921875,
"learning_rate": 1.8447566774975187e-05,
"loss": 0.9293,
"step": 315
},
{
"epoch": 0.21,
"grad_norm": 0.048583984375,
"learning_rate": 1.8436272208670346e-05,
"loss": 0.8716,
"step": 316
},
{
"epoch": 0.21,
"grad_norm": 0.0458984375,
"learning_rate": 1.842494018865216e-05,
"loss": 0.8868,
"step": 317
},
{
"epoch": 0.21,
"grad_norm": 0.047607421875,
"learning_rate": 1.841357076523032e-05,
"loss": 0.9027,
"step": 318
},
{
"epoch": 0.21,
"grad_norm": 0.044189453125,
"learning_rate": 1.840216398888057e-05,
"loss": 0.7936,
"step": 319
},
{
"epoch": 0.21,
"grad_norm": 0.0458984375,
"learning_rate": 1.8390719910244487e-05,
"loss": 0.8498,
"step": 320
},
{
"epoch": 0.21,
"grad_norm": 0.044677734375,
"learning_rate": 1.8379238580129256e-05,
"loss": 0.798,
"step": 321
},
{
"epoch": 0.21,
"grad_norm": 0.044677734375,
"learning_rate": 1.836772004950744e-05,
"loss": 0.8746,
"step": 322
},
{
"epoch": 0.21,
"grad_norm": 0.04541015625,
"learning_rate": 1.8356164369516772e-05,
"loss": 0.8658,
"step": 323
},
{
"epoch": 0.21,
"grad_norm": 0.0400390625,
"learning_rate": 1.834457159145989e-05,
"loss": 0.8299,
"step": 324
},
{
"epoch": 0.21,
"grad_norm": 0.0458984375,
"learning_rate": 1.8332941766804152e-05,
"loss": 0.8723,
"step": 325
},
{
"epoch": 0.21,
"grad_norm": 0.04150390625,
"learning_rate": 1.832127494718138e-05,
"loss": 0.8311,
"step": 326
},
{
"epoch": 0.21,
"grad_norm": 0.0439453125,
"learning_rate": 1.830957118438764e-05,
"loss": 0.8159,
"step": 327
},
{
"epoch": 0.21,
"grad_norm": 0.044921875,
"learning_rate": 1.829783053038301e-05,
"loss": 0.8351,
"step": 328
},
{
"epoch": 0.21,
"grad_norm": 0.0419921875,
"learning_rate": 1.8286053037291356e-05,
"loss": 0.7679,
"step": 329
},
{
"epoch": 0.21,
"grad_norm": 0.04345703125,
"learning_rate": 1.8274238757400096e-05,
"loss": 0.7848,
"step": 330
},
{
"epoch": 0.22,
"grad_norm": 0.04150390625,
"learning_rate": 1.826238774315995e-05,
"loss": 0.8741,
"step": 331
},
{
"epoch": 0.22,
"grad_norm": 0.042236328125,
"learning_rate": 1.8250500047184744e-05,
"loss": 0.8517,
"step": 332
},
{
"epoch": 0.22,
"grad_norm": 0.046875,
"learning_rate": 1.8238575722251144e-05,
"loss": 0.8602,
"step": 333
},
{
"epoch": 0.22,
"grad_norm": 0.041259765625,
"learning_rate": 1.8226614821298444e-05,
"loss": 0.8087,
"step": 334
},
{
"epoch": 0.22,
"grad_norm": 0.042724609375,
"learning_rate": 1.821461739742831e-05,
"loss": 0.8301,
"step": 335
},
{
"epoch": 0.22,
"grad_norm": 0.046875,
"learning_rate": 1.820258350390456e-05,
"loss": 0.8342,
"step": 336
},
{
"epoch": 0.22,
"grad_norm": 0.043701171875,
"learning_rate": 1.819051319415293e-05,
"loss": 0.8249,
"step": 337
},
{
"epoch": 0.22,
"grad_norm": 0.041748046875,
"learning_rate": 1.817840652176082e-05,
"loss": 0.7909,
"step": 338
},
{
"epoch": 0.22,
"grad_norm": 0.04248046875,
"learning_rate": 1.8166263540477068e-05,
"loss": 0.8071,
"step": 339
},
{
"epoch": 0.22,
"grad_norm": 0.043212890625,
"learning_rate": 1.815408430421171e-05,
"loss": 0.7983,
"step": 340
},
{
"epoch": 0.22,
"grad_norm": 0.041748046875,
"learning_rate": 1.8141868867035745e-05,
"loss": 0.7877,
"step": 341
},
{
"epoch": 0.22,
"grad_norm": 0.04443359375,
"learning_rate": 1.8129617283180878e-05,
"loss": 0.9056,
"step": 342
},
{
"epoch": 0.22,
"grad_norm": 0.043212890625,
"learning_rate": 1.81173296070393e-05,
"loss": 0.8708,
"step": 343
},
{
"epoch": 0.22,
"grad_norm": 0.04541015625,
"learning_rate": 1.8105005893163436e-05,
"loss": 0.8387,
"step": 344
},
{
"epoch": 0.22,
"grad_norm": 0.042724609375,
"learning_rate": 1.8092646196265705e-05,
"loss": 0.8578,
"step": 345
},
{
"epoch": 0.22,
"grad_norm": 0.042236328125,
"learning_rate": 1.808025057121827e-05,
"loss": 0.8642,
"step": 346
},
{
"epoch": 0.23,
"grad_norm": 0.048095703125,
"learning_rate": 1.8067819073052813e-05,
"loss": 0.8058,
"step": 347
},
{
"epoch": 0.23,
"grad_norm": 0.041259765625,
"learning_rate": 1.8055351756960262e-05,
"loss": 0.8128,
"step": 348
},
{
"epoch": 0.23,
"grad_norm": 0.04296875,
"learning_rate": 1.804284867829058e-05,
"loss": 0.8387,
"step": 349
},
{
"epoch": 0.23,
"grad_norm": 0.0458984375,
"learning_rate": 1.8030309892552488e-05,
"loss": 0.9106,
"step": 350
},
{
"epoch": 0.23,
"grad_norm": 0.041259765625,
"learning_rate": 1.801773545541324e-05,
"loss": 0.752,
"step": 351
},
{
"epoch": 0.23,
"grad_norm": 0.044189453125,
"learning_rate": 1.800512542269836e-05,
"loss": 0.881,
"step": 352
},
{
"epoch": 0.23,
"grad_norm": 0.045166015625,
"learning_rate": 1.7992479850391416e-05,
"loss": 0.8004,
"step": 353
},
{
"epoch": 0.23,
"grad_norm": 0.046875,
"learning_rate": 1.797979879463375e-05,
"loss": 0.8075,
"step": 354
},
{
"epoch": 0.23,
"grad_norm": 0.044677734375,
"learning_rate": 1.796708231172423e-05,
"loss": 0.8315,
"step": 355
},
{
"epoch": 0.23,
"grad_norm": 0.041259765625,
"learning_rate": 1.795433045811901e-05,
"loss": 0.8506,
"step": 356
},
{
"epoch": 0.23,
"grad_norm": 0.051025390625,
"learning_rate": 1.7941543290431286e-05,
"loss": 0.8314,
"step": 357
},
{
"epoch": 0.23,
"grad_norm": 0.045166015625,
"learning_rate": 1.792872086543103e-05,
"loss": 0.7697,
"step": 358
},
{
"epoch": 0.23,
"grad_norm": 0.04541015625,
"learning_rate": 1.7915863240044727e-05,
"loss": 0.9001,
"step": 359
},
{
"epoch": 0.23,
"grad_norm": 0.04443359375,
"learning_rate": 1.7902970471355162e-05,
"loss": 0.7685,
"step": 360
},
{
"epoch": 0.23,
"grad_norm": 0.0458984375,
"learning_rate": 1.7890042616601125e-05,
"loss": 0.8105,
"step": 361
},
{
"epoch": 0.24,
"grad_norm": 0.044677734375,
"learning_rate": 1.7877079733177185e-05,
"loss": 0.9061,
"step": 362
},
{
"epoch": 0.24,
"grad_norm": 0.043212890625,
"learning_rate": 1.7864081878633414e-05,
"loss": 0.813,
"step": 363
},
{
"epoch": 0.24,
"grad_norm": 0.04345703125,
"learning_rate": 1.785104911067515e-05,
"loss": 0.8197,
"step": 364
},
{
"epoch": 0.24,
"grad_norm": 0.044189453125,
"learning_rate": 1.783798148716273e-05,
"loss": 0.894,
"step": 365
},
{
"epoch": 0.24,
"grad_norm": 0.04296875,
"learning_rate": 1.782487906611124e-05,
"loss": 0.7809,
"step": 366
},
{
"epoch": 0.24,
"grad_norm": 0.04296875,
"learning_rate": 1.781174190569024e-05,
"loss": 0.8428,
"step": 367
},
{
"epoch": 0.24,
"grad_norm": 0.049072265625,
"learning_rate": 1.7798570064223536e-05,
"loss": 0.8276,
"step": 368
},
{
"epoch": 0.24,
"grad_norm": 0.04541015625,
"learning_rate": 1.7785363600188894e-05,
"loss": 0.7937,
"step": 369
},
{
"epoch": 0.24,
"grad_norm": 0.03955078125,
"learning_rate": 1.7772122572217796e-05,
"loss": 0.7835,
"step": 370
},
{
"epoch": 0.24,
"grad_norm": 0.04833984375,
"learning_rate": 1.7758847039095167e-05,
"loss": 0.8456,
"step": 371
},
{
"epoch": 0.24,
"grad_norm": 0.0439453125,
"learning_rate": 1.774553705975913e-05,
"loss": 0.8483,
"step": 372
},
{
"epoch": 0.24,
"grad_norm": 0.041259765625,
"learning_rate": 1.773219269330073e-05,
"loss": 0.7902,
"step": 373
},
{
"epoch": 0.24,
"grad_norm": 0.04296875,
"learning_rate": 1.7718813998963678e-05,
"loss": 0.8734,
"step": 374
},
{
"epoch": 0.24,
"grad_norm": 0.044921875,
"learning_rate": 1.7705401036144086e-05,
"loss": 0.8646,
"step": 375
},
{
"epoch": 0.24,
"grad_norm": 0.04345703125,
"learning_rate": 1.7691953864390208e-05,
"loss": 0.8005,
"step": 376
},
{
"epoch": 0.25,
"grad_norm": 0.0419921875,
"learning_rate": 1.7678472543402166e-05,
"loss": 0.8701,
"step": 377
},
{
"epoch": 0.25,
"grad_norm": 0.045166015625,
"learning_rate": 1.7664957133031705e-05,
"loss": 0.8099,
"step": 378
},
{
"epoch": 0.25,
"grad_norm": 0.057373046875,
"learning_rate": 1.7651407693281896e-05,
"loss": 0.8524,
"step": 379
},
{
"epoch": 0.25,
"grad_norm": 0.05224609375,
"learning_rate": 1.7637824284306898e-05,
"loss": 0.8456,
"step": 380
},
{
"epoch": 0.25,
"grad_norm": 0.05078125,
"learning_rate": 1.762420696641167e-05,
"loss": 0.7977,
"step": 381
},
{
"epoch": 0.25,
"grad_norm": 0.044189453125,
"learning_rate": 1.7610555800051727e-05,
"loss": 0.7834,
"step": 382
},
{
"epoch": 0.25,
"grad_norm": 0.045166015625,
"learning_rate": 1.759687084583285e-05,
"loss": 0.7946,
"step": 383
},
{
"epoch": 0.25,
"grad_norm": 0.04443359375,
"learning_rate": 1.7583152164510827e-05,
"loss": 0.7456,
"step": 384
},
{
"epoch": 0.25,
"grad_norm": 0.044677734375,
"learning_rate": 1.7569399816991174e-05,
"loss": 0.8358,
"step": 385
},
{
"epoch": 0.25,
"grad_norm": 0.047119140625,
"learning_rate": 1.7555613864328876e-05,
"loss": 0.7976,
"step": 386
},
{
"epoch": 0.25,
"grad_norm": 0.09423828125,
"learning_rate": 1.754179436772812e-05,
"loss": 0.9486,
"step": 387
},
{
"epoch": 0.25,
"grad_norm": 0.046142578125,
"learning_rate": 1.7527941388542006e-05,
"loss": 0.7898,
"step": 388
},
{
"epoch": 0.25,
"grad_norm": 0.04150390625,
"learning_rate": 1.751405498827228e-05,
"loss": 0.7644,
"step": 389
},
{
"epoch": 0.25,
"grad_norm": 0.04931640625,
"learning_rate": 1.7500135228569067e-05,
"loss": 0.8363,
"step": 390
},
{
"epoch": 0.25,
"grad_norm": 0.047119140625,
"learning_rate": 1.748618217123061e-05,
"loss": 0.801,
"step": 391
},
{
"epoch": 0.25,
"grad_norm": 0.044677734375,
"learning_rate": 1.7472195878202955e-05,
"loss": 0.8487,
"step": 392
},
{
"epoch": 0.26,
"grad_norm": 0.046142578125,
"learning_rate": 1.7458176411579715e-05,
"loss": 0.8884,
"step": 393
},
{
"epoch": 0.26,
"grad_norm": 0.0439453125,
"learning_rate": 1.7444123833601784e-05,
"loss": 0.8484,
"step": 394
},
{
"epoch": 0.26,
"grad_norm": 0.043701171875,
"learning_rate": 1.743003820665705e-05,
"loss": 0.8325,
"step": 395
},
{
"epoch": 0.26,
"grad_norm": 0.048095703125,
"learning_rate": 1.741591959328013e-05,
"loss": 0.8061,
"step": 396
},
{
"epoch": 0.26,
"grad_norm": 0.047607421875,
"learning_rate": 1.7401768056152083e-05,
"loss": 0.7888,
"step": 397
},
{
"epoch": 0.26,
"grad_norm": 0.047119140625,
"learning_rate": 1.7387583658100144e-05,
"loss": 0.8564,
"step": 398
},
{
"epoch": 0.26,
"grad_norm": 0.044677734375,
"learning_rate": 1.737336646209742e-05,
"loss": 0.8412,
"step": 399
},
{
"epoch": 0.26,
"grad_norm": 0.0439453125,
"learning_rate": 1.7359116531262654e-05,
"loss": 0.9182,
"step": 400
},
{
"epoch": 0.26,
"grad_norm": 0.047119140625,
"learning_rate": 1.73448339288599e-05,
"loss": 0.8653,
"step": 401
},
{
"epoch": 0.26,
"grad_norm": 0.0478515625,
"learning_rate": 1.7330518718298263e-05,
"loss": 0.8174,
"step": 402
},
{
"epoch": 0.26,
"grad_norm": 0.05859375,
"learning_rate": 1.7316170963131627e-05,
"loss": 0.8621,
"step": 403
},
{
"epoch": 0.26,
"grad_norm": 0.047119140625,
"learning_rate": 1.7301790727058344e-05,
"loss": 0.7991,
"step": 404
},
{
"epoch": 0.26,
"grad_norm": 0.046142578125,
"learning_rate": 1.728737807392098e-05,
"loss": 0.8706,
"step": 405
},
{
"epoch": 0.26,
"grad_norm": 0.046142578125,
"learning_rate": 1.727293306770602e-05,
"loss": 0.824,
"step": 406
},
{
"epoch": 0.26,
"grad_norm": 0.0498046875,
"learning_rate": 1.7258455772543573e-05,
"loss": 0.9865,
"step": 407
},
{
"epoch": 0.27,
"grad_norm": 0.0537109375,
"learning_rate": 1.7243946252707115e-05,
"loss": 0.844,
"step": 408
},
{
"epoch": 0.27,
"grad_norm": 0.04052734375,
"learning_rate": 1.7229404572613174e-05,
"loss": 0.7566,
"step": 409
},
{
"epoch": 0.27,
"grad_norm": 0.0439453125,
"learning_rate": 1.721483079682106e-05,
"loss": 0.8393,
"step": 410
},
{
"epoch": 0.27,
"grad_norm": 0.0458984375,
"learning_rate": 1.7200224990032577e-05,
"loss": 0.7992,
"step": 411
},
{
"epoch": 0.27,
"grad_norm": 0.04443359375,
"learning_rate": 1.7185587217091727e-05,
"loss": 0.8862,
"step": 412
},
{
"epoch": 0.27,
"grad_norm": 0.046630859375,
"learning_rate": 1.7170917542984445e-05,
"loss": 0.8859,
"step": 413
},
{
"epoch": 0.27,
"grad_norm": 0.050537109375,
"learning_rate": 1.7156216032838275e-05,
"loss": 0.8738,
"step": 414
},
{
"epoch": 0.27,
"grad_norm": 0.047119140625,
"learning_rate": 1.7141482751922117e-05,
"loss": 0.8702,
"step": 415
},
{
"epoch": 0.27,
"grad_norm": 0.044677734375,
"learning_rate": 1.7126717765645908e-05,
"loss": 0.8496,
"step": 416
},
{
"epoch": 0.27,
"grad_norm": 0.047607421875,
"learning_rate": 1.7111921139560356e-05,
"loss": 0.8402,
"step": 417
},
{
"epoch": 0.27,
"grad_norm": 0.044677734375,
"learning_rate": 1.7097092939356622e-05,
"loss": 0.8719,
"step": 418
},
{
"epoch": 0.27,
"grad_norm": 0.05126953125,
"learning_rate": 1.7082233230866064e-05,
"loss": 0.865,
"step": 419
},
{
"epoch": 0.27,
"grad_norm": 0.0517578125,
"learning_rate": 1.7067342080059904e-05,
"loss": 0.8876,
"step": 420
},
{
"epoch": 0.27,
"grad_norm": 0.0478515625,
"learning_rate": 1.7052419553048965e-05,
"loss": 0.8594,
"step": 421
},
{
"epoch": 0.27,
"grad_norm": 0.041015625,
"learning_rate": 1.703746571608337e-05,
"loss": 0.7774,
"step": 422
},
{
"epoch": 0.28,
"grad_norm": 0.04638671875,
"learning_rate": 1.7022480635552243e-05,
"loss": 0.8357,
"step": 423
},
{
"epoch": 0.28,
"grad_norm": 0.044189453125,
"learning_rate": 1.700746437798342e-05,
"loss": 0.8365,
"step": 424
},
{
"epoch": 0.28,
"grad_norm": 0.0517578125,
"learning_rate": 1.6992417010043144e-05,
"loss": 0.7916,
"step": 425
},
{
"epoch": 0.28,
"grad_norm": 0.0439453125,
"learning_rate": 1.6977338598535776e-05,
"loss": 0.886,
"step": 426
},
{
"epoch": 0.28,
"grad_norm": 0.04638671875,
"learning_rate": 1.696222921040351e-05,
"loss": 0.8391,
"step": 427
},
{
"epoch": 0.28,
"grad_norm": 0.045166015625,
"learning_rate": 1.6947088912726054e-05,
"loss": 0.8403,
"step": 428
},
{
"epoch": 0.28,
"grad_norm": 0.046875,
"learning_rate": 1.693191777272034e-05,
"loss": 0.8048,
"step": 429
},
{
"epoch": 0.28,
"grad_norm": 0.049072265625,
"learning_rate": 1.6916715857740234e-05,
"loss": 0.7742,
"step": 430
},
{
"epoch": 0.28,
"grad_norm": 0.045166015625,
"learning_rate": 1.690148323527623e-05,
"loss": 0.7859,
"step": 431
},
{
"epoch": 0.28,
"grad_norm": 0.045166015625,
"learning_rate": 1.688621997295515e-05,
"loss": 0.7956,
"step": 432
},
{
"epoch": 0.28,
"grad_norm": 0.04443359375,
"learning_rate": 1.6870926138539837e-05,
"loss": 0.8672,
"step": 433
},
{
"epoch": 0.28,
"grad_norm": 0.060546875,
"learning_rate": 1.6855601799928877e-05,
"loss": 0.848,
"step": 434
},
{
"epoch": 0.28,
"grad_norm": 0.046875,
"learning_rate": 1.6840247025156272e-05,
"loss": 0.8125,
"step": 435
},
{
"epoch": 0.28,
"grad_norm": 0.052001953125,
"learning_rate": 1.6824861882391154e-05,
"loss": 0.8359,
"step": 436
},
{
"epoch": 0.28,
"grad_norm": 0.048583984375,
"learning_rate": 1.6809446439937472e-05,
"loss": 0.877,
"step": 437
},
{
"epoch": 0.28,
"grad_norm": 0.04833984375,
"learning_rate": 1.6794000766233697e-05,
"loss": 0.8408,
"step": 438
},
{
"epoch": 0.29,
"grad_norm": 0.04833984375,
"learning_rate": 1.6778524929852513e-05,
"loss": 0.8381,
"step": 439
},
{
"epoch": 0.29,
"grad_norm": 0.052490234375,
"learning_rate": 1.676301899950052e-05,
"loss": 0.782,
"step": 440
},
{
"epoch": 0.29,
"grad_norm": 0.04345703125,
"learning_rate": 1.674748304401791e-05,
"loss": 0.8621,
"step": 441
},
{
"epoch": 0.29,
"grad_norm": 0.04345703125,
"learning_rate": 1.673191713237819e-05,
"loss": 0.8012,
"step": 442
},
{
"epoch": 0.29,
"grad_norm": 0.05322265625,
"learning_rate": 1.671632133368785e-05,
"loss": 0.8245,
"step": 443
},
{
"epoch": 0.29,
"grad_norm": 0.0478515625,
"learning_rate": 1.670069571718607e-05,
"loss": 0.7882,
"step": 444
},
{
"epoch": 0.29,
"grad_norm": 0.045166015625,
"learning_rate": 1.6685040352244414e-05,
"loss": 0.8387,
"step": 445
},
{
"epoch": 0.29,
"grad_norm": 0.055419921875,
"learning_rate": 1.666935530836651e-05,
"loss": 0.7766,
"step": 446
},
{
"epoch": 0.29,
"grad_norm": 0.04931640625,
"learning_rate": 1.665364065518775e-05,
"loss": 0.8204,
"step": 447
},
{
"epoch": 0.29,
"grad_norm": 0.046630859375,
"learning_rate": 1.6637896462474986e-05,
"loss": 0.8133,
"step": 448
},
{
"epoch": 0.29,
"grad_norm": 0.048828125,
"learning_rate": 1.662212280012621e-05,
"loss": 0.85,
"step": 449
},
{
"epoch": 0.29,
"grad_norm": 0.045654296875,
"learning_rate": 1.660631973817024e-05,
"loss": 0.8247,
"step": 450
},
{
"epoch": 0.29,
"grad_norm": 0.048583984375,
"learning_rate": 1.6590487346766426e-05,
"loss": 0.8977,
"step": 451
},
{
"epoch": 0.29,
"grad_norm": 0.05712890625,
"learning_rate": 1.657462569620433e-05,
"loss": 0.8456,
"step": 452
},
{
"epoch": 0.29,
"grad_norm": 0.048583984375,
"learning_rate": 1.6558734856903406e-05,
"loss": 0.8369,
"step": 453
},
{
"epoch": 0.3,
"grad_norm": 0.04638671875,
"learning_rate": 1.6542814899412694e-05,
"loss": 0.8055,
"step": 454
},
{
"epoch": 0.3,
"grad_norm": 0.04443359375,
"learning_rate": 1.6526865894410526e-05,
"loss": 0.8358,
"step": 455
},
{
"epoch": 0.3,
"grad_norm": 0.046630859375,
"learning_rate": 1.651088791270416e-05,
"loss": 0.8094,
"step": 456
},
{
"epoch": 0.3,
"grad_norm": 0.04833984375,
"learning_rate": 1.6494881025229535e-05,
"loss": 0.8518,
"step": 457
},
{
"epoch": 0.3,
"grad_norm": 0.048583984375,
"learning_rate": 1.647884530305089e-05,
"loss": 0.9644,
"step": 458
},
{
"epoch": 0.3,
"grad_norm": 0.046630859375,
"learning_rate": 1.6462780817360502e-05,
"loss": 0.8415,
"step": 459
},
{
"epoch": 0.3,
"grad_norm": 0.050537109375,
"learning_rate": 1.644668763947833e-05,
"loss": 0.8764,
"step": 460
},
{
"epoch": 0.3,
"grad_norm": 0.044921875,
"learning_rate": 1.6430565840851723e-05,
"loss": 0.7737,
"step": 461
},
{
"epoch": 0.3,
"grad_norm": 0.047607421875,
"learning_rate": 1.641441549305509e-05,
"loss": 0.7559,
"step": 462
},
{
"epoch": 0.3,
"grad_norm": 0.044677734375,
"learning_rate": 1.6398236667789595e-05,
"loss": 0.7893,
"step": 463
},
{
"epoch": 0.3,
"grad_norm": 0.045166015625,
"learning_rate": 1.6382029436882826e-05,
"loss": 0.8285,
"step": 464
},
{
"epoch": 0.3,
"grad_norm": 0.05419921875,
"learning_rate": 1.636579387228848e-05,
"loss": 0.9,
"step": 465
},
{
"epoch": 0.3,
"grad_norm": 0.052978515625,
"learning_rate": 1.634953004608604e-05,
"loss": 0.9457,
"step": 466
},
{
"epoch": 0.3,
"grad_norm": 0.044921875,
"learning_rate": 1.6333238030480473e-05,
"loss": 0.8015,
"step": 467
},
{
"epoch": 0.3,
"grad_norm": 0.056884765625,
"learning_rate": 1.631691789780188e-05,
"loss": 0.8978,
"step": 468
},
{
"epoch": 0.3,
"grad_norm": 0.04443359375,
"learning_rate": 1.6300569720505198e-05,
"loss": 0.8787,
"step": 469
},
{
"epoch": 0.31,
"grad_norm": 0.047119140625,
"learning_rate": 1.6284193571169878e-05,
"loss": 0.8,
"step": 470
},
{
"epoch": 0.31,
"grad_norm": 0.0498046875,
"learning_rate": 1.6267789522499545e-05,
"loss": 0.7745,
"step": 471
},
{
"epoch": 0.31,
"grad_norm": 0.051025390625,
"learning_rate": 1.6251357647321685e-05,
"loss": 0.8191,
"step": 472
},
{
"epoch": 0.31,
"grad_norm": 0.046142578125,
"learning_rate": 1.6234898018587336e-05,
"loss": 0.7503,
"step": 473
},
{
"epoch": 0.31,
"grad_norm": 0.048095703125,
"learning_rate": 1.6218410709370735e-05,
"loss": 0.8587,
"step": 474
},
{
"epoch": 0.31,
"grad_norm": 0.05029296875,
"learning_rate": 1.6201895792869023e-05,
"loss": 0.8287,
"step": 475
},
{
"epoch": 0.31,
"grad_norm": 0.0517578125,
"learning_rate": 1.6185353342401896e-05,
"loss": 0.8017,
"step": 476
},
{
"epoch": 0.31,
"grad_norm": 0.04833984375,
"learning_rate": 1.6168783431411295e-05,
"loss": 0.8638,
"step": 477
},
{
"epoch": 0.31,
"grad_norm": 0.0498046875,
"learning_rate": 1.6152186133461075e-05,
"loss": 0.8111,
"step": 478
},
{
"epoch": 0.31,
"grad_norm": 0.0458984375,
"learning_rate": 1.6135561522236675e-05,
"loss": 0.7919,
"step": 479
},
{
"epoch": 0.31,
"grad_norm": 0.05078125,
"learning_rate": 1.6118909671544797e-05,
"loss": 0.8731,
"step": 480
},
{
"epoch": 0.31,
"grad_norm": 0.049072265625,
"learning_rate": 1.6102230655313076e-05,
"loss": 0.9006,
"step": 481
},
{
"epoch": 0.31,
"grad_norm": 0.044921875,
"learning_rate": 1.6085524547589747e-05,
"loss": 0.7621,
"step": 482
},
{
"epoch": 0.31,
"grad_norm": 0.050537109375,
"learning_rate": 1.6068791422543327e-05,
"loss": 0.9065,
"step": 483
},
{
"epoch": 0.31,
"grad_norm": 0.051025390625,
"learning_rate": 1.6052031354462275e-05,
"loss": 0.8989,
"step": 484
},
{
"epoch": 0.32,
"grad_norm": 0.047119140625,
"learning_rate": 1.6035244417754666e-05,
"loss": 0.82,
"step": 485
},
{
"epoch": 0.32,
"grad_norm": 0.048095703125,
"learning_rate": 1.6018430686947865e-05,
"loss": 0.8926,
"step": 486
},
{
"epoch": 0.32,
"grad_norm": 0.047607421875,
"learning_rate": 1.6001590236688187e-05,
"loss": 0.8628,
"step": 487
},
{
"epoch": 0.32,
"grad_norm": 0.0517578125,
"learning_rate": 1.5984723141740578e-05,
"loss": 0.8583,
"step": 488
},
{
"epoch": 0.32,
"grad_norm": 0.048095703125,
"learning_rate": 1.596782947698826e-05,
"loss": 0.8314,
"step": 489
},
{
"epoch": 0.32,
"grad_norm": 0.058837890625,
"learning_rate": 1.5950909317432436e-05,
"loss": 0.8222,
"step": 490
},
{
"epoch": 0.32,
"grad_norm": 0.0458984375,
"learning_rate": 1.593396273819192e-05,
"loss": 0.8258,
"step": 491
},
{
"epoch": 0.32,
"grad_norm": 0.048583984375,
"learning_rate": 1.591698981450283e-05,
"loss": 0.8246,
"step": 492
},
{
"epoch": 0.32,
"grad_norm": 0.046142578125,
"learning_rate": 1.5899990621718232e-05,
"loss": 0.868,
"step": 493
},
{
"epoch": 0.32,
"grad_norm": 0.046630859375,
"learning_rate": 1.588296523530782e-05,
"loss": 0.835,
"step": 494
},
{
"epoch": 0.32,
"grad_norm": 0.04736328125,
"learning_rate": 1.5865913730857583e-05,
"loss": 0.8298,
"step": 495
},
{
"epoch": 0.32,
"grad_norm": 0.04833984375,
"learning_rate": 1.584883618406946e-05,
"loss": 0.8405,
"step": 496
},
{
"epoch": 0.32,
"grad_norm": 0.051025390625,
"learning_rate": 1.5831732670761e-05,
"loss": 0.8951,
"step": 497
},
{
"epoch": 0.32,
"grad_norm": 0.044921875,
"learning_rate": 1.5814603266865046e-05,
"loss": 0.8396,
"step": 498
},
{
"epoch": 0.32,
"grad_norm": 0.04736328125,
"learning_rate": 1.5797448048429377e-05,
"loss": 0.8433,
"step": 499
},
{
"epoch": 0.33,
"grad_norm": 0.046142578125,
"learning_rate": 1.5780267091616383e-05,
"loss": 0.869,
"step": 500
},
{
"epoch": 0.33,
"grad_norm": 0.048095703125,
"learning_rate": 1.576306047270272e-05,
"loss": 0.8454,
"step": 501
},
{
"epoch": 0.33,
"grad_norm": 0.0478515625,
"learning_rate": 1.574582826807897e-05,
"loss": 0.7873,
"step": 502
},
{
"epoch": 0.33,
"grad_norm": 0.056884765625,
"learning_rate": 1.5728570554249312e-05,
"loss": 0.9116,
"step": 503
},
{
"epoch": 0.33,
"grad_norm": 0.0546875,
"learning_rate": 1.571128740783117e-05,
"loss": 0.8564,
"step": 504
},
{
"epoch": 0.33,
"grad_norm": 0.04833984375,
"learning_rate": 1.5693978905554886e-05,
"loss": 0.8302,
"step": 505
},
{
"epoch": 0.33,
"grad_norm": 0.050537109375,
"learning_rate": 1.567664512426336e-05,
"loss": 0.8041,
"step": 506
},
{
"epoch": 0.33,
"grad_norm": 0.0537109375,
"learning_rate": 1.5659286140911733e-05,
"loss": 0.8421,
"step": 507
},
{
"epoch": 0.33,
"grad_norm": 0.05859375,
"learning_rate": 1.5641902032567023e-05,
"loss": 0.9291,
"step": 508
},
{
"epoch": 0.33,
"grad_norm": 0.05712890625,
"learning_rate": 1.562449287640781e-05,
"loss": 0.8886,
"step": 509
},
{
"epoch": 0.33,
"grad_norm": 0.052001953125,
"learning_rate": 1.560705874972385e-05,
"loss": 0.8472,
"step": 510
},
{
"epoch": 0.33,
"grad_norm": 0.0517578125,
"learning_rate": 1.5589599729915783e-05,
"loss": 0.8108,
"step": 511
},
{
"epoch": 0.33,
"grad_norm": 0.049072265625,
"learning_rate": 1.5572115894494752e-05,
"loss": 0.868,
"step": 512
},
{
"epoch": 0.33,
"grad_norm": 0.04833984375,
"learning_rate": 1.5554607321082077e-05,
"loss": 0.8719,
"step": 513
},
{
"epoch": 0.33,
"grad_norm": 0.0517578125,
"learning_rate": 1.5537074087408894e-05,
"loss": 0.8289,
"step": 514
},
{
"epoch": 0.33,
"grad_norm": 0.05615234375,
"learning_rate": 1.5519516271315834e-05,
"loss": 0.8315,
"step": 515
},
{
"epoch": 0.34,
"grad_norm": 0.049072265625,
"learning_rate": 1.5501933950752655e-05,
"loss": 0.7429,
"step": 516
},
{
"epoch": 0.34,
"grad_norm": 0.0556640625,
"learning_rate": 1.5484327203777917e-05,
"loss": 0.7782,
"step": 517
},
{
"epoch": 0.34,
"grad_norm": 0.044189453125,
"learning_rate": 1.5466696108558614e-05,
"loss": 0.8454,
"step": 518
},
{
"epoch": 0.34,
"grad_norm": 0.050537109375,
"learning_rate": 1.544904074336983e-05,
"loss": 0.7808,
"step": 519
},
{
"epoch": 0.34,
"grad_norm": 0.04736328125,
"learning_rate": 1.5431361186594415e-05,
"loss": 0.7698,
"step": 520
},
{
"epoch": 0.34,
"grad_norm": 0.044921875,
"learning_rate": 1.5413657516722607e-05,
"loss": 0.8511,
"step": 521
},
{
"epoch": 0.34,
"grad_norm": 0.0498046875,
"learning_rate": 1.53959298123517e-05,
"loss": 0.8284,
"step": 522
},
{
"epoch": 0.34,
"grad_norm": 0.054443359375,
"learning_rate": 1.5378178152185703e-05,
"loss": 0.852,
"step": 523
},
{
"epoch": 0.34,
"grad_norm": 0.05126953125,
"learning_rate": 1.5360402615034958e-05,
"loss": 0.8806,
"step": 524
},
{
"epoch": 0.34,
"grad_norm": 0.05224609375,
"learning_rate": 1.5342603279815826e-05,
"loss": 0.778,
"step": 525
},
{
"epoch": 0.34,
"grad_norm": 0.05126953125,
"learning_rate": 1.5324780225550316e-05,
"loss": 0.8209,
"step": 526
},
{
"epoch": 0.34,
"grad_norm": 0.049072265625,
"learning_rate": 1.5306933531365748e-05,
"loss": 0.8506,
"step": 527
},
{
"epoch": 0.34,
"grad_norm": 0.049072265625,
"learning_rate": 1.5289063276494384e-05,
"loss": 0.8609,
"step": 528
},
{
"epoch": 0.34,
"grad_norm": 0.051025390625,
"learning_rate": 1.5271169540273093e-05,
"loss": 0.848,
"step": 529
},
{
"epoch": 0.34,
"grad_norm": 0.049560546875,
"learning_rate": 1.5253252402142989e-05,
"loss": 0.7898,
"step": 530
},
{
"epoch": 0.35,
"grad_norm": 0.049560546875,
"learning_rate": 1.5235311941649085e-05,
"loss": 0.845,
"step": 531
},
{
"epoch": 0.35,
"grad_norm": 0.052490234375,
"learning_rate": 1.5217348238439922e-05,
"loss": 0.8216,
"step": 532
},
{
"epoch": 0.35,
"grad_norm": 0.050048828125,
"learning_rate": 1.5199361372267252e-05,
"loss": 0.8517,
"step": 533
},
{
"epoch": 0.35,
"grad_norm": 0.05029296875,
"learning_rate": 1.5181351422985646e-05,
"loss": 0.8148,
"step": 534
},
{
"epoch": 0.35,
"grad_norm": 0.050537109375,
"learning_rate": 1.516331847055216e-05,
"loss": 0.8649,
"step": 535
},
{
"epoch": 0.35,
"grad_norm": 0.046142578125,
"learning_rate": 1.514526259502597e-05,
"loss": 0.8321,
"step": 536
},
{
"epoch": 0.35,
"grad_norm": 0.048583984375,
"learning_rate": 1.5127183876568024e-05,
"loss": 0.8175,
"step": 537
},
{
"epoch": 0.35,
"grad_norm": 0.04931640625,
"learning_rate": 1.5109082395440689e-05,
"loss": 0.7704,
"step": 538
},
{
"epoch": 0.35,
"grad_norm": 0.0595703125,
"learning_rate": 1.5090958232007383e-05,
"loss": 0.9095,
"step": 539
},
{
"epoch": 0.35,
"grad_norm": 0.049560546875,
"learning_rate": 1.507281146673223e-05,
"loss": 0.8498,
"step": 540
},
{
"epoch": 0.35,
"grad_norm": 0.04833984375,
"learning_rate": 1.5054642180179684e-05,
"loss": 0.8709,
"step": 541
},
{
"epoch": 0.35,
"grad_norm": 0.050537109375,
"learning_rate": 1.5036450453014202e-05,
"loss": 0.8439,
"step": 542
},
{
"epoch": 0.35,
"grad_norm": 0.048583984375,
"learning_rate": 1.5018236365999862e-05,
"loss": 0.9223,
"step": 543
},
{
"epoch": 0.35,
"grad_norm": 0.045654296875,
"learning_rate": 1.5000000000000002e-05,
"loss": 0.8336,
"step": 544
},
{
"epoch": 0.35,
"grad_norm": 0.049072265625,
"learning_rate": 1.4981741435976882e-05,
"loss": 0.8523,
"step": 545
},
{
"epoch": 0.36,
"grad_norm": 0.048583984375,
"learning_rate": 1.4963460754991309e-05,
"loss": 0.8496,
"step": 546
},
{
"epoch": 0.36,
"grad_norm": 0.0478515625,
"learning_rate": 1.4945158038202274e-05,
"loss": 0.8392,
"step": 547
},
{
"epoch": 0.36,
"grad_norm": 0.050048828125,
"learning_rate": 1.4926833366866611e-05,
"loss": 0.7421,
"step": 548
},
{
"epoch": 0.36,
"grad_norm": 0.052734375,
"learning_rate": 1.4908486822338611e-05,
"loss": 0.8284,
"step": 549
},
{
"epoch": 0.36,
"grad_norm": 0.048583984375,
"learning_rate": 1.489011848606968e-05,
"loss": 0.8383,
"step": 550
},
{
"epoch": 0.36,
"grad_norm": 0.0517578125,
"learning_rate": 1.4871728439607967e-05,
"loss": 0.8099,
"step": 551
},
{
"epoch": 0.36,
"grad_norm": 0.047607421875,
"learning_rate": 1.4853316764598011e-05,
"loss": 0.9332,
"step": 552
},
{
"epoch": 0.36,
"grad_norm": 0.046875,
"learning_rate": 1.4834883542780367e-05,
"loss": 0.8748,
"step": 553
},
{
"epoch": 0.36,
"grad_norm": 0.050048828125,
"learning_rate": 1.4816428855991257e-05,
"loss": 0.7789,
"step": 554
},
{
"epoch": 0.36,
"grad_norm": 0.051025390625,
"learning_rate": 1.4797952786162188e-05,
"loss": 0.8209,
"step": 555
},
{
"epoch": 0.36,
"grad_norm": 0.051025390625,
"learning_rate": 1.4779455415319612e-05,
"loss": 0.8381,
"step": 556
},
{
"epoch": 0.36,
"grad_norm": 0.05029296875,
"learning_rate": 1.4760936825584535e-05,
"loss": 0.7684,
"step": 557
},
{
"epoch": 0.36,
"grad_norm": 0.048828125,
"learning_rate": 1.4742397099172183e-05,
"loss": 0.8275,
"step": 558
},
{
"epoch": 0.36,
"grad_norm": 0.05224609375,
"learning_rate": 1.4723836318391607e-05,
"loss": 0.9173,
"step": 559
},
{
"epoch": 0.36,
"grad_norm": 0.05322265625,
"learning_rate": 1.4705254565645335e-05,
"loss": 0.7534,
"step": 560
},
{
"epoch": 0.36,
"grad_norm": 0.052001953125,
"learning_rate": 1.4686651923429002e-05,
"loss": 0.7822,
"step": 561
},
{
"epoch": 0.37,
"grad_norm": 0.04931640625,
"learning_rate": 1.4668028474330989e-05,
"loss": 0.8575,
"step": 562
},
{
"epoch": 0.37,
"grad_norm": 0.052978515625,
"learning_rate": 1.4649384301032044e-05,
"loss": 0.8385,
"step": 563
},
{
"epoch": 0.37,
"grad_norm": 0.052490234375,
"learning_rate": 1.4630719486304928e-05,
"loss": 0.8523,
"step": 564
},
{
"epoch": 0.37,
"grad_norm": 0.050537109375,
"learning_rate": 1.4612034113014036e-05,
"loss": 0.8569,
"step": 565
},
{
"epoch": 0.37,
"grad_norm": 0.051025390625,
"learning_rate": 1.4593328264115044e-05,
"loss": 0.7559,
"step": 566
},
{
"epoch": 0.37,
"grad_norm": 0.052490234375,
"learning_rate": 1.4574602022654516e-05,
"loss": 0.822,
"step": 567
},
{
"epoch": 0.37,
"grad_norm": 0.04833984375,
"learning_rate": 1.4555855471769572e-05,
"loss": 0.7773,
"step": 568
},
{
"epoch": 0.37,
"grad_norm": 0.047607421875,
"learning_rate": 1.4537088694687476e-05,
"loss": 0.8264,
"step": 569
},
{
"epoch": 0.37,
"grad_norm": 0.04833984375,
"learning_rate": 1.4518301774725308e-05,
"loss": 0.8323,
"step": 570
},
{
"epoch": 0.37,
"grad_norm": 0.0576171875,
"learning_rate": 1.4499494795289562e-05,
"loss": 0.8173,
"step": 571
},
{
"epoch": 0.37,
"grad_norm": 0.050048828125,
"learning_rate": 1.4480667839875786e-05,
"loss": 0.8007,
"step": 572
},
{
"epoch": 0.37,
"grad_norm": 0.046630859375,
"learning_rate": 1.4461820992068224e-05,
"loss": 0.8152,
"step": 573
},
{
"epoch": 0.37,
"grad_norm": 0.049560546875,
"learning_rate": 1.4442954335539432e-05,
"loss": 0.8258,
"step": 574
},
{
"epoch": 0.37,
"grad_norm": 0.046875,
"learning_rate": 1.4424067954049903e-05,
"loss": 0.8003,
"step": 575
},
{
"epoch": 0.37,
"grad_norm": 0.046875,
"learning_rate": 1.4405161931447702e-05,
"loss": 0.7678,
"step": 576
},
{
"epoch": 0.38,
"grad_norm": 0.049072265625,
"learning_rate": 1.4386236351668095e-05,
"loss": 0.8295,
"step": 577
},
{
"epoch": 0.38,
"grad_norm": 0.06298828125,
"learning_rate": 1.436729129873318e-05,
"loss": 0.8286,
"step": 578
},
{
"epoch": 0.38,
"grad_norm": 0.05322265625,
"learning_rate": 1.4348326856751496e-05,
"loss": 0.827,
"step": 579
},
{
"epoch": 0.38,
"grad_norm": 0.046142578125,
"learning_rate": 1.4329343109917671e-05,
"loss": 0.7671,
"step": 580
},
{
"epoch": 0.38,
"grad_norm": 0.050537109375,
"learning_rate": 1.431034014251203e-05,
"loss": 0.8195,
"step": 581
},
{
"epoch": 0.38,
"grad_norm": 0.05029296875,
"learning_rate": 1.4291318038900243e-05,
"loss": 0.8331,
"step": 582
},
{
"epoch": 0.38,
"grad_norm": 0.050537109375,
"learning_rate": 1.4272276883532927e-05,
"loss": 0.8458,
"step": 583
},
{
"epoch": 0.38,
"grad_norm": 0.05712890625,
"learning_rate": 1.4253216760945284e-05,
"loss": 0.8311,
"step": 584
},
{
"epoch": 0.38,
"grad_norm": 0.054931640625,
"learning_rate": 1.423413775575672e-05,
"loss": 0.8339,
"step": 585
},
{
"epoch": 0.38,
"grad_norm": 0.05029296875,
"learning_rate": 1.4215039952670482e-05,
"loss": 0.811,
"step": 586
},
{
"epoch": 0.38,
"grad_norm": 0.060546875,
"learning_rate": 1.4195923436473257e-05,
"loss": 0.8431,
"step": 587
},
{
"epoch": 0.38,
"grad_norm": 0.04833984375,
"learning_rate": 1.4176788292034824e-05,
"loss": 0.7789,
"step": 588
},
{
"epoch": 0.38,
"grad_norm": 0.052734375,
"learning_rate": 1.4157634604307661e-05,
"loss": 0.8773,
"step": 589
},
{
"epoch": 0.38,
"grad_norm": 0.05126953125,
"learning_rate": 1.413846245832657e-05,
"loss": 0.8304,
"step": 590
},
{
"epoch": 0.38,
"grad_norm": 0.052001953125,
"learning_rate": 1.411927193920829e-05,
"loss": 0.8578,
"step": 591
},
{
"epoch": 0.38,
"grad_norm": 0.05029296875,
"learning_rate": 1.4100063132151148e-05,
"loss": 0.7993,
"step": 592
},
{
"epoch": 0.39,
"grad_norm": 0.052001953125,
"learning_rate": 1.408083612243465e-05,
"loss": 0.8845,
"step": 593
},
{
"epoch": 0.39,
"grad_norm": 0.0517578125,
"learning_rate": 1.4061590995419118e-05,
"loss": 0.8415,
"step": 594
},
{
"epoch": 0.39,
"grad_norm": 0.0517578125,
"learning_rate": 1.404232783654531e-05,
"loss": 0.7521,
"step": 595
},
{
"epoch": 0.39,
"grad_norm": 0.04931640625,
"learning_rate": 1.402304673133403e-05,
"loss": 0.8167,
"step": 596
},
{
"epoch": 0.39,
"grad_norm": 0.050048828125,
"learning_rate": 1.4003747765385767e-05,
"loss": 0.7807,
"step": 597
},
{
"epoch": 0.39,
"grad_norm": 0.0498046875,
"learning_rate": 1.3984431024380301e-05,
"loss": 0.8049,
"step": 598
},
{
"epoch": 0.39,
"grad_norm": 0.053466796875,
"learning_rate": 1.3965096594076322e-05,
"loss": 0.8303,
"step": 599
},
{
"epoch": 0.39,
"grad_norm": 0.050048828125,
"learning_rate": 1.3945744560311056e-05,
"loss": 0.8684,
"step": 600
},
{
"epoch": 0.39,
"grad_norm": 0.053466796875,
"learning_rate": 1.3926375008999887e-05,
"loss": 0.7436,
"step": 601
},
{
"epoch": 0.39,
"grad_norm": 0.05029296875,
"learning_rate": 1.3906988026135957e-05,
"loss": 0.8579,
"step": 602
},
{
"epoch": 0.39,
"grad_norm": 0.04736328125,
"learning_rate": 1.3887583697789815e-05,
"loss": 0.7671,
"step": 603
},
{
"epoch": 0.39,
"grad_norm": 0.049072265625,
"learning_rate": 1.3868162110109001e-05,
"loss": 0.8025,
"step": 604
},
{
"epoch": 0.39,
"grad_norm": 0.054443359375,
"learning_rate": 1.3848723349317688e-05,
"loss": 0.8687,
"step": 605
},
{
"epoch": 0.39,
"grad_norm": 0.053955078125,
"learning_rate": 1.382926750171629e-05,
"loss": 0.8175,
"step": 606
},
{
"epoch": 0.39,
"grad_norm": 0.05078125,
"learning_rate": 1.3809794653681075e-05,
"loss": 0.8536,
"step": 607
},
{
"epoch": 0.4,
"grad_norm": 0.0498046875,
"learning_rate": 1.3790304891663793e-05,
"loss": 0.8289,
"step": 608
},
{
"epoch": 0.4,
"grad_norm": 0.051513671875,
"learning_rate": 1.3770798302191279e-05,
"loss": 0.7827,
"step": 609
},
{
"epoch": 0.4,
"grad_norm": 0.050537109375,
"learning_rate": 1.3751274971865086e-05,
"loss": 0.8537,
"step": 610
},
{
"epoch": 0.4,
"grad_norm": 0.048095703125,
"learning_rate": 1.3731734987361069e-05,
"loss": 0.8194,
"step": 611
},
{
"epoch": 0.4,
"grad_norm": 0.048095703125,
"learning_rate": 1.3712178435429044e-05,
"loss": 0.7846,
"step": 612
},
{
"epoch": 0.4,
"grad_norm": 0.0478515625,
"learning_rate": 1.3692605402892369e-05,
"loss": 0.8813,
"step": 613
},
{
"epoch": 0.4,
"grad_norm": 0.0478515625,
"learning_rate": 1.367301597664757e-05,
"loss": 0.7698,
"step": 614
},
{
"epoch": 0.4,
"grad_norm": 0.051513671875,
"learning_rate": 1.3653410243663953e-05,
"loss": 0.7923,
"step": 615
},
{
"epoch": 0.4,
"grad_norm": 0.0478515625,
"learning_rate": 1.3633788290983221e-05,
"loss": 0.7843,
"step": 616
},
{
"epoch": 0.4,
"grad_norm": 0.051025390625,
"learning_rate": 1.3614150205719086e-05,
"loss": 0.8806,
"step": 617
},
{
"epoch": 0.4,
"grad_norm": 0.046142578125,
"learning_rate": 1.3594496075056886e-05,
"loss": 0.7934,
"step": 618
},
{
"epoch": 0.4,
"grad_norm": 0.049072265625,
"learning_rate": 1.3574825986253191e-05,
"loss": 0.8547,
"step": 619
},
{
"epoch": 0.4,
"grad_norm": 0.04833984375,
"learning_rate": 1.3555140026635415e-05,
"loss": 0.8556,
"step": 620
},
{
"epoch": 0.4,
"grad_norm": 0.04931640625,
"learning_rate": 1.3535438283601437e-05,
"loss": 0.8136,
"step": 621
},
{
"epoch": 0.4,
"grad_norm": 0.051025390625,
"learning_rate": 1.3515720844619206e-05,
"loss": 0.7409,
"step": 622
},
{
"epoch": 0.41,
"grad_norm": 0.046875,
"learning_rate": 1.3495987797226362e-05,
"loss": 0.7693,
"step": 623
},
{
"epoch": 0.41,
"grad_norm": 0.05322265625,
"learning_rate": 1.3476239229029826e-05,
"loss": 0.8478,
"step": 624
},
{
"epoch": 0.41,
"grad_norm": 0.05859375,
"learning_rate": 1.3456475227705442e-05,
"loss": 0.8639,
"step": 625
},
{
"epoch": 0.41,
"grad_norm": 0.059326171875,
"learning_rate": 1.3436695880997551e-05,
"loss": 0.8093,
"step": 626
},
{
"epoch": 0.41,
"grad_norm": 0.050048828125,
"learning_rate": 1.3416901276718643e-05,
"loss": 0.7741,
"step": 627
},
{
"epoch": 0.41,
"grad_norm": 0.0615234375,
"learning_rate": 1.339709150274893e-05,
"loss": 0.8319,
"step": 628
},
{
"epoch": 0.41,
"grad_norm": 0.052490234375,
"learning_rate": 1.3377266647035977e-05,
"loss": 0.8408,
"step": 629
},
{
"epoch": 0.41,
"grad_norm": 0.052734375,
"learning_rate": 1.3357426797594309e-05,
"loss": 0.7784,
"step": 630
},
{
"epoch": 0.41,
"grad_norm": 0.05712890625,
"learning_rate": 1.3337572042505007e-05,
"loss": 0.7569,
"step": 631
},
{
"epoch": 0.41,
"grad_norm": 0.052490234375,
"learning_rate": 1.331770246991534e-05,
"loss": 0.8598,
"step": 632
},
{
"epoch": 0.41,
"grad_norm": 0.053466796875,
"learning_rate": 1.3297818168038353e-05,
"loss": 0.8132,
"step": 633
},
{
"epoch": 0.41,
"grad_norm": 0.05029296875,
"learning_rate": 1.3277919225152486e-05,
"loss": 0.8481,
"step": 634
},
{
"epoch": 0.41,
"grad_norm": 0.049560546875,
"learning_rate": 1.3258005729601178e-05,
"loss": 0.8206,
"step": 635
},
{
"epoch": 0.41,
"grad_norm": 0.0546875,
"learning_rate": 1.3238077769792475e-05,
"loss": 0.8263,
"step": 636
},
{
"epoch": 0.41,
"grad_norm": 0.049560546875,
"learning_rate": 1.321813543419864e-05,
"loss": 0.8544,
"step": 637
},
{
"epoch": 0.41,
"grad_norm": 0.061279296875,
"learning_rate": 1.3198178811355762e-05,
"loss": 0.8752,
"step": 638
},
{
"epoch": 0.42,
"grad_norm": 0.054931640625,
"learning_rate": 1.3178207989863356e-05,
"loss": 0.8917,
"step": 639
},
{
"epoch": 0.42,
"grad_norm": 0.053466796875,
"learning_rate": 1.3158223058383972e-05,
"loss": 0.8589,
"step": 640
},
{
"epoch": 0.42,
"grad_norm": 0.053955078125,
"learning_rate": 1.3138224105642803e-05,
"loss": 0.7553,
"step": 641
},
{
"epoch": 0.42,
"grad_norm": 0.0625,
"learning_rate": 1.31182112204273e-05,
"loss": 0.8247,
"step": 642
},
{
"epoch": 0.42,
"grad_norm": 0.058349609375,
"learning_rate": 1.3098184491586752e-05,
"loss": 0.8437,
"step": 643
},
{
"epoch": 0.42,
"grad_norm": 0.0556640625,
"learning_rate": 1.3078144008031924e-05,
"loss": 0.8129,
"step": 644
},
{
"epoch": 0.42,
"grad_norm": 0.050537109375,
"learning_rate": 1.3058089858734637e-05,
"loss": 0.7883,
"step": 645
},
{
"epoch": 0.42,
"grad_norm": 0.051025390625,
"learning_rate": 1.3038022132727388e-05,
"loss": 0.889,
"step": 646
},
{
"epoch": 0.42,
"grad_norm": 0.052490234375,
"learning_rate": 1.3017940919102943e-05,
"loss": 0.8434,
"step": 647
},
{
"epoch": 0.42,
"grad_norm": 0.05859375,
"learning_rate": 1.2997846307013955e-05,
"loss": 0.8118,
"step": 648
},
{
"epoch": 0.42,
"grad_norm": 0.05322265625,
"learning_rate": 1.2977738385672558e-05,
"loss": 0.8348,
"step": 649
},
{
"epoch": 0.42,
"grad_norm": 0.05322265625,
"learning_rate": 1.295761724434997e-05,
"loss": 0.8419,
"step": 650
},
{
"epoch": 0.42,
"grad_norm": 0.057373046875,
"learning_rate": 1.2937482972376104e-05,
"loss": 0.8148,
"step": 651
},
{
"epoch": 0.42,
"grad_norm": 0.050048828125,
"learning_rate": 1.2917335659139166e-05,
"loss": 0.8083,
"step": 652
},
{
"epoch": 0.42,
"grad_norm": 0.05029296875,
"learning_rate": 1.2897175394085266e-05,
"loss": 0.7939,
"step": 653
},
{
"epoch": 0.43,
"grad_norm": 0.053955078125,
"learning_rate": 1.2877002266718011e-05,
"loss": 0.8044,
"step": 654
},
{
"epoch": 0.43,
"grad_norm": 0.0517578125,
"learning_rate": 1.2856816366598103e-05,
"loss": 0.809,
"step": 655
},
{
"epoch": 0.43,
"grad_norm": 0.048095703125,
"learning_rate": 1.2836617783342968e-05,
"loss": 0.88,
"step": 656
},
{
"epoch": 0.43,
"grad_norm": 0.050048828125,
"learning_rate": 1.2816406606626324e-05,
"loss": 0.7649,
"step": 657
},
{
"epoch": 0.43,
"grad_norm": 0.0517578125,
"learning_rate": 1.2796182926177809e-05,
"loss": 0.8551,
"step": 658
},
{
"epoch": 0.43,
"grad_norm": 0.05126953125,
"learning_rate": 1.2775946831782565e-05,
"loss": 0.8506,
"step": 659
},
{
"epoch": 0.43,
"grad_norm": 0.048095703125,
"learning_rate": 1.2755698413280853e-05,
"loss": 0.8209,
"step": 660
},
{
"epoch": 0.43,
"grad_norm": 0.052490234375,
"learning_rate": 1.2735437760567644e-05,
"loss": 0.847,
"step": 661
},
{
"epoch": 0.43,
"grad_norm": 0.048828125,
"learning_rate": 1.2715164963592228e-05,
"loss": 0.7587,
"step": 662
},
{
"epoch": 0.43,
"grad_norm": 0.05224609375,
"learning_rate": 1.2694880112357809e-05,
"loss": 0.7927,
"step": 663
},
{
"epoch": 0.43,
"grad_norm": 0.0478515625,
"learning_rate": 1.2674583296921109e-05,
"loss": 0.7415,
"step": 664
},
{
"epoch": 0.43,
"grad_norm": 0.048095703125,
"learning_rate": 1.2654274607391959e-05,
"loss": 0.7919,
"step": 665
},
{
"epoch": 0.43,
"grad_norm": 0.04931640625,
"learning_rate": 1.2633954133932913e-05,
"loss": 0.8203,
"step": 666
},
{
"epoch": 0.43,
"grad_norm": 0.0478515625,
"learning_rate": 1.2613621966758838e-05,
"loss": 0.8021,
"step": 667
},
{
"epoch": 0.43,
"grad_norm": 0.051513671875,
"learning_rate": 1.2593278196136525e-05,
"loss": 0.7899,
"step": 668
},
{
"epoch": 0.43,
"grad_norm": 0.056884765625,
"learning_rate": 1.257292291238427e-05,
"loss": 0.8331,
"step": 669
},
{
"epoch": 0.44,
"grad_norm": 0.05029296875,
"learning_rate": 1.255255620587148e-05,
"loss": 0.7661,
"step": 670
},
{
"epoch": 0.44,
"grad_norm": 0.056396484375,
"learning_rate": 1.2532178167018283e-05,
"loss": 0.8314,
"step": 671
},
{
"epoch": 0.44,
"grad_norm": 0.051513671875,
"learning_rate": 1.2511788886295115e-05,
"loss": 0.7618,
"step": 672
},
{
"epoch": 0.44,
"grad_norm": 0.0556640625,
"learning_rate": 1.2491388454222327e-05,
"loss": 0.8003,
"step": 673
},
{
"epoch": 0.44,
"grad_norm": 0.051025390625,
"learning_rate": 1.2470976961369765e-05,
"loss": 0.8458,
"step": 674
},
{
"epoch": 0.44,
"grad_norm": 0.0458984375,
"learning_rate": 1.2450554498356388e-05,
"loss": 0.7778,
"step": 675
},
{
"epoch": 0.44,
"grad_norm": 0.048095703125,
"learning_rate": 1.243012115584986e-05,
"loss": 0.8396,
"step": 676
},
{
"epoch": 0.44,
"grad_norm": 0.048828125,
"learning_rate": 1.2409677024566145e-05,
"loss": 0.8113,
"step": 677
},
{
"epoch": 0.44,
"grad_norm": 0.049072265625,
"learning_rate": 1.2389222195269102e-05,
"loss": 0.8426,
"step": 678
},
{
"epoch": 0.44,
"grad_norm": 0.052978515625,
"learning_rate": 1.2368756758770084e-05,
"loss": 0.8192,
"step": 679
},
{
"epoch": 0.44,
"grad_norm": 0.054443359375,
"learning_rate": 1.2348280805927541e-05,
"loss": 0.883,
"step": 680
},
{
"epoch": 0.44,
"grad_norm": 0.061279296875,
"learning_rate": 1.2327794427646607e-05,
"loss": 0.7733,
"step": 681
},
{
"epoch": 0.44,
"grad_norm": 0.06201171875,
"learning_rate": 1.2307297714878706e-05,
"loss": 0.8566,
"step": 682
},
{
"epoch": 0.44,
"grad_norm": 0.051513671875,
"learning_rate": 1.2286790758621132e-05,
"loss": 0.86,
"step": 683
},
{
"epoch": 0.44,
"grad_norm": 0.05615234375,
"learning_rate": 1.2266273649916669e-05,
"loss": 0.8344,
"step": 684
},
{
"epoch": 0.45,
"grad_norm": 0.07373046875,
"learning_rate": 1.2245746479853168e-05,
"loss": 0.9192,
"step": 685
},
{
"epoch": 0.45,
"grad_norm": 0.0537109375,
"learning_rate": 1.2225209339563144e-05,
"loss": 0.7797,
"step": 686
},
{
"epoch": 0.45,
"grad_norm": 0.051025390625,
"learning_rate": 1.2204662320223385e-05,
"loss": 0.7331,
"step": 687
},
{
"epoch": 0.45,
"grad_norm": 0.046875,
"learning_rate": 1.2184105513054539e-05,
"loss": 0.8126,
"step": 688
},
{
"epoch": 0.45,
"grad_norm": 0.05126953125,
"learning_rate": 1.2163539009320691e-05,
"loss": 0.8052,
"step": 689
},
{
"epoch": 0.45,
"grad_norm": 0.051025390625,
"learning_rate": 1.2142962900328994e-05,
"loss": 0.7866,
"step": 690
},
{
"epoch": 0.45,
"grad_norm": 0.052001953125,
"learning_rate": 1.2122377277429231e-05,
"loss": 0.8124,
"step": 691
},
{
"epoch": 0.45,
"grad_norm": 0.052734375,
"learning_rate": 1.2101782232013436e-05,
"loss": 0.8899,
"step": 692
},
{
"epoch": 0.45,
"grad_norm": 0.053955078125,
"learning_rate": 1.208117785551547e-05,
"loss": 0.7966,
"step": 693
},
{
"epoch": 0.45,
"grad_norm": 0.0546875,
"learning_rate": 1.2060564239410613e-05,
"loss": 0.8062,
"step": 694
},
{
"epoch": 0.45,
"grad_norm": 0.0634765625,
"learning_rate": 1.2039941475215169e-05,
"loss": 0.8438,
"step": 695
},
{
"epoch": 0.45,
"grad_norm": 0.05078125,
"learning_rate": 1.2019309654486065e-05,
"loss": 0.8648,
"step": 696
},
{
"epoch": 0.45,
"grad_norm": 0.0546875,
"learning_rate": 1.1998668868820422e-05,
"loss": 0.8274,
"step": 697
},
{
"epoch": 0.45,
"grad_norm": 0.05712890625,
"learning_rate": 1.1978019209855174e-05,
"loss": 0.8074,
"step": 698
},
{
"epoch": 0.45,
"grad_norm": 0.052490234375,
"learning_rate": 1.195736076926664e-05,
"loss": 0.8436,
"step": 699
},
{
"epoch": 0.46,
"grad_norm": 0.057861328125,
"learning_rate": 1.1936693638770127e-05,
"loss": 0.7833,
"step": 700
},
{
"epoch": 0.46,
"grad_norm": 0.054443359375,
"learning_rate": 1.1916017910119525e-05,
"loss": 0.8146,
"step": 701
},
{
"epoch": 0.46,
"grad_norm": 0.05419921875,
"learning_rate": 1.1895333675106897e-05,
"loss": 0.848,
"step": 702
},
{
"epoch": 0.46,
"grad_norm": 0.0537109375,
"learning_rate": 1.1874641025562065e-05,
"loss": 0.8167,
"step": 703
},
{
"epoch": 0.46,
"grad_norm": 0.05517578125,
"learning_rate": 1.185394005335222e-05,
"loss": 0.8499,
"step": 704
},
{
"epoch": 0.46,
"grad_norm": 0.0556640625,
"learning_rate": 1.1833230850381488e-05,
"loss": 0.7853,
"step": 705
},
{
"epoch": 0.46,
"grad_norm": 0.055908203125,
"learning_rate": 1.1812513508590541e-05,
"loss": 0.8321,
"step": 706
},
{
"epoch": 0.46,
"grad_norm": 0.05712890625,
"learning_rate": 1.1791788119956191e-05,
"loss": 0.7874,
"step": 707
},
{
"epoch": 0.46,
"grad_norm": 0.055419921875,
"learning_rate": 1.1771054776490968e-05,
"loss": 0.8356,
"step": 708
},
{
"epoch": 0.46,
"grad_norm": 0.053466796875,
"learning_rate": 1.1750313570242721e-05,
"loss": 0.8236,
"step": 709
},
{
"epoch": 0.46,
"grad_norm": 0.06103515625,
"learning_rate": 1.1729564593294203e-05,
"loss": 0.8301,
"step": 710
},
{
"epoch": 0.46,
"grad_norm": 0.048583984375,
"learning_rate": 1.170880793776267e-05,
"loss": 0.7926,
"step": 711
},
{
"epoch": 0.46,
"grad_norm": 0.055908203125,
"learning_rate": 1.168804369579947e-05,
"loss": 0.7654,
"step": 712
},
{
"epoch": 0.46,
"grad_norm": 0.050537109375,
"learning_rate": 1.1667271959589623e-05,
"loss": 0.7618,
"step": 713
},
{
"epoch": 0.46,
"grad_norm": 0.05126953125,
"learning_rate": 1.1646492821351428e-05,
"loss": 0.8027,
"step": 714
},
{
"epoch": 0.46,
"grad_norm": 0.0537109375,
"learning_rate": 1.1625706373336046e-05,
"loss": 0.8891,
"step": 715
},
{
"epoch": 0.47,
"grad_norm": 0.051513671875,
"learning_rate": 1.1604912707827083e-05,
"loss": 0.85,
"step": 716
},
{
"epoch": 0.47,
"grad_norm": 0.05419921875,
"learning_rate": 1.1584111917140197e-05,
"loss": 0.8244,
"step": 717
},
{
"epoch": 0.47,
"grad_norm": 0.0537109375,
"learning_rate": 1.1563304093622674e-05,
"loss": 0.7958,
"step": 718
},
{
"epoch": 0.47,
"grad_norm": 0.05126953125,
"learning_rate": 1.1542489329653024e-05,
"loss": 0.8329,
"step": 719
},
{
"epoch": 0.47,
"grad_norm": 0.055908203125,
"learning_rate": 1.1521667717640572e-05,
"loss": 0.7596,
"step": 720
},
{
"epoch": 0.47,
"grad_norm": 0.05712890625,
"learning_rate": 1.1500839350025039e-05,
"loss": 0.9115,
"step": 721
},
{
"epoch": 0.47,
"grad_norm": 0.0556640625,
"learning_rate": 1.1480004319276145e-05,
"loss": 0.8006,
"step": 722
},
{
"epoch": 0.47,
"grad_norm": 0.05615234375,
"learning_rate": 1.1459162717893193e-05,
"loss": 0.7918,
"step": 723
},
{
"epoch": 0.47,
"grad_norm": 0.054443359375,
"learning_rate": 1.143831463840465e-05,
"loss": 0.7406,
"step": 724
},
{
"epoch": 0.47,
"grad_norm": 0.05322265625,
"learning_rate": 1.1417460173367748e-05,
"loss": 0.7735,
"step": 725
},
{
"epoch": 0.47,
"grad_norm": 0.050048828125,
"learning_rate": 1.1396599415368062e-05,
"loss": 0.7926,
"step": 726
},
{
"epoch": 0.47,
"grad_norm": 0.05126953125,
"learning_rate": 1.1375732457019118e-05,
"loss": 0.8132,
"step": 727
},
{
"epoch": 0.47,
"grad_norm": 0.05224609375,
"learning_rate": 1.1354859390961958e-05,
"loss": 0.7791,
"step": 728
},
{
"epoch": 0.47,
"grad_norm": 0.052001953125,
"learning_rate": 1.1333980309864743e-05,
"loss": 0.8277,
"step": 729
},
{
"epoch": 0.47,
"grad_norm": 0.055908203125,
"learning_rate": 1.1313095306422336e-05,
"loss": 0.8324,
"step": 730
},
{
"epoch": 0.48,
"grad_norm": 0.050048828125,
"learning_rate": 1.1292204473355897e-05,
"loss": 0.8637,
"step": 731
},
{
"epoch": 0.48,
"grad_norm": 0.050048828125,
"learning_rate": 1.1271307903412469e-05,
"loss": 0.8237,
"step": 732
},
{
"epoch": 0.48,
"grad_norm": 0.054931640625,
"learning_rate": 1.1250405689364561e-05,
"loss": 0.8395,
"step": 733
},
{
"epoch": 0.48,
"grad_norm": 0.0517578125,
"learning_rate": 1.1229497924009731e-05,
"loss": 0.7743,
"step": 734
},
{
"epoch": 0.48,
"grad_norm": 0.1611328125,
"learning_rate": 1.1208584700170203e-05,
"loss": 0.7385,
"step": 735
},
{
"epoch": 0.48,
"grad_norm": 0.058837890625,
"learning_rate": 1.1187666110692417e-05,
"loss": 0.8611,
"step": 736
},
{
"epoch": 0.48,
"grad_norm": 0.06103515625,
"learning_rate": 1.116674224844664e-05,
"loss": 0.7648,
"step": 737
},
{
"epoch": 0.48,
"grad_norm": 0.057373046875,
"learning_rate": 1.1145813206326548e-05,
"loss": 0.8365,
"step": 738
},
{
"epoch": 0.48,
"grad_norm": 0.054931640625,
"learning_rate": 1.1124879077248815e-05,
"loss": 0.7706,
"step": 739
},
{
"epoch": 0.48,
"grad_norm": 0.0498046875,
"learning_rate": 1.11039399541527e-05,
"loss": 0.8028,
"step": 740
},
{
"epoch": 0.48,
"grad_norm": 0.0498046875,
"learning_rate": 1.1082995929999626e-05,
"loss": 0.7975,
"step": 741
},
{
"epoch": 0.48,
"grad_norm": 0.049072265625,
"learning_rate": 1.1062047097772783e-05,
"loss": 0.8077,
"step": 742
},
{
"epoch": 0.48,
"grad_norm": 0.053955078125,
"learning_rate": 1.1041093550476706e-05,
"loss": 0.794,
"step": 743
},
{
"epoch": 0.48,
"grad_norm": 0.056640625,
"learning_rate": 1.1020135381136858e-05,
"loss": 0.825,
"step": 744
},
{
"epoch": 0.48,
"grad_norm": 0.05224609375,
"learning_rate": 1.0999172682799227e-05,
"loss": 0.8103,
"step": 745
},
{
"epoch": 0.49,
"grad_norm": 0.05322265625,
"learning_rate": 1.0978205548529902e-05,
"loss": 0.838,
"step": 746
},
{
"epoch": 0.49,
"grad_norm": 0.05224609375,
"learning_rate": 1.0957234071414675e-05,
"loss": 0.8659,
"step": 747
},
{
"epoch": 0.49,
"grad_norm": 0.052978515625,
"learning_rate": 1.0936258344558613e-05,
"loss": 0.858,
"step": 748
},
{
"epoch": 0.49,
"grad_norm": 0.0576171875,
"learning_rate": 1.091527846108565e-05,
"loss": 0.7929,
"step": 749
},
{
"epoch": 0.49,
"grad_norm": 0.05029296875,
"learning_rate": 1.0894294514138169e-05,
"loss": 0.7513,
"step": 750
},
{
"epoch": 0.49,
"grad_norm": 0.051025390625,
"learning_rate": 1.0873306596876602e-05,
"loss": 0.8347,
"step": 751
},
{
"epoch": 0.49,
"grad_norm": 0.054443359375,
"learning_rate": 1.0852314802479009e-05,
"loss": 0.8225,
"step": 752
},
{
"epoch": 0.49,
"grad_norm": 0.049072265625,
"learning_rate": 1.0831319224140653e-05,
"loss": 0.8331,
"step": 753
},
{
"epoch": 0.49,
"grad_norm": 0.055908203125,
"learning_rate": 1.08103199550736e-05,
"loss": 0.8195,
"step": 754
},
{
"epoch": 0.49,
"grad_norm": 0.057373046875,
"learning_rate": 1.0789317088506307e-05,
"loss": 0.7831,
"step": 755
},
{
"epoch": 0.49,
"grad_norm": 0.052978515625,
"learning_rate": 1.0768310717683192e-05,
"loss": 0.8176,
"step": 756
},
{
"epoch": 0.49,
"grad_norm": 0.04638671875,
"learning_rate": 1.0747300935864245e-05,
"loss": 0.7595,
"step": 757
},
{
"epoch": 0.49,
"grad_norm": 0.04931640625,
"learning_rate": 1.0726287836324583e-05,
"loss": 0.8544,
"step": 758
},
{
"epoch": 0.49,
"grad_norm": 0.05029296875,
"learning_rate": 1.0705271512354068e-05,
"loss": 0.88,
"step": 759
},
{
"epoch": 0.49,
"grad_norm": 0.053955078125,
"learning_rate": 1.0684252057256861e-05,
"loss": 0.8787,
"step": 760
},
{
"epoch": 0.49,
"grad_norm": 0.06201171875,
"learning_rate": 1.066322956435104e-05,
"loss": 0.8907,
"step": 761
},
{
"epoch": 0.5,
"grad_norm": 0.0537109375,
"learning_rate": 1.0642204126968159e-05,
"loss": 0.7765,
"step": 762
},
{
"epoch": 0.5,
"grad_norm": 0.054443359375,
"learning_rate": 1.062117583845285e-05,
"loss": 0.7968,
"step": 763
},
{
"epoch": 0.5,
"grad_norm": 0.05029296875,
"learning_rate": 1.06001447921624e-05,
"loss": 0.8493,
"step": 764
},
{
"epoch": 0.5,
"grad_norm": 0.07421875,
"learning_rate": 1.0579111081466333e-05,
"loss": 0.789,
"step": 765
},
{
"epoch": 0.5,
"grad_norm": 0.051513671875,
"learning_rate": 1.0558074799746019e-05,
"loss": 0.8526,
"step": 766
},
{
"epoch": 0.5,
"grad_norm": 0.052490234375,
"learning_rate": 1.0537036040394226e-05,
"loss": 0.8511,
"step": 767
},
{
"epoch": 0.5,
"grad_norm": 0.051513671875,
"learning_rate": 1.0515994896814731e-05,
"loss": 0.8339,
"step": 768
},
{
"epoch": 0.5,
"grad_norm": 0.05419921875,
"learning_rate": 1.0494951462421893e-05,
"loss": 0.7972,
"step": 769
},
{
"epoch": 0.5,
"grad_norm": 0.05029296875,
"learning_rate": 1.0473905830640239e-05,
"loss": 0.8114,
"step": 770
},
{
"epoch": 0.5,
"grad_norm": 0.050048828125,
"learning_rate": 1.0452858094904053e-05,
"loss": 0.7742,
"step": 771
},
{
"epoch": 0.5,
"grad_norm": 0.05224609375,
"learning_rate": 1.0431808348656961e-05,
"loss": 0.8655,
"step": 772
},
{
"epoch": 0.5,
"grad_norm": 0.05224609375,
"learning_rate": 1.0410756685351517e-05,
"loss": 0.8543,
"step": 773
},
{
"epoch": 0.5,
"grad_norm": 0.052001953125,
"learning_rate": 1.0389703198448784e-05,
"loss": 0.8646,
"step": 774
},
{
"epoch": 0.5,
"grad_norm": 0.0546875,
"learning_rate": 1.0368647981417917e-05,
"loss": 0.8008,
"step": 775
},
{
"epoch": 0.5,
"grad_norm": 0.053466796875,
"learning_rate": 1.0347591127735754e-05,
"loss": 0.839,
"step": 776
},
{
"epoch": 0.51,
"grad_norm": 0.05517578125,
"learning_rate": 1.0326532730886405e-05,
"loss": 0.749,
"step": 777
},
{
"epoch": 0.51,
"grad_norm": 0.052978515625,
"learning_rate": 1.0305472884360825e-05,
"loss": 0.778,
"step": 778
},
{
"epoch": 0.51,
"grad_norm": 0.0546875,
"learning_rate": 1.0284411681656408e-05,
"loss": 0.7992,
"step": 779
},
{
"epoch": 0.51,
"grad_norm": 0.0517578125,
"learning_rate": 1.0263349216276564e-05,
"loss": 0.8138,
"step": 780
},
{
"epoch": 0.51,
"grad_norm": 0.056396484375,
"learning_rate": 1.0242285581730313e-05,
"loss": 0.8241,
"step": 781
},
{
"epoch": 0.51,
"grad_norm": 0.047119140625,
"learning_rate": 1.022122087153187e-05,
"loss": 0.7916,
"step": 782
},
{
"epoch": 0.51,
"grad_norm": 0.0517578125,
"learning_rate": 1.0200155179200214e-05,
"loss": 0.8253,
"step": 783
},
{
"epoch": 0.51,
"grad_norm": 0.05517578125,
"learning_rate": 1.0179088598258697e-05,
"loss": 0.8981,
"step": 784
},
{
"epoch": 0.51,
"grad_norm": 0.053466796875,
"learning_rate": 1.0158021222234602e-05,
"loss": 0.8234,
"step": 785
},
{
"epoch": 0.51,
"grad_norm": 0.053955078125,
"learning_rate": 1.0136953144658753e-05,
"loss": 0.8459,
"step": 786
},
{
"epoch": 0.51,
"grad_norm": 0.04931640625,
"learning_rate": 1.0115884459065088e-05,
"loss": 0.8396,
"step": 787
},
{
"epoch": 0.51,
"grad_norm": 0.055419921875,
"learning_rate": 1.009481525899024e-05,
"loss": 0.8258,
"step": 788
},
{
"epoch": 0.51,
"grad_norm": 0.050537109375,
"learning_rate": 1.0073745637973125e-05,
"loss": 0.7542,
"step": 789
},
{
"epoch": 0.51,
"grad_norm": 0.051513671875,
"learning_rate": 1.0052675689554534e-05,
"loss": 0.7882,
"step": 790
},
{
"epoch": 0.51,
"grad_norm": 0.052978515625,
"learning_rate": 1.0031605507276705e-05,
"loss": 0.8435,
"step": 791
},
{
"epoch": 0.51,
"grad_norm": 0.05029296875,
"learning_rate": 1.0010535184682921e-05,
"loss": 0.7813,
"step": 792
},
{
"epoch": 0.52,
"grad_norm": 0.0498046875,
"learning_rate": 9.98946481531708e-06,
"loss": 0.8568,
"step": 793
},
{
"epoch": 0.52,
"grad_norm": 0.053955078125,
"learning_rate": 9.968394492723298e-06,
"loss": 0.7625,
"step": 794
},
{
"epoch": 0.52,
"grad_norm": 0.0517578125,
"learning_rate": 9.947324310445467e-06,
"loss": 0.8384,
"step": 795
},
{
"epoch": 0.52,
"grad_norm": 0.052490234375,
"learning_rate": 9.926254362026875e-06,
"loss": 0.8727,
"step": 796
},
{
"epoch": 0.52,
"grad_norm": 0.057373046875,
"learning_rate": 9.905184741009765e-06,
"loss": 0.8494,
"step": 797
},
{
"epoch": 0.52,
"grad_norm": 0.06591796875,
"learning_rate": 9.884115540934915e-06,
"loss": 0.8167,
"step": 798
},
{
"epoch": 0.52,
"grad_norm": 0.0576171875,
"learning_rate": 9.863046855341247e-06,
"loss": 0.7945,
"step": 799
},
{
"epoch": 0.52,
"grad_norm": 0.05419921875,
"learning_rate": 9.841978777765401e-06,
"loss": 0.8663,
"step": 800
},
{
"epoch": 0.52,
"grad_norm": 0.0498046875,
"learning_rate": 9.820911401741306e-06,
"loss": 0.8504,
"step": 801
},
{
"epoch": 0.52,
"grad_norm": 0.055419921875,
"learning_rate": 9.79984482079979e-06,
"loss": 0.9436,
"step": 802
},
{
"epoch": 0.52,
"grad_norm": 0.05322265625,
"learning_rate": 9.778779128468133e-06,
"loss": 0.7959,
"step": 803
},
{
"epoch": 0.52,
"grad_norm": 0.055908203125,
"learning_rate": 9.757714418269687e-06,
"loss": 0.8337,
"step": 804
},
{
"epoch": 0.52,
"grad_norm": 0.053466796875,
"learning_rate": 9.73665078372344e-06,
"loss": 0.8399,
"step": 805
},
{
"epoch": 0.52,
"grad_norm": 0.053955078125,
"learning_rate": 9.715588318343594e-06,
"loss": 0.832,
"step": 806
},
{
"epoch": 0.52,
"grad_norm": 0.055908203125,
"learning_rate": 9.694527115639175e-06,
"loss": 0.8985,
"step": 807
},
{
"epoch": 0.53,
"grad_norm": 0.05029296875,
"learning_rate": 9.673467269113599e-06,
"loss": 0.7697,
"step": 808
},
{
"epoch": 0.53,
"grad_norm": 0.04931640625,
"learning_rate": 9.652408872264249e-06,
"loss": 0.8257,
"step": 809
},
{
"epoch": 0.53,
"grad_norm": 0.055908203125,
"learning_rate": 9.631352018582088e-06,
"loss": 0.8525,
"step": 810
},
{
"epoch": 0.53,
"grad_norm": 0.051513671875,
"learning_rate": 9.61029680155122e-06,
"loss": 0.758,
"step": 811
},
{
"epoch": 0.53,
"grad_norm": 0.051025390625,
"learning_rate": 9.589243314648483e-06,
"loss": 0.9023,
"step": 812
},
{
"epoch": 0.53,
"grad_norm": 0.050537109375,
"learning_rate": 9.568191651343042e-06,
"loss": 0.7768,
"step": 813
},
{
"epoch": 0.53,
"grad_norm": 0.0517578125,
"learning_rate": 9.54714190509595e-06,
"loss": 0.8373,
"step": 814
},
{
"epoch": 0.53,
"grad_norm": 0.059814453125,
"learning_rate": 9.526094169359766e-06,
"loss": 0.8108,
"step": 815
},
{
"epoch": 0.53,
"grad_norm": 0.052490234375,
"learning_rate": 9.50504853757811e-06,
"loss": 0.8059,
"step": 816
},
{
"epoch": 0.53,
"grad_norm": 0.05908203125,
"learning_rate": 9.48400510318527e-06,
"loss": 0.8751,
"step": 817
},
{
"epoch": 0.53,
"grad_norm": 0.05029296875,
"learning_rate": 9.462963959605777e-06,
"loss": 0.8535,
"step": 818
},
{
"epoch": 0.53,
"grad_norm": 0.0537109375,
"learning_rate": 9.441925200253985e-06,
"loss": 0.8148,
"step": 819
},
{
"epoch": 0.53,
"grad_norm": 0.05126953125,
"learning_rate": 9.420888918533669e-06,
"loss": 0.8368,
"step": 820
},
{
"epoch": 0.53,
"grad_norm": 0.048095703125,
"learning_rate": 9.399855207837606e-06,
"loss": 0.9126,
"step": 821
},
{
"epoch": 0.53,
"grad_norm": 0.0517578125,
"learning_rate": 9.378824161547152e-06,
"loss": 0.8548,
"step": 822
},
{
"epoch": 0.54,
"grad_norm": 0.055419921875,
"learning_rate": 9.357795873031841e-06,
"loss": 0.9106,
"step": 823
},
{
"epoch": 0.54,
"grad_norm": 0.058349609375,
"learning_rate": 9.336770435648963e-06,
"loss": 0.8111,
"step": 824
},
{
"epoch": 0.54,
"grad_norm": 0.056640625,
"learning_rate": 9.315747942743142e-06,
"loss": 0.8079,
"step": 825
},
{
"epoch": 0.54,
"grad_norm": 0.05126953125,
"learning_rate": 9.294728487645934e-06,
"loss": 0.7641,
"step": 826
},
{
"epoch": 0.54,
"grad_norm": 0.05908203125,
"learning_rate": 9.273712163675419e-06,
"loss": 0.7871,
"step": 827
},
{
"epoch": 0.54,
"grad_norm": 0.05615234375,
"learning_rate": 9.252699064135759e-06,
"loss": 0.8296,
"step": 828
},
{
"epoch": 0.54,
"grad_norm": 0.05078125,
"learning_rate": 9.23168928231681e-06,
"loss": 0.8139,
"step": 829
},
{
"epoch": 0.54,
"grad_norm": 0.053955078125,
"learning_rate": 9.210682911493697e-06,
"loss": 0.8108,
"step": 830
},
{
"epoch": 0.54,
"grad_norm": 0.0546875,
"learning_rate": 9.189680044926402e-06,
"loss": 0.843,
"step": 831
},
{
"epoch": 0.54,
"grad_norm": 0.061279296875,
"learning_rate": 9.168680775859352e-06,
"loss": 0.8102,
"step": 832
},
{
"epoch": 0.54,
"grad_norm": 0.0576171875,
"learning_rate": 9.147685197520995e-06,
"loss": 0.8662,
"step": 833
},
{
"epoch": 0.54,
"grad_norm": 0.05126953125,
"learning_rate": 9.126693403123398e-06,
"loss": 0.9419,
"step": 834
},
{
"epoch": 0.54,
"grad_norm": 0.051513671875,
"learning_rate": 9.105705485861834e-06,
"loss": 0.8292,
"step": 835
},
{
"epoch": 0.54,
"grad_norm": 0.0546875,
"learning_rate": 9.084721538914354e-06,
"loss": 0.8407,
"step": 836
},
{
"epoch": 0.54,
"grad_norm": 0.054443359375,
"learning_rate": 9.06374165544139e-06,
"loss": 0.818,
"step": 837
},
{
"epoch": 0.54,
"grad_norm": 0.052490234375,
"learning_rate": 9.042765928585327e-06,
"loss": 0.8263,
"step": 838
},
{
"epoch": 0.55,
"grad_norm": 0.05810546875,
"learning_rate": 9.0217944514701e-06,
"loss": 0.8929,
"step": 839
},
{
"epoch": 0.55,
"grad_norm": 0.048828125,
"learning_rate": 9.000827317200778e-06,
"loss": 0.8218,
"step": 840
},
{
"epoch": 0.55,
"grad_norm": 0.0595703125,
"learning_rate": 8.979864618863144e-06,
"loss": 0.9103,
"step": 841
},
{
"epoch": 0.55,
"grad_norm": 0.051513671875,
"learning_rate": 8.958906449523295e-06,
"loss": 0.7595,
"step": 842
},
{
"epoch": 0.55,
"grad_norm": 0.0498046875,
"learning_rate": 8.93795290222722e-06,
"loss": 0.8868,
"step": 843
},
{
"epoch": 0.55,
"grad_norm": 0.05712890625,
"learning_rate": 8.917004070000377e-06,
"loss": 0.8137,
"step": 844
},
{
"epoch": 0.55,
"grad_norm": 0.056640625,
"learning_rate": 8.896060045847305e-06,
"loss": 0.7945,
"step": 845
},
{
"epoch": 0.55,
"grad_norm": 0.05810546875,
"learning_rate": 8.875120922751186e-06,
"loss": 0.8517,
"step": 846
},
{
"epoch": 0.55,
"grad_norm": 0.05419921875,
"learning_rate": 8.854186793673454e-06,
"loss": 0.8248,
"step": 847
},
{
"epoch": 0.55,
"grad_norm": 0.05224609375,
"learning_rate": 8.833257751553365e-06,
"loss": 0.7861,
"step": 848
},
{
"epoch": 0.55,
"grad_norm": 0.049072265625,
"learning_rate": 8.812333889307586e-06,
"loss": 0.8321,
"step": 849
},
{
"epoch": 0.55,
"grad_norm": 0.053466796875,
"learning_rate": 8.791415299829798e-06,
"loss": 0.8041,
"step": 850
},
{
"epoch": 0.55,
"grad_norm": 0.0537109375,
"learning_rate": 8.77050207599027e-06,
"loss": 0.8564,
"step": 851
},
{
"epoch": 0.55,
"grad_norm": 0.059326171875,
"learning_rate": 8.749594310635442e-06,
"loss": 0.811,
"step": 852
},
{
"epoch": 0.55,
"grad_norm": 0.052001953125,
"learning_rate": 8.728692096587536e-06,
"loss": 0.8223,
"step": 853
},
{
"epoch": 0.56,
"grad_norm": 0.056884765625,
"learning_rate": 8.707795526644107e-06,
"loss": 0.8617,
"step": 854
},
{
"epoch": 0.56,
"grad_norm": 0.05078125,
"learning_rate": 8.686904693577668e-06,
"loss": 0.778,
"step": 855
},
{
"epoch": 0.56,
"grad_norm": 0.05322265625,
"learning_rate": 8.666019690135264e-06,
"loss": 0.7807,
"step": 856
},
{
"epoch": 0.56,
"grad_norm": 0.051513671875,
"learning_rate": 8.645140609038045e-06,
"loss": 0.8375,
"step": 857
},
{
"epoch": 0.56,
"grad_norm": 0.050048828125,
"learning_rate": 8.624267542980882e-06,
"loss": 0.8494,
"step": 858
},
{
"epoch": 0.56,
"grad_norm": 0.05078125,
"learning_rate": 8.60340058463194e-06,
"loss": 0.7645,
"step": 859
},
{
"epoch": 0.56,
"grad_norm": 0.055419921875,
"learning_rate": 8.582539826632253e-06,
"loss": 0.8531,
"step": 860
},
{
"epoch": 0.56,
"grad_norm": 0.053466796875,
"learning_rate": 8.561685361595353e-06,
"loss": 0.7901,
"step": 861
},
{
"epoch": 0.56,
"grad_norm": 0.04931640625,
"learning_rate": 8.540837282106809e-06,
"loss": 0.7988,
"step": 862
},
{
"epoch": 0.56,
"grad_norm": 0.05322265625,
"learning_rate": 8.519995680723853e-06,
"loss": 0.7846,
"step": 863
},
{
"epoch": 0.56,
"grad_norm": 0.0537109375,
"learning_rate": 8.499160649974964e-06,
"loss": 0.7196,
"step": 864
},
{
"epoch": 0.56,
"grad_norm": 0.055908203125,
"learning_rate": 8.47833228235943e-06,
"loss": 0.8331,
"step": 865
},
{
"epoch": 0.56,
"grad_norm": 0.055419921875,
"learning_rate": 8.457510670346976e-06,
"loss": 0.7706,
"step": 866
},
{
"epoch": 0.56,
"grad_norm": 0.053466796875,
"learning_rate": 8.43669590637733e-06,
"loss": 0.819,
"step": 867
},
{
"epoch": 0.56,
"grad_norm": 0.051025390625,
"learning_rate": 8.415888082859806e-06,
"loss": 0.8358,
"step": 868
},
{
"epoch": 0.57,
"grad_norm": 0.05615234375,
"learning_rate": 8.39508729217292e-06,
"loss": 0.9253,
"step": 869
},
{
"epoch": 0.57,
"grad_norm": 0.05859375,
"learning_rate": 8.374293626663958e-06,
"loss": 0.9424,
"step": 870
},
{
"epoch": 0.57,
"grad_norm": 0.04833984375,
"learning_rate": 8.353507178648572e-06,
"loss": 0.7783,
"step": 871
},
{
"epoch": 0.57,
"grad_norm": 0.054931640625,
"learning_rate": 8.33272804041038e-06,
"loss": 0.838,
"step": 872
},
{
"epoch": 0.57,
"grad_norm": 0.056640625,
"learning_rate": 8.311956304200532e-06,
"loss": 0.8241,
"step": 873
},
{
"epoch": 0.57,
"grad_norm": 0.07421875,
"learning_rate": 8.291192062237329e-06,
"loss": 0.8346,
"step": 874
},
{
"epoch": 0.57,
"grad_norm": 0.051513671875,
"learning_rate": 8.2704354067058e-06,
"loss": 0.752,
"step": 875
},
{
"epoch": 0.57,
"grad_norm": 0.052001953125,
"learning_rate": 8.249686429757282e-06,
"loss": 0.8214,
"step": 876
},
{
"epoch": 0.57,
"grad_norm": 0.05810546875,
"learning_rate": 8.228945223509037e-06,
"loss": 0.7654,
"step": 877
},
{
"epoch": 0.57,
"grad_norm": 0.058349609375,
"learning_rate": 8.208211880043812e-06,
"loss": 0.8251,
"step": 878
},
{
"epoch": 0.57,
"grad_norm": 0.052978515625,
"learning_rate": 8.187486491409462e-06,
"loss": 0.8461,
"step": 879
},
{
"epoch": 0.57,
"grad_norm": 0.052734375,
"learning_rate": 8.166769149618517e-06,
"loss": 0.7852,
"step": 880
},
{
"epoch": 0.57,
"grad_norm": 0.05419921875,
"learning_rate": 8.146059946647784e-06,
"loss": 0.8457,
"step": 881
},
{
"epoch": 0.57,
"grad_norm": 0.0537109375,
"learning_rate": 8.125358974437933e-06,
"loss": 0.868,
"step": 882
},
{
"epoch": 0.57,
"grad_norm": 0.053955078125,
"learning_rate": 8.104666324893106e-06,
"loss": 0.7821,
"step": 883
},
{
"epoch": 0.57,
"grad_norm": 0.052978515625,
"learning_rate": 8.083982089880477e-06,
"loss": 0.8308,
"step": 884
},
{
"epoch": 0.58,
"grad_norm": 0.055908203125,
"learning_rate": 8.063306361229876e-06,
"loss": 0.8241,
"step": 885
},
{
"epoch": 0.58,
"grad_norm": 0.0537109375,
"learning_rate": 8.042639230733364e-06,
"loss": 0.8983,
"step": 886
},
{
"epoch": 0.58,
"grad_norm": 0.0556640625,
"learning_rate": 8.021980790144828e-06,
"loss": 0.8106,
"step": 887
},
{
"epoch": 0.58,
"grad_norm": 0.05078125,
"learning_rate": 8.001331131179581e-06,
"loss": 0.7587,
"step": 888
},
{
"epoch": 0.58,
"grad_norm": 0.061767578125,
"learning_rate": 7.98069034551394e-06,
"loss": 0.8053,
"step": 889
},
{
"epoch": 0.58,
"grad_norm": 0.057373046875,
"learning_rate": 7.960058524784833e-06,
"loss": 0.7977,
"step": 890
},
{
"epoch": 0.58,
"grad_norm": 0.054443359375,
"learning_rate": 7.939435760589392e-06,
"loss": 0.8021,
"step": 891
},
{
"epoch": 0.58,
"grad_norm": 0.052001953125,
"learning_rate": 7.918822144484532e-06,
"loss": 0.832,
"step": 892
},
{
"epoch": 0.58,
"grad_norm": 0.061279296875,
"learning_rate": 7.898217767986562e-06,
"loss": 0.8482,
"step": 893
},
{
"epoch": 0.58,
"grad_norm": 0.054443359375,
"learning_rate": 7.877622722570772e-06,
"loss": 0.8368,
"step": 894
},
{
"epoch": 0.58,
"grad_norm": 0.056396484375,
"learning_rate": 7.857037099671008e-06,
"loss": 0.8214,
"step": 895
},
{
"epoch": 0.58,
"grad_norm": 0.056396484375,
"learning_rate": 7.836460990679312e-06,
"loss": 0.8223,
"step": 896
},
{
"epoch": 0.58,
"grad_norm": 0.051513671875,
"learning_rate": 7.815894486945466e-06,
"loss": 0.793,
"step": 897
},
{
"epoch": 0.58,
"grad_norm": 0.050537109375,
"learning_rate": 7.795337679776613e-06,
"loss": 0.8183,
"step": 898
},
{
"epoch": 0.58,
"grad_norm": 0.048828125,
"learning_rate": 7.774790660436857e-06,
"loss": 0.774,
"step": 899
},
{
"epoch": 0.59,
"grad_norm": 0.052734375,
"learning_rate": 7.754253520146835e-06,
"loss": 0.887,
"step": 900
},
{
"epoch": 0.59,
"grad_norm": 0.0498046875,
"learning_rate": 7.733726350083331e-06,
"loss": 0.7979,
"step": 901
},
{
"epoch": 0.59,
"grad_norm": 0.05615234375,
"learning_rate": 7.713209241378871e-06,
"loss": 0.8028,
"step": 902
},
{
"epoch": 0.59,
"grad_norm": 0.0546875,
"learning_rate": 7.692702285121299e-06,
"loss": 0.7976,
"step": 903
},
{
"epoch": 0.59,
"grad_norm": 0.0498046875,
"learning_rate": 7.672205572353394e-06,
"loss": 0.747,
"step": 904
},
{
"epoch": 0.59,
"grad_norm": 0.08251953125,
"learning_rate": 7.65171919407246e-06,
"loss": 0.7857,
"step": 905
},
{
"epoch": 0.59,
"grad_norm": 0.051513671875,
"learning_rate": 7.631243241229916e-06,
"loss": 0.7942,
"step": 906
},
{
"epoch": 0.59,
"grad_norm": 0.052001953125,
"learning_rate": 7.610777804730903e-06,
"loss": 0.8072,
"step": 907
},
{
"epoch": 0.59,
"grad_norm": 0.0537109375,
"learning_rate": 7.590322975433857e-06,
"loss": 0.7873,
"step": 908
},
{
"epoch": 0.59,
"grad_norm": 0.0537109375,
"learning_rate": 7.56987884415014e-06,
"loss": 0.8778,
"step": 909
},
{
"epoch": 0.59,
"grad_norm": 0.051513671875,
"learning_rate": 7.549445501643615e-06,
"loss": 0.7803,
"step": 910
},
{
"epoch": 0.59,
"grad_norm": 0.0517578125,
"learning_rate": 7.5290230386302384e-06,
"loss": 0.8263,
"step": 911
},
{
"epoch": 0.59,
"grad_norm": 0.05126953125,
"learning_rate": 7.508611545777679e-06,
"loss": 0.8951,
"step": 912
},
{
"epoch": 0.59,
"grad_norm": 0.0517578125,
"learning_rate": 7.488211113704886e-06,
"loss": 0.8128,
"step": 913
},
{
"epoch": 0.59,
"grad_norm": 0.054931640625,
"learning_rate": 7.46782183298172e-06,
"loss": 0.833,
"step": 914
},
{
"epoch": 0.59,
"grad_norm": 0.056640625,
"learning_rate": 7.447443794128525e-06,
"loss": 0.783,
"step": 915
},
{
"epoch": 0.6,
"grad_norm": 0.052734375,
"learning_rate": 7.427077087615735e-06,
"loss": 0.8378,
"step": 916
},
{
"epoch": 0.6,
"grad_norm": 0.056884765625,
"learning_rate": 7.406721803863475e-06,
"loss": 0.8127,
"step": 917
},
{
"epoch": 0.6,
"grad_norm": 0.06640625,
"learning_rate": 7.386378033241164e-06,
"loss": 0.7863,
"step": 918
},
{
"epoch": 0.6,
"grad_norm": 0.054931640625,
"learning_rate": 7.3660458660670905e-06,
"loss": 0.795,
"step": 919
},
{
"epoch": 0.6,
"grad_norm": 0.060791015625,
"learning_rate": 7.345725392608047e-06,
"loss": 0.9132,
"step": 920
},
{
"epoch": 0.6,
"grad_norm": 0.052001953125,
"learning_rate": 7.3254167030788955e-06,
"loss": 0.8416,
"step": 921
},
{
"epoch": 0.6,
"grad_norm": 0.055419921875,
"learning_rate": 7.305119887642191e-06,
"loss": 0.8038,
"step": 922
},
{
"epoch": 0.6,
"grad_norm": 0.053466796875,
"learning_rate": 7.284835036407776e-06,
"loss": 0.8545,
"step": 923
},
{
"epoch": 0.6,
"grad_norm": 0.049560546875,
"learning_rate": 7.26456223943236e-06,
"loss": 0.8192,
"step": 924
},
{
"epoch": 0.6,
"grad_norm": 0.053466796875,
"learning_rate": 7.244301586719151e-06,
"loss": 0.8427,
"step": 925
},
{
"epoch": 0.6,
"grad_norm": 0.0556640625,
"learning_rate": 7.22405316821744e-06,
"loss": 0.84,
"step": 926
},
{
"epoch": 0.6,
"grad_norm": 0.05859375,
"learning_rate": 7.2038170738221945e-06,
"loss": 0.8305,
"step": 927
},
{
"epoch": 0.6,
"grad_norm": 0.057861328125,
"learning_rate": 7.18359339337368e-06,
"loss": 0.838,
"step": 928
},
{
"epoch": 0.6,
"grad_norm": 0.051513671875,
"learning_rate": 7.163382216657033e-06,
"loss": 0.8448,
"step": 929
},
{
"epoch": 0.6,
"grad_norm": 0.05419921875,
"learning_rate": 7.143183633401896e-06,
"loss": 0.8069,
"step": 930
},
{
"epoch": 0.61,
"grad_norm": 0.0556640625,
"learning_rate": 7.122997733281995e-06,
"loss": 0.7352,
"step": 931
},
{
"epoch": 0.61,
"grad_norm": 0.05078125,
"learning_rate": 7.102824605914736e-06,
"loss": 0.8466,
"step": 932
},
{
"epoch": 0.61,
"grad_norm": 0.052001953125,
"learning_rate": 7.082664340860834e-06,
"loss": 0.7785,
"step": 933
},
{
"epoch": 0.61,
"grad_norm": 0.054931640625,
"learning_rate": 7.0625170276239005e-06,
"loss": 0.7869,
"step": 934
},
{
"epoch": 0.61,
"grad_norm": 0.050537109375,
"learning_rate": 7.0423827556500325e-06,
"loss": 0.8095,
"step": 935
},
{
"epoch": 0.61,
"grad_norm": 0.052490234375,
"learning_rate": 7.022261614327448e-06,
"loss": 0.8043,
"step": 936
},
{
"epoch": 0.61,
"grad_norm": 0.05224609375,
"learning_rate": 7.002153692986048e-06,
"loss": 0.8092,
"step": 937
},
{
"epoch": 0.61,
"grad_norm": 0.053955078125,
"learning_rate": 6.982059080897059e-06,
"loss": 0.8259,
"step": 938
},
{
"epoch": 0.61,
"grad_norm": 0.05029296875,
"learning_rate": 6.961977867272616e-06,
"loss": 0.7845,
"step": 939
},
{
"epoch": 0.61,
"grad_norm": 0.06787109375,
"learning_rate": 6.941910141265364e-06,
"loss": 0.7595,
"step": 940
},
{
"epoch": 0.61,
"grad_norm": 0.05419921875,
"learning_rate": 6.921855991968078e-06,
"loss": 0.7633,
"step": 941
},
{
"epoch": 0.61,
"grad_norm": 0.0546875,
"learning_rate": 6.901815508413252e-06,
"loss": 0.882,
"step": 942
},
{
"epoch": 0.61,
"grad_norm": 0.0517578125,
"learning_rate": 6.881788779572704e-06,
"loss": 0.8275,
"step": 943
},
{
"epoch": 0.61,
"grad_norm": 0.051025390625,
"learning_rate": 6.861775894357197e-06,
"loss": 0.8186,
"step": 944
},
{
"epoch": 0.61,
"grad_norm": 0.05078125,
"learning_rate": 6.841776941616031e-06,
"loss": 0.8279,
"step": 945
},
{
"epoch": 0.62,
"grad_norm": 0.049072265625,
"learning_rate": 6.821792010136645e-06,
"loss": 0.8487,
"step": 946
},
{
"epoch": 0.62,
"grad_norm": 0.054443359375,
"learning_rate": 6.801821188644242e-06,
"loss": 0.8866,
"step": 947
},
{
"epoch": 0.62,
"grad_norm": 0.049072265625,
"learning_rate": 6.781864565801363e-06,
"loss": 0.8537,
"step": 948
},
{
"epoch": 0.62,
"grad_norm": 0.055419921875,
"learning_rate": 6.761922230207528e-06,
"loss": 0.8315,
"step": 949
},
{
"epoch": 0.62,
"grad_norm": 0.0556640625,
"learning_rate": 6.741994270398826e-06,
"loss": 0.8696,
"step": 950
},
{
"epoch": 0.62,
"grad_norm": 0.053955078125,
"learning_rate": 6.722080774847517e-06,
"loss": 0.7755,
"step": 951
},
{
"epoch": 0.62,
"grad_norm": 0.051513671875,
"learning_rate": 6.702181831961648e-06,
"loss": 0.8394,
"step": 952
},
{
"epoch": 0.62,
"grad_norm": 0.05322265625,
"learning_rate": 6.682297530084665e-06,
"loss": 0.8068,
"step": 953
},
{
"epoch": 0.62,
"grad_norm": 0.0546875,
"learning_rate": 6.662427957494997e-06,
"loss": 0.7511,
"step": 954
},
{
"epoch": 0.62,
"grad_norm": 0.0498046875,
"learning_rate": 6.642573202405698e-06,
"loss": 0.8835,
"step": 955
},
{
"epoch": 0.62,
"grad_norm": 0.053955078125,
"learning_rate": 6.622733352964026e-06,
"loss": 0.8036,
"step": 956
},
{
"epoch": 0.62,
"grad_norm": 0.05029296875,
"learning_rate": 6.602908497251073e-06,
"loss": 0.8399,
"step": 957
},
{
"epoch": 0.62,
"grad_norm": 0.051513671875,
"learning_rate": 6.583098723281362e-06,
"loss": 0.8352,
"step": 958
},
{
"epoch": 0.62,
"grad_norm": 0.05126953125,
"learning_rate": 6.563304119002452e-06,
"loss": 0.7379,
"step": 959
},
{
"epoch": 0.62,
"grad_norm": 0.0576171875,
"learning_rate": 6.54352477229456e-06,
"loss": 0.9129,
"step": 960
},
{
"epoch": 0.62,
"grad_norm": 0.05810546875,
"learning_rate": 6.523760770970175e-06,
"loss": 0.858,
"step": 961
},
{
"epoch": 0.63,
"grad_norm": 0.051513671875,
"learning_rate": 6.50401220277364e-06,
"loss": 0.8677,
"step": 962
},
{
"epoch": 0.63,
"grad_norm": 0.053955078125,
"learning_rate": 6.484279155380797e-06,
"loss": 0.8417,
"step": 963
},
{
"epoch": 0.63,
"grad_norm": 0.055908203125,
"learning_rate": 6.464561716398565e-06,
"loss": 0.8293,
"step": 964
},
{
"epoch": 0.63,
"grad_norm": 0.05078125,
"learning_rate": 6.444859973364588e-06,
"loss": 0.7939,
"step": 965
},
{
"epoch": 0.63,
"grad_norm": 0.0625,
"learning_rate": 6.425174013746814e-06,
"loss": 0.7653,
"step": 966
},
{
"epoch": 0.63,
"grad_norm": 0.0546875,
"learning_rate": 6.405503924943116e-06,
"loss": 0.8113,
"step": 967
},
{
"epoch": 0.63,
"grad_norm": 0.053955078125,
"learning_rate": 6.385849794280915e-06,
"loss": 0.8182,
"step": 968
},
{
"epoch": 0.63,
"grad_norm": 0.0517578125,
"learning_rate": 6.366211709016783e-06,
"loss": 0.851,
"step": 969
},
{
"epoch": 0.63,
"grad_norm": 0.056396484375,
"learning_rate": 6.34658975633605e-06,
"loss": 0.8861,
"step": 970
},
{
"epoch": 0.63,
"grad_norm": 0.0537109375,
"learning_rate": 6.326984023352435e-06,
"loss": 0.8709,
"step": 971
},
{
"epoch": 0.63,
"grad_norm": 0.0546875,
"learning_rate": 6.307394597107633e-06,
"loss": 0.8176,
"step": 972
},
{
"epoch": 0.63,
"grad_norm": 0.056396484375,
"learning_rate": 6.287821564570957e-06,
"loss": 0.8082,
"step": 973
},
{
"epoch": 0.63,
"grad_norm": 0.05126953125,
"learning_rate": 6.268265012638934e-06,
"loss": 0.8379,
"step": 974
},
{
"epoch": 0.63,
"grad_norm": 0.0546875,
"learning_rate": 6.2487250281349185e-06,
"loss": 0.8798,
"step": 975
},
{
"epoch": 0.63,
"grad_norm": 0.053466796875,
"learning_rate": 6.22920169780872e-06,
"loss": 0.7396,
"step": 976
},
{
"epoch": 0.64,
"grad_norm": 0.048583984375,
"learning_rate": 6.209695108336211e-06,
"loss": 0.8417,
"step": 977
},
{
"epoch": 0.64,
"grad_norm": 0.05712890625,
"learning_rate": 6.190205346318927e-06,
"loss": 0.8823,
"step": 978
},
{
"epoch": 0.64,
"grad_norm": 0.0576171875,
"learning_rate": 6.1707324982837145e-06,
"loss": 0.7866,
"step": 979
},
{
"epoch": 0.64,
"grad_norm": 0.052978515625,
"learning_rate": 6.151276650682314e-06,
"loss": 0.7997,
"step": 980
},
{
"epoch": 0.64,
"grad_norm": 0.052734375,
"learning_rate": 6.131837889891e-06,
"loss": 0.7398,
"step": 981
},
{
"epoch": 0.64,
"grad_norm": 0.051025390625,
"learning_rate": 6.112416302210189e-06,
"loss": 0.8599,
"step": 982
},
{
"epoch": 0.64,
"grad_norm": 0.052978515625,
"learning_rate": 6.093011973864045e-06,
"loss": 0.8279,
"step": 983
},
{
"epoch": 0.64,
"grad_norm": 0.05615234375,
"learning_rate": 6.073624991000116e-06,
"loss": 0.8036,
"step": 984
},
{
"epoch": 0.64,
"grad_norm": 0.053466796875,
"learning_rate": 6.054255439688947e-06,
"loss": 0.777,
"step": 985
},
{
"epoch": 0.64,
"grad_norm": 0.05712890625,
"learning_rate": 6.034903405923681e-06,
"loss": 0.8602,
"step": 986
},
{
"epoch": 0.64,
"grad_norm": 0.05712890625,
"learning_rate": 6.015568975619705e-06,
"loss": 0.7723,
"step": 987
},
{
"epoch": 0.64,
"grad_norm": 0.052734375,
"learning_rate": 5.996252234614235e-06,
"loss": 0.8925,
"step": 988
},
{
"epoch": 0.64,
"grad_norm": 0.0595703125,
"learning_rate": 5.976953268665971e-06,
"loss": 0.7483,
"step": 989
},
{
"epoch": 0.64,
"grad_norm": 0.052001953125,
"learning_rate": 5.957672163454694e-06,
"loss": 0.8351,
"step": 990
},
{
"epoch": 0.64,
"grad_norm": 0.08544921875,
"learning_rate": 5.938409004580883e-06,
"loss": 0.841,
"step": 991
},
{
"epoch": 0.64,
"grad_norm": 0.05419921875,
"learning_rate": 5.919163877565351e-06,
"loss": 0.8116,
"step": 992
},
{
"epoch": 0.65,
"grad_norm": 0.052734375,
"learning_rate": 5.899936867848857e-06,
"loss": 0.7741,
"step": 993
},
{
"epoch": 0.65,
"grad_norm": 0.059814453125,
"learning_rate": 5.880728060791712e-06,
"loss": 0.8981,
"step": 994
},
{
"epoch": 0.65,
"grad_norm": 0.05126953125,
"learning_rate": 5.861537541673436e-06,
"loss": 0.7791,
"step": 995
},
{
"epoch": 0.65,
"grad_norm": 0.052490234375,
"learning_rate": 5.8423653956923394e-06,
"loss": 0.804,
"step": 996
},
{
"epoch": 0.65,
"grad_norm": 0.050537109375,
"learning_rate": 5.823211707965173e-06,
"loss": 0.7934,
"step": 997
},
{
"epoch": 0.65,
"grad_norm": 0.059814453125,
"learning_rate": 5.804076563526743e-06,
"loss": 0.7932,
"step": 998
},
{
"epoch": 0.65,
"grad_norm": 0.050537109375,
"learning_rate": 5.78496004732952e-06,
"loss": 0.7689,
"step": 999
},
{
"epoch": 0.65,
"grad_norm": 0.05615234375,
"learning_rate": 5.765862244243279e-06,
"loss": 0.8088,
"step": 1000
},
{
"epoch": 0.65,
"grad_norm": 0.05126953125,
"learning_rate": 5.7467832390547205e-06,
"loss": 0.8581,
"step": 1001
},
{
"epoch": 0.65,
"grad_norm": 0.0546875,
"learning_rate": 5.727723116467076e-06,
"loss": 0.846,
"step": 1002
},
{
"epoch": 0.65,
"grad_norm": 0.052001953125,
"learning_rate": 5.708681961099758e-06,
"loss": 0.8459,
"step": 1003
},
{
"epoch": 0.65,
"grad_norm": 0.05078125,
"learning_rate": 5.689659857487974e-06,
"loss": 0.8362,
"step": 1004
},
{
"epoch": 0.65,
"grad_norm": 0.05517578125,
"learning_rate": 5.670656890082336e-06,
"loss": 0.8684,
"step": 1005
},
{
"epoch": 0.65,
"grad_norm": 0.04931640625,
"learning_rate": 5.651673143248509e-06,
"loss": 0.7712,
"step": 1006
},
{
"epoch": 0.65,
"grad_norm": 0.0537109375,
"learning_rate": 5.6327087012668226e-06,
"loss": 0.8785,
"step": 1007
},
{
"epoch": 0.66,
"grad_norm": 0.052734375,
"learning_rate": 5.613763648331904e-06,
"loss": 0.8575,
"step": 1008
},
{
"epoch": 0.66,
"grad_norm": 0.05224609375,
"learning_rate": 5.594838068552302e-06,
"loss": 0.7946,
"step": 1009
},
{
"epoch": 0.66,
"grad_norm": 0.054443359375,
"learning_rate": 5.5759320459500994e-06,
"loss": 0.9174,
"step": 1010
},
{
"epoch": 0.66,
"grad_norm": 0.055419921875,
"learning_rate": 5.557045664460567e-06,
"loss": 0.866,
"step": 1011
},
{
"epoch": 0.66,
"grad_norm": 0.050048828125,
"learning_rate": 5.538179007931777e-06,
"loss": 0.8701,
"step": 1012
},
{
"epoch": 0.66,
"grad_norm": 0.05859375,
"learning_rate": 5.519332160124215e-06,
"loss": 0.8036,
"step": 1013
},
{
"epoch": 0.66,
"grad_norm": 0.05224609375,
"learning_rate": 5.500505204710445e-06,
"loss": 0.9006,
"step": 1014
},
{
"epoch": 0.66,
"grad_norm": 0.049072265625,
"learning_rate": 5.481698225274696e-06,
"loss": 0.8594,
"step": 1015
},
{
"epoch": 0.66,
"grad_norm": 0.05419921875,
"learning_rate": 5.462911305312526e-06,
"loss": 0.8567,
"step": 1016
},
{
"epoch": 0.66,
"grad_norm": 0.05078125,
"learning_rate": 5.444144528230432e-06,
"loss": 0.8518,
"step": 1017
},
{
"epoch": 0.66,
"grad_norm": 0.052978515625,
"learning_rate": 5.425397977345485e-06,
"loss": 0.8722,
"step": 1018
},
{
"epoch": 0.66,
"grad_norm": 0.061279296875,
"learning_rate": 5.406671735884959e-06,
"loss": 0.8859,
"step": 1019
},
{
"epoch": 0.66,
"grad_norm": 0.0546875,
"learning_rate": 5.387965886985967e-06,
"loss": 0.7231,
"step": 1020
},
{
"epoch": 0.66,
"grad_norm": 0.051025390625,
"learning_rate": 5.369280513695074e-06,
"loss": 0.7835,
"step": 1021
},
{
"epoch": 0.66,
"grad_norm": 0.05078125,
"learning_rate": 5.3506156989679605e-06,
"loss": 0.7461,
"step": 1022
},
{
"epoch": 0.67,
"grad_norm": 0.05126953125,
"learning_rate": 5.331971525669015e-06,
"loss": 0.8321,
"step": 1023
},
{
"epoch": 0.67,
"grad_norm": 0.051513671875,
"learning_rate": 5.313348076571001e-06,
"loss": 0.7939,
"step": 1024
},
{
"epoch": 0.67,
"grad_norm": 0.052490234375,
"learning_rate": 5.294745434354671e-06,
"loss": 0.8036,
"step": 1025
},
{
"epoch": 0.67,
"grad_norm": 0.052001953125,
"learning_rate": 5.276163681608399e-06,
"loss": 0.78,
"step": 1026
},
{
"epoch": 0.67,
"grad_norm": 0.054443359375,
"learning_rate": 5.257602900827821e-06,
"loss": 0.833,
"step": 1027
},
{
"epoch": 0.67,
"grad_norm": 0.052978515625,
"learning_rate": 5.239063174415466e-06,
"loss": 0.8092,
"step": 1028
},
{
"epoch": 0.67,
"grad_norm": 0.05029296875,
"learning_rate": 5.22054458468039e-06,
"loss": 0.7748,
"step": 1029
},
{
"epoch": 0.67,
"grad_norm": 0.05126953125,
"learning_rate": 5.202047213837815e-06,
"loss": 0.7401,
"step": 1030
},
{
"epoch": 0.67,
"grad_norm": 0.051025390625,
"learning_rate": 5.1835711440087475e-06,
"loss": 0.8139,
"step": 1031
},
{
"epoch": 0.67,
"grad_norm": 0.05078125,
"learning_rate": 5.1651164572196346e-06,
"loss": 0.8338,
"step": 1032
},
{
"epoch": 0.67,
"grad_norm": 0.05224609375,
"learning_rate": 5.146683235401995e-06,
"loss": 0.855,
"step": 1033
},
{
"epoch": 0.67,
"grad_norm": 0.052001953125,
"learning_rate": 5.128271560392037e-06,
"loss": 0.7874,
"step": 1034
},
{
"epoch": 0.67,
"grad_norm": 0.053466796875,
"learning_rate": 5.1098815139303235e-06,
"loss": 0.7854,
"step": 1035
},
{
"epoch": 0.67,
"grad_norm": 0.050537109375,
"learning_rate": 5.091513177661391e-06,
"loss": 0.7936,
"step": 1036
},
{
"epoch": 0.67,
"grad_norm": 0.05419921875,
"learning_rate": 5.0731666331333894e-06,
"loss": 0.8039,
"step": 1037
},
{
"epoch": 0.67,
"grad_norm": 0.05517578125,
"learning_rate": 5.0548419617977275e-06,
"loss": 0.8405,
"step": 1038
},
{
"epoch": 0.68,
"grad_norm": 0.05517578125,
"learning_rate": 5.036539245008694e-06,
"loss": 0.828,
"step": 1039
},
{
"epoch": 0.68,
"grad_norm": 0.049072265625,
"learning_rate": 5.01825856402312e-06,
"loss": 0.788,
"step": 1040
},
{
"epoch": 0.68,
"grad_norm": 0.052734375,
"learning_rate": 5.000000000000003e-06,
"loss": 0.8911,
"step": 1041
},
{
"epoch": 0.68,
"grad_norm": 0.056640625,
"learning_rate": 4.981763634000143e-06,
"loss": 0.8753,
"step": 1042
},
{
"epoch": 0.68,
"grad_norm": 0.047607421875,
"learning_rate": 4.963549546985799e-06,
"loss": 0.8118,
"step": 1043
},
{
"epoch": 0.68,
"grad_norm": 0.055908203125,
"learning_rate": 4.94535781982032e-06,
"loss": 0.7488,
"step": 1044
},
{
"epoch": 0.68,
"grad_norm": 0.051025390625,
"learning_rate": 4.927188533267776e-06,
"loss": 0.8432,
"step": 1045
},
{
"epoch": 0.68,
"grad_norm": 0.0498046875,
"learning_rate": 4.90904176799262e-06,
"loss": 0.8129,
"step": 1046
},
{
"epoch": 0.68,
"grad_norm": 0.0576171875,
"learning_rate": 4.890917604559312e-06,
"loss": 0.8256,
"step": 1047
},
{
"epoch": 0.68,
"grad_norm": 0.052001953125,
"learning_rate": 4.872816123431977e-06,
"loss": 0.8088,
"step": 1048
},
{
"epoch": 0.68,
"grad_norm": 0.054931640625,
"learning_rate": 4.854737404974036e-06,
"loss": 0.834,
"step": 1049
},
{
"epoch": 0.68,
"grad_norm": 0.057373046875,
"learning_rate": 4.836681529447844e-06,
"loss": 0.7758,
"step": 1050
},
{
"epoch": 0.68,
"grad_norm": 0.05322265625,
"learning_rate": 4.818648577014354e-06,
"loss": 0.8316,
"step": 1051
},
{
"epoch": 0.68,
"grad_norm": 0.0615234375,
"learning_rate": 4.800638627732751e-06,
"loss": 0.8591,
"step": 1052
},
{
"epoch": 0.68,
"grad_norm": 0.05712890625,
"learning_rate": 4.78265176156008e-06,
"loss": 0.8888,
"step": 1053
},
{
"epoch": 0.69,
"grad_norm": 0.0576171875,
"learning_rate": 4.764688058350924e-06,
"loss": 0.8113,
"step": 1054
},
{
"epoch": 0.69,
"grad_norm": 0.05078125,
"learning_rate": 4.746747597857014e-06,
"loss": 0.7517,
"step": 1055
},
{
"epoch": 0.69,
"grad_norm": 0.0517578125,
"learning_rate": 4.728830459726909e-06,
"loss": 0.7524,
"step": 1056
},
{
"epoch": 0.69,
"grad_norm": 0.04931640625,
"learning_rate": 4.710936723505618e-06,
"loss": 0.7694,
"step": 1057
},
{
"epoch": 0.69,
"grad_norm": 0.05322265625,
"learning_rate": 4.693066468634253e-06,
"loss": 0.862,
"step": 1058
},
{
"epoch": 0.69,
"grad_norm": 0.051513671875,
"learning_rate": 4.675219774449684e-06,
"loss": 0.7901,
"step": 1059
},
{
"epoch": 0.69,
"grad_norm": 0.0556640625,
"learning_rate": 4.6573967201841785e-06,
"loss": 0.8082,
"step": 1060
},
{
"epoch": 0.69,
"grad_norm": 0.058837890625,
"learning_rate": 4.639597384965045e-06,
"loss": 0.8262,
"step": 1061
},
{
"epoch": 0.69,
"grad_norm": 0.05810546875,
"learning_rate": 4.621821847814298e-06,
"loss": 0.8953,
"step": 1062
},
{
"epoch": 0.69,
"grad_norm": 0.05517578125,
"learning_rate": 4.6040701876483004e-06,
"loss": 0.7649,
"step": 1063
},
{
"epoch": 0.69,
"grad_norm": 0.0517578125,
"learning_rate": 4.586342483277396e-06,
"loss": 0.8502,
"step": 1064
},
{
"epoch": 0.69,
"grad_norm": 0.048095703125,
"learning_rate": 4.568638813405591e-06,
"loss": 0.7762,
"step": 1065
},
{
"epoch": 0.69,
"grad_norm": 0.052734375,
"learning_rate": 4.550959256630171e-06,
"loss": 0.8551,
"step": 1066
},
{
"epoch": 0.69,
"grad_norm": 0.0498046875,
"learning_rate": 4.533303891441388e-06,
"loss": 0.8396,
"step": 1067
},
{
"epoch": 0.69,
"grad_norm": 0.047607421875,
"learning_rate": 4.515672796222083e-06,
"loss": 0.7527,
"step": 1068
},
{
"epoch": 0.7,
"grad_norm": 0.0537109375,
"learning_rate": 4.498066049247344e-06,
"loss": 0.792,
"step": 1069
},
{
"epoch": 0.7,
"grad_norm": 0.051025390625,
"learning_rate": 4.480483728684169e-06,
"loss": 0.7919,
"step": 1070
},
{
"epoch": 0.7,
"grad_norm": 0.0498046875,
"learning_rate": 4.4629259125911105e-06,
"loss": 0.8415,
"step": 1071
},
{
"epoch": 0.7,
"grad_norm": 0.05078125,
"learning_rate": 4.445392678917928e-06,
"loss": 0.8421,
"step": 1072
},
{
"epoch": 0.7,
"grad_norm": 0.053466796875,
"learning_rate": 4.427884105505252e-06,
"loss": 0.8139,
"step": 1073
},
{
"epoch": 0.7,
"grad_norm": 0.049560546875,
"learning_rate": 4.410400270084219e-06,
"loss": 0.7996,
"step": 1074
},
{
"epoch": 0.7,
"grad_norm": 0.053955078125,
"learning_rate": 4.392941250276152e-06,
"loss": 0.8123,
"step": 1075
},
{
"epoch": 0.7,
"grad_norm": 0.05029296875,
"learning_rate": 4.375507123592194e-06,
"loss": 0.8365,
"step": 1076
},
{
"epoch": 0.7,
"grad_norm": 0.052001953125,
"learning_rate": 4.358097967432976e-06,
"loss": 0.7831,
"step": 1077
},
{
"epoch": 0.7,
"grad_norm": 0.055908203125,
"learning_rate": 4.3407138590882695e-06,
"loss": 0.8176,
"step": 1078
},
{
"epoch": 0.7,
"grad_norm": 0.0576171875,
"learning_rate": 4.323354875736644e-06,
"loss": 0.8779,
"step": 1079
},
{
"epoch": 0.7,
"grad_norm": 0.053466796875,
"learning_rate": 4.306021094445119e-06,
"loss": 0.8436,
"step": 1080
},
{
"epoch": 0.7,
"grad_norm": 0.056884765625,
"learning_rate": 4.288712592168834e-06,
"loss": 0.8882,
"step": 1081
},
{
"epoch": 0.7,
"grad_norm": 0.0498046875,
"learning_rate": 4.2714294457506926e-06,
"loss": 0.8107,
"step": 1082
},
{
"epoch": 0.7,
"grad_norm": 0.053955078125,
"learning_rate": 4.254171731921032e-06,
"loss": 0.7882,
"step": 1083
},
{
"epoch": 0.7,
"grad_norm": 0.054443359375,
"learning_rate": 4.236939527297286e-06,
"loss": 0.8365,
"step": 1084
},
{
"epoch": 0.71,
"grad_norm": 0.05419921875,
"learning_rate": 4.21973290838362e-06,
"loss": 0.8201,
"step": 1085
},
{
"epoch": 0.71,
"grad_norm": 0.055908203125,
"learning_rate": 4.202551951570625e-06,
"loss": 0.8693,
"step": 1086
},
{
"epoch": 0.71,
"grad_norm": 0.0517578125,
"learning_rate": 4.185396733134957e-06,
"loss": 0.8346,
"step": 1087
},
{
"epoch": 0.71,
"grad_norm": 0.049072265625,
"learning_rate": 4.168267329239002e-06,
"loss": 0.7612,
"step": 1088
},
{
"epoch": 0.71,
"grad_norm": 0.05126953125,
"learning_rate": 4.151163815930547e-06,
"loss": 0.8227,
"step": 1089
},
{
"epoch": 0.71,
"grad_norm": 0.051025390625,
"learning_rate": 4.1340862691424184e-06,
"loss": 0.762,
"step": 1090
},
{
"epoch": 0.71,
"grad_norm": 0.059326171875,
"learning_rate": 4.11703476469218e-06,
"loss": 0.8148,
"step": 1091
},
{
"epoch": 0.71,
"grad_norm": 0.052490234375,
"learning_rate": 4.100009378281772e-06,
"loss": 0.7531,
"step": 1092
},
{
"epoch": 0.71,
"grad_norm": 0.054443359375,
"learning_rate": 4.0830101854971725e-06,
"loss": 0.8744,
"step": 1093
},
{
"epoch": 0.71,
"grad_norm": 0.05517578125,
"learning_rate": 4.066037261808079e-06,
"loss": 0.8733,
"step": 1094
},
{
"epoch": 0.71,
"grad_norm": 0.05615234375,
"learning_rate": 4.049090682567568e-06,
"loss": 0.8293,
"step": 1095
},
{
"epoch": 0.71,
"grad_norm": 0.05419921875,
"learning_rate": 4.032170523011743e-06,
"loss": 0.8088,
"step": 1096
},
{
"epoch": 0.71,
"grad_norm": 0.0517578125,
"learning_rate": 4.015276858259427e-06,
"loss": 0.8208,
"step": 1097
},
{
"epoch": 0.71,
"grad_norm": 0.06396484375,
"learning_rate": 3.998409763311814e-06,
"loss": 0.8684,
"step": 1098
},
{
"epoch": 0.71,
"grad_norm": 0.05224609375,
"learning_rate": 3.981569313052135e-06,
"loss": 0.7578,
"step": 1099
},
{
"epoch": 0.72,
"grad_norm": 0.049072265625,
"learning_rate": 3.964755582245335e-06,
"loss": 0.8526,
"step": 1100
},
{
"epoch": 0.72,
"grad_norm": 0.056640625,
"learning_rate": 3.947968645537727e-06,
"loss": 0.8404,
"step": 1101
},
{
"epoch": 0.72,
"grad_norm": 0.0537109375,
"learning_rate": 3.931208577456674e-06,
"loss": 0.8259,
"step": 1102
},
{
"epoch": 0.72,
"grad_norm": 0.052978515625,
"learning_rate": 3.914475452410257e-06,
"loss": 0.8161,
"step": 1103
},
{
"epoch": 0.72,
"grad_norm": 0.053466796875,
"learning_rate": 3.897769344686929e-06,
"loss": 0.8634,
"step": 1104
},
{
"epoch": 0.72,
"grad_norm": 0.05322265625,
"learning_rate": 3.881090328455206e-06,
"loss": 0.8254,
"step": 1105
},
{
"epoch": 0.72,
"grad_norm": 0.052001953125,
"learning_rate": 3.864438477763327e-06,
"loss": 0.8013,
"step": 1106
},
{
"epoch": 0.72,
"grad_norm": 0.052978515625,
"learning_rate": 3.847813866538926e-06,
"loss": 0.7607,
"step": 1107
},
{
"epoch": 0.72,
"grad_norm": 0.05224609375,
"learning_rate": 3.831216568588707e-06,
"loss": 0.8182,
"step": 1108
},
{
"epoch": 0.72,
"grad_norm": 0.057373046875,
"learning_rate": 3.8146466575981044e-06,
"loss": 0.8132,
"step": 1109
},
{
"epoch": 0.72,
"grad_norm": 0.052490234375,
"learning_rate": 3.798104207130977e-06,
"loss": 0.8323,
"step": 1110
},
{
"epoch": 0.72,
"grad_norm": 0.049560546875,
"learning_rate": 3.781589290629266e-06,
"loss": 0.7456,
"step": 1111
},
{
"epoch": 0.72,
"grad_norm": 0.051025390625,
"learning_rate": 3.7651019814126656e-06,
"loss": 0.8267,
"step": 1112
},
{
"epoch": 0.72,
"grad_norm": 0.05322265625,
"learning_rate": 3.748642352678318e-06,
"loss": 0.8337,
"step": 1113
},
{
"epoch": 0.72,
"grad_norm": 0.052490234375,
"learning_rate": 3.73221047750046e-06,
"loss": 0.8805,
"step": 1114
},
{
"epoch": 0.72,
"grad_norm": 0.050048828125,
"learning_rate": 3.715806428830124e-06,
"loss": 0.7678,
"step": 1115
},
{
"epoch": 0.73,
"grad_norm": 0.050537109375,
"learning_rate": 3.6994302794948023e-06,
"loss": 0.8131,
"step": 1116
},
{
"epoch": 0.73,
"grad_norm": 0.056396484375,
"learning_rate": 3.6830821021981223e-06,
"loss": 0.8511,
"step": 1117
},
{
"epoch": 0.73,
"grad_norm": 0.05078125,
"learning_rate": 3.6667619695195287e-06,
"loss": 0.7869,
"step": 1118
},
{
"epoch": 0.73,
"grad_norm": 0.04931640625,
"learning_rate": 3.650469953913962e-06,
"loss": 0.7729,
"step": 1119
},
{
"epoch": 0.73,
"grad_norm": 0.049072265625,
"learning_rate": 3.6342061277115235e-06,
"loss": 0.7878,
"step": 1120
},
{
"epoch": 0.73,
"grad_norm": 0.056396484375,
"learning_rate": 3.6179705631171747e-06,
"loss": 0.7963,
"step": 1121
},
{
"epoch": 0.73,
"grad_norm": 0.05322265625,
"learning_rate": 3.6017633322104063e-06,
"loss": 0.877,
"step": 1122
},
{
"epoch": 0.73,
"grad_norm": 0.053466796875,
"learning_rate": 3.5855845069449114e-06,
"loss": 0.7568,
"step": 1123
},
{
"epoch": 0.73,
"grad_norm": 0.05224609375,
"learning_rate": 3.569434159148283e-06,
"loss": 0.8158,
"step": 1124
},
{
"epoch": 0.73,
"grad_norm": 0.050537109375,
"learning_rate": 3.5533123605216748e-06,
"loss": 0.8384,
"step": 1125
},
{
"epoch": 0.73,
"grad_norm": 0.052978515625,
"learning_rate": 3.5372191826395007e-06,
"loss": 0.7743,
"step": 1126
},
{
"epoch": 0.73,
"grad_norm": 0.048095703125,
"learning_rate": 3.5211546969491096e-06,
"loss": 0.8214,
"step": 1127
},
{
"epoch": 0.73,
"grad_norm": 0.050537109375,
"learning_rate": 3.505118974770466e-06,
"loss": 0.8198,
"step": 1128
},
{
"epoch": 0.73,
"grad_norm": 0.060546875,
"learning_rate": 3.4891120872958374e-06,
"loss": 0.7972,
"step": 1129
},
{
"epoch": 0.73,
"grad_norm": 0.059814453125,
"learning_rate": 3.4731341055894784e-06,
"loss": 0.8484,
"step": 1130
},
{
"epoch": 0.74,
"grad_norm": 0.052978515625,
"learning_rate": 3.457185100587306e-06,
"loss": 0.8246,
"step": 1131
},
{
"epoch": 0.74,
"grad_norm": 0.051025390625,
"learning_rate": 3.4412651430966006e-06,
"loss": 0.8374,
"step": 1132
},
{
"epoch": 0.74,
"grad_norm": 0.0517578125,
"learning_rate": 3.4253743037956756e-06,
"loss": 0.7951,
"step": 1133
},
{
"epoch": 0.74,
"grad_norm": 0.052001953125,
"learning_rate": 3.4095126532335776e-06,
"loss": 0.8404,
"step": 1134
},
{
"epoch": 0.74,
"grad_norm": 0.0537109375,
"learning_rate": 3.3936802618297637e-06,
"loss": 0.863,
"step": 1135
},
{
"epoch": 0.74,
"grad_norm": 0.05126953125,
"learning_rate": 3.3778771998737935e-06,
"loss": 0.781,
"step": 1136
},
{
"epoch": 0.74,
"grad_norm": 0.052490234375,
"learning_rate": 3.3621035375250134e-06,
"loss": 0.833,
"step": 1137
},
{
"epoch": 0.74,
"grad_norm": 0.049072265625,
"learning_rate": 3.346359344812251e-06,
"loss": 0.7757,
"step": 1138
},
{
"epoch": 0.74,
"grad_norm": 0.05419921875,
"learning_rate": 3.330644691633492e-06,
"loss": 0.8102,
"step": 1139
},
{
"epoch": 0.74,
"grad_norm": 0.056884765625,
"learning_rate": 3.31495964775559e-06,
"loss": 0.8311,
"step": 1140
},
{
"epoch": 0.74,
"grad_norm": 0.05224609375,
"learning_rate": 3.2993042828139334e-06,
"loss": 0.8824,
"step": 1141
},
{
"epoch": 0.74,
"grad_norm": 0.056396484375,
"learning_rate": 3.2836786663121544e-06,
"loss": 0.8383,
"step": 1142
},
{
"epoch": 0.74,
"grad_norm": 0.05908203125,
"learning_rate": 3.268082867621817e-06,
"loss": 0.8184,
"step": 1143
},
{
"epoch": 0.74,
"grad_norm": 0.05078125,
"learning_rate": 3.2525169559820944e-06,
"loss": 0.8039,
"step": 1144
},
{
"epoch": 0.74,
"grad_norm": 0.050537109375,
"learning_rate": 3.236981000499485e-06,
"loss": 0.763,
"step": 1145
},
{
"epoch": 0.75,
"grad_norm": 0.0546875,
"learning_rate": 3.2214750701474875e-06,
"loss": 0.7887,
"step": 1146
},
{
"epoch": 0.75,
"grad_norm": 0.056884765625,
"learning_rate": 3.205999233766304e-06,
"loss": 0.784,
"step": 1147
},
{
"epoch": 0.75,
"grad_norm": 0.05517578125,
"learning_rate": 3.1905535600625315e-06,
"loss": 0.7708,
"step": 1148
},
{
"epoch": 0.75,
"grad_norm": 0.0517578125,
"learning_rate": 3.175138117608849e-06,
"loss": 0.7955,
"step": 1149
},
{
"epoch": 0.75,
"grad_norm": 0.05224609375,
"learning_rate": 3.1597529748437296e-06,
"loss": 0.8098,
"step": 1150
},
{
"epoch": 0.75,
"grad_norm": 0.049560546875,
"learning_rate": 3.144398200071128e-06,
"loss": 0.8455,
"step": 1151
},
{
"epoch": 0.75,
"grad_norm": 0.05322265625,
"learning_rate": 3.1290738614601678e-06,
"loss": 0.9135,
"step": 1152
},
{
"epoch": 0.75,
"grad_norm": 0.0546875,
"learning_rate": 3.113780027044856e-06,
"loss": 0.792,
"step": 1153
},
{
"epoch": 0.75,
"grad_norm": 0.052001953125,
"learning_rate": 3.0985167647237756e-06,
"loss": 0.8489,
"step": 1154
},
{
"epoch": 0.75,
"grad_norm": 0.05029296875,
"learning_rate": 3.0832841422597694e-06,
"loss": 0.8262,
"step": 1155
},
{
"epoch": 0.75,
"grad_norm": 0.052001953125,
"learning_rate": 3.068082227279663e-06,
"loss": 0.7806,
"step": 1156
},
{
"epoch": 0.75,
"grad_norm": 0.05517578125,
"learning_rate": 3.0529110872739488e-06,
"loss": 0.8596,
"step": 1157
},
{
"epoch": 0.75,
"grad_norm": 0.05126953125,
"learning_rate": 3.0377707895964893e-06,
"loss": 0.803,
"step": 1158
},
{
"epoch": 0.75,
"grad_norm": 0.05126953125,
"learning_rate": 3.0226614014642252e-06,
"loss": 0.7745,
"step": 1159
},
{
"epoch": 0.75,
"grad_norm": 0.052734375,
"learning_rate": 3.00758298995686e-06,
"loss": 0.8167,
"step": 1160
},
{
"epoch": 0.75,
"grad_norm": 0.04931640625,
"learning_rate": 2.9925356220165815e-06,
"loss": 0.7761,
"step": 1161
},
{
"epoch": 0.76,
"grad_norm": 0.050537109375,
"learning_rate": 2.9775193644477584e-06,
"loss": 0.8103,
"step": 1162
},
{
"epoch": 0.76,
"grad_norm": 0.05517578125,
"learning_rate": 2.9625342839166315e-06,
"loss": 0.9147,
"step": 1163
},
{
"epoch": 0.76,
"grad_norm": 0.0537109375,
"learning_rate": 2.9475804469510405e-06,
"loss": 0.8639,
"step": 1164
},
{
"epoch": 0.76,
"grad_norm": 0.059326171875,
"learning_rate": 2.9326579199401018e-06,
"loss": 0.7753,
"step": 1165
},
{
"epoch": 0.76,
"grad_norm": 0.05224609375,
"learning_rate": 2.9177667691339374e-06,
"loss": 0.8012,
"step": 1166
},
{
"epoch": 0.76,
"grad_norm": 0.054443359375,
"learning_rate": 2.9029070606433787e-06,
"loss": 0.8307,
"step": 1167
},
{
"epoch": 0.76,
"grad_norm": 0.050048828125,
"learning_rate": 2.888078860439647e-06,
"loss": 0.735,
"step": 1168
},
{
"epoch": 0.76,
"grad_norm": 0.058837890625,
"learning_rate": 2.8732822343540913e-06,
"loss": 0.8013,
"step": 1169
},
{
"epoch": 0.76,
"grad_norm": 0.060302734375,
"learning_rate": 2.8585172480778865e-06,
"loss": 0.756,
"step": 1170
},
{
"epoch": 0.76,
"grad_norm": 0.0537109375,
"learning_rate": 2.8437839671617263e-06,
"loss": 0.7932,
"step": 1171
},
{
"epoch": 0.76,
"grad_norm": 0.0595703125,
"learning_rate": 2.8290824570155608e-06,
"loss": 0.7666,
"step": 1172
},
{
"epoch": 0.76,
"grad_norm": 0.0537109375,
"learning_rate": 2.8144127829082756e-06,
"loss": 0.695,
"step": 1173
},
{
"epoch": 0.76,
"grad_norm": 0.053955078125,
"learning_rate": 2.7997750099674282e-06,
"loss": 0.847,
"step": 1174
},
{
"epoch": 0.76,
"grad_norm": 0.052978515625,
"learning_rate": 2.7851692031789423e-06,
"loss": 0.8311,
"step": 1175
},
{
"epoch": 0.76,
"grad_norm": 0.05224609375,
"learning_rate": 2.770595427386826e-06,
"loss": 0.8379,
"step": 1176
},
{
"epoch": 0.77,
"grad_norm": 0.050537109375,
"learning_rate": 2.756053747292884e-06,
"loss": 0.8856,
"step": 1177
},
{
"epoch": 0.77,
"grad_norm": 0.05517578125,
"learning_rate": 2.7415442274564275e-06,
"loss": 0.9162,
"step": 1178
},
{
"epoch": 0.77,
"grad_norm": 0.06787109375,
"learning_rate": 2.7270669322939823e-06,
"loss": 0.8122,
"step": 1179
},
{
"epoch": 0.77,
"grad_norm": 0.051025390625,
"learning_rate": 2.7126219260790233e-06,
"loss": 0.9167,
"step": 1180
},
{
"epoch": 0.77,
"grad_norm": 0.051025390625,
"learning_rate": 2.698209272941659e-06,
"loss": 0.898,
"step": 1181
},
{
"epoch": 0.77,
"grad_norm": 0.05322265625,
"learning_rate": 2.683829036868376e-06,
"loss": 0.7977,
"step": 1182
},
{
"epoch": 0.77,
"grad_norm": 0.057861328125,
"learning_rate": 2.669481281701739e-06,
"loss": 0.9092,
"step": 1183
},
{
"epoch": 0.77,
"grad_norm": 0.052734375,
"learning_rate": 2.6551660711401038e-06,
"loss": 0.8432,
"step": 1184
},
{
"epoch": 0.77,
"grad_norm": 0.0556640625,
"learning_rate": 2.6408834687373487e-06,
"loss": 0.7602,
"step": 1185
},
{
"epoch": 0.77,
"grad_norm": 0.055419921875,
"learning_rate": 2.626633537902581e-06,
"loss": 0.8318,
"step": 1186
},
{
"epoch": 0.77,
"grad_norm": 0.052001953125,
"learning_rate": 2.6124163418998603e-06,
"loss": 0.8412,
"step": 1187
},
{
"epoch": 0.77,
"grad_norm": 0.050537109375,
"learning_rate": 2.5982319438479163e-06,
"loss": 0.8249,
"step": 1188
},
{
"epoch": 0.77,
"grad_norm": 0.050048828125,
"learning_rate": 2.584080406719871e-06,
"loss": 0.8881,
"step": 1189
},
{
"epoch": 0.77,
"grad_norm": 0.052490234375,
"learning_rate": 2.56996179334295e-06,
"loss": 0.8113,
"step": 1190
},
{
"epoch": 0.77,
"grad_norm": 0.056884765625,
"learning_rate": 2.555876166398218e-06,
"loss": 0.834,
"step": 1191
},
{
"epoch": 0.78,
"grad_norm": 0.0517578125,
"learning_rate": 2.5418235884202867e-06,
"loss": 0.8203,
"step": 1192
},
{
"epoch": 0.78,
"grad_norm": 0.052734375,
"learning_rate": 2.5278041217970485e-06,
"loss": 0.8383,
"step": 1193
},
{
"epoch": 0.78,
"grad_norm": 0.050048828125,
"learning_rate": 2.513817828769396e-06,
"loss": 0.8427,
"step": 1194
},
{
"epoch": 0.78,
"grad_norm": 0.052734375,
"learning_rate": 2.499864771430934e-06,
"loss": 0.8696,
"step": 1195
},
{
"epoch": 0.78,
"grad_norm": 0.054443359375,
"learning_rate": 2.485945011727725e-06,
"loss": 0.8183,
"step": 1196
},
{
"epoch": 0.78,
"grad_norm": 0.0517578125,
"learning_rate": 2.4720586114579982e-06,
"loss": 0.8467,
"step": 1197
},
{
"epoch": 0.78,
"grad_norm": 0.0732421875,
"learning_rate": 2.4582056322718808e-06,
"loss": 0.8259,
"step": 1198
},
{
"epoch": 0.78,
"grad_norm": 0.05126953125,
"learning_rate": 2.4443861356711263e-06,
"loss": 0.7741,
"step": 1199
},
{
"epoch": 0.78,
"grad_norm": 0.0546875,
"learning_rate": 2.43060018300883e-06,
"loss": 0.7944,
"step": 1200
},
{
"epoch": 0.78,
"grad_norm": 0.0537109375,
"learning_rate": 2.4168478354891766e-06,
"loss": 0.8229,
"step": 1201
},
{
"epoch": 0.78,
"grad_norm": 0.05078125,
"learning_rate": 2.403129154167153e-06,
"loss": 0.8489,
"step": 1202
},
{
"epoch": 0.78,
"grad_norm": 0.052734375,
"learning_rate": 2.3894441999482744e-06,
"loss": 0.8031,
"step": 1203
},
{
"epoch": 0.78,
"grad_norm": 0.056396484375,
"learning_rate": 2.3757930335883315e-06,
"loss": 0.8906,
"step": 1204
},
{
"epoch": 0.78,
"grad_norm": 0.054443359375,
"learning_rate": 2.3621757156931056e-06,
"loss": 0.7922,
"step": 1205
},
{
"epoch": 0.78,
"grad_norm": 0.057373046875,
"learning_rate": 2.348592306718105e-06,
"loss": 0.8033,
"step": 1206
},
{
"epoch": 0.78,
"grad_norm": 0.0537109375,
"learning_rate": 2.335042866968298e-06,
"loss": 0.8245,
"step": 1207
},
{
"epoch": 0.79,
"grad_norm": 0.052001953125,
"learning_rate": 2.321527456597833e-06,
"loss": 0.8246,
"step": 1208
},
{
"epoch": 0.79,
"grad_norm": 0.057861328125,
"learning_rate": 2.3080461356097938e-06,
"loss": 0.8141,
"step": 1209
},
{
"epoch": 0.79,
"grad_norm": 0.051025390625,
"learning_rate": 2.2945989638559172e-06,
"loss": 0.8116,
"step": 1210
},
{
"epoch": 0.79,
"grad_norm": 0.05615234375,
"learning_rate": 2.2811860010363252e-06,
"loss": 0.8415,
"step": 1211
},
{
"epoch": 0.79,
"grad_norm": 0.05419921875,
"learning_rate": 2.267807306699271e-06,
"loss": 0.8137,
"step": 1212
},
{
"epoch": 0.79,
"grad_norm": 0.05517578125,
"learning_rate": 2.2544629402408725e-06,
"loss": 0.8255,
"step": 1213
},
{
"epoch": 0.79,
"grad_norm": 0.056884765625,
"learning_rate": 2.2411529609048343e-06,
"loss": 0.9124,
"step": 1214
},
{
"epoch": 0.79,
"grad_norm": 0.05322265625,
"learning_rate": 2.227877427782207e-06,
"loss": 0.7985,
"step": 1215
},
{
"epoch": 0.79,
"grad_norm": 0.05126953125,
"learning_rate": 2.2146363998111077e-06,
"loss": 0.8754,
"step": 1216
},
{
"epoch": 0.79,
"grad_norm": 0.056640625,
"learning_rate": 2.201429935776466e-06,
"loss": 0.7956,
"step": 1217
},
{
"epoch": 0.79,
"grad_norm": 0.05419921875,
"learning_rate": 2.1882580943097643e-06,
"loss": 0.894,
"step": 1218
},
{
"epoch": 0.79,
"grad_norm": 0.052978515625,
"learning_rate": 2.1751209338887656e-06,
"loss": 0.8416,
"step": 1219
},
{
"epoch": 0.79,
"grad_norm": 0.0556640625,
"learning_rate": 2.1620185128372718e-06,
"loss": 0.754,
"step": 1220
},
{
"epoch": 0.79,
"grad_norm": 0.05126953125,
"learning_rate": 2.1489508893248534e-06,
"loss": 0.7848,
"step": 1221
},
{
"epoch": 0.79,
"grad_norm": 0.054931640625,
"learning_rate": 2.1359181213665892e-06,
"loss": 0.841,
"step": 1222
},
{
"epoch": 0.8,
"grad_norm": 0.052490234375,
"learning_rate": 2.1229202668228197e-06,
"loss": 0.8385,
"step": 1223
},
{
"epoch": 0.8,
"grad_norm": 0.05078125,
"learning_rate": 2.1099573833988774e-06,
"loss": 0.8484,
"step": 1224
},
{
"epoch": 0.8,
"grad_norm": 0.059326171875,
"learning_rate": 2.0970295286448406e-06,
"loss": 0.797,
"step": 1225
},
{
"epoch": 0.8,
"grad_norm": 0.05419921875,
"learning_rate": 2.0841367599552733e-06,
"loss": 0.8927,
"step": 1226
},
{
"epoch": 0.8,
"grad_norm": 0.0517578125,
"learning_rate": 2.071279134568973e-06,
"loss": 0.841,
"step": 1227
},
{
"epoch": 0.8,
"grad_norm": 0.052001953125,
"learning_rate": 2.0584567095687126e-06,
"loss": 0.7737,
"step": 1228
},
{
"epoch": 0.8,
"grad_norm": 0.051025390625,
"learning_rate": 2.0456695418809915e-06,
"loss": 0.8007,
"step": 1229
},
{
"epoch": 0.8,
"grad_norm": 0.05322265625,
"learning_rate": 2.032917688275774e-06,
"loss": 0.791,
"step": 1230
},
{
"epoch": 0.8,
"grad_norm": 0.056396484375,
"learning_rate": 2.0202012053662547e-06,
"loss": 0.8262,
"step": 1231
},
{
"epoch": 0.8,
"grad_norm": 0.050048828125,
"learning_rate": 2.007520149608584e-06,
"loss": 0.76,
"step": 1232
},
{
"epoch": 0.8,
"grad_norm": 0.055419921875,
"learning_rate": 1.9948745773016396e-06,
"loss": 0.7102,
"step": 1233
},
{
"epoch": 0.8,
"grad_norm": 0.0517578125,
"learning_rate": 1.982264544586765e-06,
"loss": 0.8655,
"step": 1234
},
{
"epoch": 0.8,
"grad_norm": 0.05517578125,
"learning_rate": 1.9696901074475127e-06,
"loss": 0.8823,
"step": 1235
},
{
"epoch": 0.8,
"grad_norm": 0.056396484375,
"learning_rate": 1.9571513217094206e-06,
"loss": 0.8059,
"step": 1236
},
{
"epoch": 0.8,
"grad_norm": 0.05419921875,
"learning_rate": 1.9446482430397383e-06,
"loss": 0.855,
"step": 1237
},
{
"epoch": 0.8,
"grad_norm": 0.052490234375,
"learning_rate": 1.932180926947189e-06,
"loss": 0.8311,
"step": 1238
},
{
"epoch": 0.81,
"grad_norm": 0.05517578125,
"learning_rate": 1.919749428781732e-06,
"loss": 0.8034,
"step": 1239
},
{
"epoch": 0.81,
"grad_norm": 0.05322265625,
"learning_rate": 1.9073538037342975e-06,
"loss": 0.842,
"step": 1240
},
{
"epoch": 0.81,
"grad_norm": 0.04931640625,
"learning_rate": 1.8949941068365652e-06,
"loss": 0.8793,
"step": 1241
},
{
"epoch": 0.81,
"grad_norm": 0.05029296875,
"learning_rate": 1.8826703929607037e-06,
"loss": 0.8284,
"step": 1242
},
{
"epoch": 0.81,
"grad_norm": 0.05810546875,
"learning_rate": 1.8703827168191256e-06,
"loss": 0.789,
"step": 1243
},
{
"epoch": 0.81,
"grad_norm": 0.049072265625,
"learning_rate": 1.8581311329642592e-06,
"loss": 0.7761,
"step": 1244
},
{
"epoch": 0.81,
"grad_norm": 0.048583984375,
"learning_rate": 1.8459156957882906e-06,
"loss": 0.8019,
"step": 1245
},
{
"epoch": 0.81,
"grad_norm": 0.05224609375,
"learning_rate": 1.8337364595229335e-06,
"loss": 0.8432,
"step": 1246
},
{
"epoch": 0.81,
"grad_norm": 0.0595703125,
"learning_rate": 1.8215934782391808e-06,
"loss": 0.8811,
"step": 1247
},
{
"epoch": 0.81,
"grad_norm": 0.053466796875,
"learning_rate": 1.8094868058470716e-06,
"loss": 0.8087,
"step": 1248
},
{
"epoch": 0.81,
"grad_norm": 0.051025390625,
"learning_rate": 1.797416496095441e-06,
"loss": 0.8373,
"step": 1249
},
{
"epoch": 0.81,
"grad_norm": 0.0537109375,
"learning_rate": 1.7853826025716959e-06,
"loss": 0.783,
"step": 1250
},
{
"epoch": 0.81,
"grad_norm": 0.054931640625,
"learning_rate": 1.773385178701561e-06,
"loss": 0.8308,
"step": 1251
},
{
"epoch": 0.81,
"grad_norm": 0.050048828125,
"learning_rate": 1.7614242777488577e-06,
"loss": 0.8137,
"step": 1252
},
{
"epoch": 0.81,
"grad_norm": 0.0595703125,
"learning_rate": 1.74949995281526e-06,
"loss": 0.8278,
"step": 1253
},
{
"epoch": 0.82,
"grad_norm": 0.056640625,
"learning_rate": 1.7376122568400533e-06,
"loss": 0.7838,
"step": 1254
},
{
"epoch": 0.82,
"grad_norm": 0.056884765625,
"learning_rate": 1.7257612425999071e-06,
"loss": 0.8916,
"step": 1255
},
{
"epoch": 0.82,
"grad_norm": 0.052490234375,
"learning_rate": 1.7139469627086425e-06,
"loss": 0.8322,
"step": 1256
},
{
"epoch": 0.82,
"grad_norm": 0.050537109375,
"learning_rate": 1.7021694696169888e-06,
"loss": 0.8138,
"step": 1257
},
{
"epoch": 0.82,
"grad_norm": 0.050537109375,
"learning_rate": 1.690428815612364e-06,
"loss": 0.8311,
"step": 1258
},
{
"epoch": 0.82,
"grad_norm": 0.054931640625,
"learning_rate": 1.6787250528186228e-06,
"loss": 0.7946,
"step": 1259
},
{
"epoch": 0.82,
"grad_norm": 0.05419921875,
"learning_rate": 1.6670582331958496e-06,
"loss": 0.8173,
"step": 1260
},
{
"epoch": 0.82,
"grad_norm": 0.049560546875,
"learning_rate": 1.6554284085401129e-06,
"loss": 0.7441,
"step": 1261
},
{
"epoch": 0.82,
"grad_norm": 0.05517578125,
"learning_rate": 1.6438356304832316e-06,
"loss": 0.7828,
"step": 1262
},
{
"epoch": 0.82,
"grad_norm": 0.049072265625,
"learning_rate": 1.6322799504925578e-06,
"loss": 0.7149,
"step": 1263
},
{
"epoch": 0.82,
"grad_norm": 0.05224609375,
"learning_rate": 1.6207614198707466e-06,
"loss": 0.8026,
"step": 1264
},
{
"epoch": 0.82,
"grad_norm": 0.048583984375,
"learning_rate": 1.609280089755515e-06,
"loss": 0.7968,
"step": 1265
},
{
"epoch": 0.82,
"grad_norm": 0.05078125,
"learning_rate": 1.5978360111194313e-06,
"loss": 0.7701,
"step": 1266
},
{
"epoch": 0.82,
"grad_norm": 0.0517578125,
"learning_rate": 1.5864292347696808e-06,
"loss": 0.7944,
"step": 1267
},
{
"epoch": 0.82,
"grad_norm": 0.05322265625,
"learning_rate": 1.57505981134784e-06,
"loss": 0.8619,
"step": 1268
},
{
"epoch": 0.83,
"grad_norm": 0.051513671875,
"learning_rate": 1.5637277913296579e-06,
"loss": 0.8243,
"step": 1269
},
{
"epoch": 0.83,
"grad_norm": 0.051513671875,
"learning_rate": 1.5524332250248165e-06,
"loss": 0.8036,
"step": 1270
},
{
"epoch": 0.83,
"grad_norm": 0.05419921875,
"learning_rate": 1.5411761625767296e-06,
"loss": 0.766,
"step": 1271
},
{
"epoch": 0.83,
"grad_norm": 0.05224609375,
"learning_rate": 1.5299566539623046e-06,
"loss": 0.8959,
"step": 1272
},
{
"epoch": 0.83,
"grad_norm": 0.138671875,
"learning_rate": 1.5187747489917204e-06,
"loss": 0.7868,
"step": 1273
},
{
"epoch": 0.83,
"grad_norm": 0.05078125,
"learning_rate": 1.5076304973082156e-06,
"loss": 0.8892,
"step": 1274
},
{
"epoch": 0.83,
"grad_norm": 0.053955078125,
"learning_rate": 1.4965239483878636e-06,
"loss": 0.8054,
"step": 1275
},
{
"epoch": 0.83,
"grad_norm": 0.049072265625,
"learning_rate": 1.485455151539349e-06,
"loss": 0.8192,
"step": 1276
},
{
"epoch": 0.83,
"grad_norm": 0.052734375,
"learning_rate": 1.4744241559037576e-06,
"loss": 0.7824,
"step": 1277
},
{
"epoch": 0.83,
"grad_norm": 0.05322265625,
"learning_rate": 1.4634310104543426e-06,
"loss": 0.7301,
"step": 1278
},
{
"epoch": 0.83,
"grad_norm": 0.051025390625,
"learning_rate": 1.452475763996326e-06,
"loss": 0.7545,
"step": 1279
},
{
"epoch": 0.83,
"grad_norm": 0.053955078125,
"learning_rate": 1.4415584651666759e-06,
"loss": 0.8269,
"step": 1280
},
{
"epoch": 0.83,
"grad_norm": 0.0537109375,
"learning_rate": 1.4306791624338745e-06,
"loss": 0.8573,
"step": 1281
},
{
"epoch": 0.83,
"grad_norm": 0.051513671875,
"learning_rate": 1.4198379040977328e-06,
"loss": 0.8815,
"step": 1282
},
{
"epoch": 0.83,
"grad_norm": 0.055419921875,
"learning_rate": 1.4090347382891457e-06,
"loss": 0.8565,
"step": 1283
},
{
"epoch": 0.83,
"grad_norm": 0.050048828125,
"learning_rate": 1.3982697129699008e-06,
"loss": 0.8012,
"step": 1284
},
{
"epoch": 0.84,
"grad_norm": 0.05126953125,
"learning_rate": 1.387542875932454e-06,
"loss": 0.758,
"step": 1285
},
{
"epoch": 0.84,
"grad_norm": 0.049560546875,
"learning_rate": 1.3768542747997215e-06,
"loss": 0.8068,
"step": 1286
},
{
"epoch": 0.84,
"grad_norm": 0.052490234375,
"learning_rate": 1.3662039570248676e-06,
"loss": 0.8396,
"step": 1287
},
{
"epoch": 0.84,
"grad_norm": 0.05322265625,
"learning_rate": 1.3555919698910924e-06,
"loss": 0.7952,
"step": 1288
},
{
"epoch": 0.84,
"grad_norm": 0.052001953125,
"learning_rate": 1.345018360511422e-06,
"loss": 0.8341,
"step": 1289
},
{
"epoch": 0.84,
"grad_norm": 0.057861328125,
"learning_rate": 1.3344831758285036e-06,
"loss": 0.7865,
"step": 1290
},
{
"epoch": 0.84,
"grad_norm": 0.0634765625,
"learning_rate": 1.3239864626143883e-06,
"loss": 0.7917,
"step": 1291
},
{
"epoch": 0.84,
"grad_norm": 0.050048828125,
"learning_rate": 1.3135282674703364e-06,
"loss": 0.7564,
"step": 1292
},
{
"epoch": 0.84,
"grad_norm": 0.0537109375,
"learning_rate": 1.3031086368266e-06,
"loss": 0.8057,
"step": 1293
},
{
"epoch": 0.84,
"grad_norm": 0.05615234375,
"learning_rate": 1.2927276169422176e-06,
"loss": 0.8225,
"step": 1294
},
{
"epoch": 0.84,
"grad_norm": 0.05322265625,
"learning_rate": 1.2823852539048133e-06,
"loss": 0.7773,
"step": 1295
},
{
"epoch": 0.84,
"grad_norm": 0.053955078125,
"learning_rate": 1.272081593630392e-06,
"loss": 0.828,
"step": 1296
},
{
"epoch": 0.84,
"grad_norm": 0.0498046875,
"learning_rate": 1.2618166818631306e-06,
"loss": 0.8241,
"step": 1297
},
{
"epoch": 0.84,
"grad_norm": 0.05224609375,
"learning_rate": 1.2515905641751824e-06,
"loss": 0.8284,
"step": 1298
},
{
"epoch": 0.84,
"grad_norm": 0.053955078125,
"learning_rate": 1.2414032859664615e-06,
"loss": 0.8259,
"step": 1299
},
{
"epoch": 0.85,
"grad_norm": 0.056640625,
"learning_rate": 1.2312548924644585e-06,
"loss": 0.8019,
"step": 1300
},
{
"epoch": 0.85,
"grad_norm": 0.051025390625,
"learning_rate": 1.2211454287240299e-06,
"loss": 0.7128,
"step": 1301
},
{
"epoch": 0.85,
"grad_norm": 0.054443359375,
"learning_rate": 1.2110749396271948e-06,
"loss": 0.7937,
"step": 1302
},
{
"epoch": 0.85,
"grad_norm": 0.052978515625,
"learning_rate": 1.2010434698829442e-06,
"loss": 0.736,
"step": 1303
},
{
"epoch": 0.85,
"grad_norm": 0.05322265625,
"learning_rate": 1.191051064027039e-06,
"loss": 0.7393,
"step": 1304
},
{
"epoch": 0.85,
"grad_norm": 0.0517578125,
"learning_rate": 1.1810977664218105e-06,
"loss": 0.7792,
"step": 1305
},
{
"epoch": 0.85,
"grad_norm": 0.055419921875,
"learning_rate": 1.1711836212559646e-06,
"loss": 0.8096,
"step": 1306
},
{
"epoch": 0.85,
"grad_norm": 0.056396484375,
"learning_rate": 1.161308672544389e-06,
"loss": 0.8582,
"step": 1307
},
{
"epoch": 0.85,
"grad_norm": 0.0498046875,
"learning_rate": 1.1514729641279476e-06,
"loss": 0.741,
"step": 1308
},
{
"epoch": 0.85,
"grad_norm": 0.053466796875,
"learning_rate": 1.141676539673301e-06,
"loss": 0.8262,
"step": 1309
},
{
"epoch": 0.85,
"grad_norm": 0.050537109375,
"learning_rate": 1.1319194426726965e-06,
"loss": 0.758,
"step": 1310
},
{
"epoch": 0.85,
"grad_norm": 0.052001953125,
"learning_rate": 1.1222017164437903e-06,
"loss": 0.8233,
"step": 1311
},
{
"epoch": 0.85,
"grad_norm": 0.0546875,
"learning_rate": 1.1125234041294465e-06,
"loss": 0.8641,
"step": 1312
},
{
"epoch": 0.85,
"grad_norm": 0.04931640625,
"learning_rate": 1.1028845486975404e-06,
"loss": 0.8025,
"step": 1313
},
{
"epoch": 0.85,
"grad_norm": 0.0498046875,
"learning_rate": 1.0932851929407828e-06,
"loss": 0.7808,
"step": 1314
},
{
"epoch": 0.86,
"grad_norm": 0.053466796875,
"learning_rate": 1.083725379476519e-06,
"loss": 0.7964,
"step": 1315
},
{
"epoch": 0.86,
"grad_norm": 0.05078125,
"learning_rate": 1.074205150746539e-06,
"loss": 0.8107,
"step": 1316
},
{
"epoch": 0.86,
"grad_norm": 0.05419921875,
"learning_rate": 1.0647245490168978e-06,
"loss": 0.8743,
"step": 1317
},
{
"epoch": 0.86,
"grad_norm": 0.054443359375,
"learning_rate": 1.0552836163777148e-06,
"loss": 0.8072,
"step": 1318
},
{
"epoch": 0.86,
"grad_norm": 0.056640625,
"learning_rate": 1.0458823947430009e-06,
"loss": 0.8139,
"step": 1319
},
{
"epoch": 0.86,
"grad_norm": 0.056396484375,
"learning_rate": 1.0365209258504648e-06,
"loss": 0.8526,
"step": 1320
},
{
"epoch": 0.86,
"grad_norm": 0.056396484375,
"learning_rate": 1.0271992512613239e-06,
"loss": 0.8614,
"step": 1321
},
{
"epoch": 0.86,
"grad_norm": 0.0576171875,
"learning_rate": 1.0179174123601288e-06,
"loss": 0.879,
"step": 1322
},
{
"epoch": 0.86,
"grad_norm": 0.052490234375,
"learning_rate": 1.0086754503545782e-06,
"loss": 0.7704,
"step": 1323
},
{
"epoch": 0.86,
"grad_norm": 0.053955078125,
"learning_rate": 9.994734062753253e-07,
"loss": 0.8364,
"step": 1324
},
{
"epoch": 0.86,
"grad_norm": 0.0517578125,
"learning_rate": 9.903113209758098e-07,
"loss": 0.7597,
"step": 1325
},
{
"epoch": 0.86,
"grad_norm": 0.061767578125,
"learning_rate": 9.811892351320673e-07,
"loss": 0.8455,
"step": 1326
},
{
"epoch": 0.86,
"grad_norm": 0.060791015625,
"learning_rate": 9.721071892425527e-07,
"loss": 0.8715,
"step": 1327
},
{
"epoch": 0.86,
"grad_norm": 0.0546875,
"learning_rate": 9.630652236279626e-07,
"loss": 0.7367,
"step": 1328
},
{
"epoch": 0.86,
"grad_norm": 0.049560546875,
"learning_rate": 9.540633784310439e-07,
"loss": 0.7387,
"step": 1329
},
{
"epoch": 0.86,
"grad_norm": 0.06640625,
"learning_rate": 9.451016936164337e-07,
"loss": 0.8088,
"step": 1330
},
{
"epoch": 0.87,
"grad_norm": 0.05419921875,
"learning_rate": 9.361802089704708e-07,
"loss": 0.8717,
"step": 1331
},
{
"epoch": 0.87,
"grad_norm": 0.054443359375,
"learning_rate": 9.272989641010166e-07,
"loss": 0.8701,
"step": 1332
},
{
"epoch": 0.87,
"grad_norm": 0.057373046875,
"learning_rate": 9.184579984372898e-07,
"loss": 0.8088,
"step": 1333
},
{
"epoch": 0.87,
"grad_norm": 0.054443359375,
"learning_rate": 9.096573512296802e-07,
"loss": 0.8254,
"step": 1334
},
{
"epoch": 0.87,
"grad_norm": 0.052734375,
"learning_rate": 9.008970615495827e-07,
"loss": 0.802,
"step": 1335
},
{
"epoch": 0.87,
"grad_norm": 0.05126953125,
"learning_rate": 8.921771682892233e-07,
"loss": 0.8359,
"step": 1336
},
{
"epoch": 0.87,
"grad_norm": 0.052734375,
"learning_rate": 8.834977101614772e-07,
"loss": 0.7697,
"step": 1337
},
{
"epoch": 0.87,
"grad_norm": 0.08837890625,
"learning_rate": 8.748587256997076e-07,
"loss": 0.7886,
"step": 1338
},
{
"epoch": 0.87,
"grad_norm": 0.052734375,
"learning_rate": 8.662602532575936e-07,
"loss": 0.861,
"step": 1339
},
{
"epoch": 0.87,
"grad_norm": 0.052001953125,
"learning_rate": 8.577023310089483e-07,
"loss": 0.7782,
"step": 1340
},
{
"epoch": 0.87,
"grad_norm": 0.055908203125,
"learning_rate": 8.491849969475663e-07,
"loss": 0.8152,
"step": 1341
},
{
"epoch": 0.87,
"grad_norm": 0.05322265625,
"learning_rate": 8.407082888870422e-07,
"loss": 0.8611,
"step": 1342
},
{
"epoch": 0.87,
"grad_norm": 0.054931640625,
"learning_rate": 8.322722444606079e-07,
"loss": 0.7826,
"step": 1343
},
{
"epoch": 0.87,
"grad_norm": 0.05126953125,
"learning_rate": 8.238769011209668e-07,
"loss": 0.8387,
"step": 1344
},
{
"epoch": 0.87,
"grad_norm": 0.0546875,
"learning_rate": 8.155222961401244e-07,
"loss": 0.835,
"step": 1345
},
{
"epoch": 0.88,
"grad_norm": 0.055908203125,
"learning_rate": 8.072084666092228e-07,
"loss": 0.8613,
"step": 1346
},
{
"epoch": 0.88,
"grad_norm": 0.052978515625,
"learning_rate": 7.989354494383816e-07,
"loss": 0.8169,
"step": 1347
},
{
"epoch": 0.88,
"grad_norm": 0.049560546875,
"learning_rate": 7.907032813565208e-07,
"loss": 0.8156,
"step": 1348
},
{
"epoch": 0.88,
"grad_norm": 0.050048828125,
"learning_rate": 7.825119989112173e-07,
"loss": 0.801,
"step": 1349
},
{
"epoch": 0.88,
"grad_norm": 0.05224609375,
"learning_rate": 7.743616384685226e-07,
"loss": 0.7801,
"step": 1350
},
{
"epoch": 0.88,
"grad_norm": 0.049072265625,
"learning_rate": 7.662522362128166e-07,
"loss": 0.7665,
"step": 1351
},
{
"epoch": 0.88,
"grad_norm": 0.05224609375,
"learning_rate": 7.581838281466414e-07,
"loss": 0.8116,
"step": 1352
},
{
"epoch": 0.88,
"grad_norm": 0.056884765625,
"learning_rate": 7.501564500905345e-07,
"loss": 0.854,
"step": 1353
},
{
"epoch": 0.88,
"grad_norm": 0.05615234375,
"learning_rate": 7.421701376828838e-07,
"loss": 0.8209,
"step": 1354
},
{
"epoch": 0.88,
"grad_norm": 0.0556640625,
"learning_rate": 7.342249263797574e-07,
"loss": 0.8141,
"step": 1355
},
{
"epoch": 0.88,
"grad_norm": 0.050537109375,
"learning_rate": 7.263208514547548e-07,
"loss": 0.7355,
"step": 1356
},
{
"epoch": 0.88,
"grad_norm": 0.05322265625,
"learning_rate": 7.18457947998843e-07,
"loss": 0.801,
"step": 1357
},
{
"epoch": 0.88,
"grad_norm": 0.04931640625,
"learning_rate": 7.106362509202036e-07,
"loss": 0.7248,
"step": 1358
},
{
"epoch": 0.88,
"grad_norm": 0.054443359375,
"learning_rate": 7.028557949440784e-07,
"loss": 0.8608,
"step": 1359
},
{
"epoch": 0.88,
"grad_norm": 0.053466796875,
"learning_rate": 6.95116614612621e-07,
"loss": 0.8288,
"step": 1360
},
{
"epoch": 0.88,
"grad_norm": 0.053955078125,
"learning_rate": 6.874187442847258e-07,
"loss": 0.9059,
"step": 1361
},
{
"epoch": 0.89,
"grad_norm": 0.053955078125,
"learning_rate": 6.797622181358976e-07,
"loss": 0.7799,
"step": 1362
},
{
"epoch": 0.89,
"grad_norm": 0.0517578125,
"learning_rate": 6.721470701580856e-07,
"loss": 0.7829,
"step": 1363
},
{
"epoch": 0.89,
"grad_norm": 0.051025390625,
"learning_rate": 6.64573334159534e-07,
"loss": 0.7842,
"step": 1364
},
{
"epoch": 0.89,
"grad_norm": 0.0537109375,
"learning_rate": 6.57041043764638e-07,
"loss": 0.8756,
"step": 1365
},
{
"epoch": 0.89,
"grad_norm": 0.0625,
"learning_rate": 6.495502324137892e-07,
"loss": 0.8221,
"step": 1366
},
{
"epoch": 0.89,
"grad_norm": 0.0517578125,
"learning_rate": 6.421009333632266e-07,
"loss": 0.7969,
"step": 1367
},
{
"epoch": 0.89,
"grad_norm": 0.05078125,
"learning_rate": 6.346931796848977e-07,
"loss": 0.7808,
"step": 1368
},
{
"epoch": 0.89,
"grad_norm": 0.0517578125,
"learning_rate": 6.273270042662937e-07,
"loss": 0.8596,
"step": 1369
},
{
"epoch": 0.89,
"grad_norm": 0.051513671875,
"learning_rate": 6.200024398103255e-07,
"loss": 0.7704,
"step": 1370
},
{
"epoch": 0.89,
"grad_norm": 0.0546875,
"learning_rate": 6.127195188351631e-07,
"loss": 0.8318,
"step": 1371
},
{
"epoch": 0.89,
"grad_norm": 0.05517578125,
"learning_rate": 6.05478273674095e-07,
"loss": 0.7964,
"step": 1372
},
{
"epoch": 0.89,
"grad_norm": 0.05224609375,
"learning_rate": 5.982787364753873e-07,
"loss": 0.7835,
"step": 1373
},
{
"epoch": 0.89,
"grad_norm": 0.0556640625,
"learning_rate": 5.911209392021399e-07,
"loss": 0.8853,
"step": 1374
},
{
"epoch": 0.89,
"grad_norm": 0.05224609375,
"learning_rate": 5.840049136321413e-07,
"loss": 0.7487,
"step": 1375
},
{
"epoch": 0.89,
"grad_norm": 0.05078125,
"learning_rate": 5.769306913577344e-07,
"loss": 0.7896,
"step": 1376
},
{
"epoch": 0.9,
"grad_norm": 0.052978515625,
"learning_rate": 5.698983037856665e-07,
"loss": 0.8435,
"step": 1377
},
{
"epoch": 0.9,
"grad_norm": 0.05517578125,
"learning_rate": 5.629077821369577e-07,
"loss": 0.8227,
"step": 1378
},
{
"epoch": 0.9,
"grad_norm": 0.0498046875,
"learning_rate": 5.559591574467649e-07,
"loss": 0.7761,
"step": 1379
},
{
"epoch": 0.9,
"grad_norm": 0.06005859375,
"learning_rate": 5.490524605642289e-07,
"loss": 0.9768,
"step": 1380
},
{
"epoch": 0.9,
"grad_norm": 0.054443359375,
"learning_rate": 5.421877221523564e-07,
"loss": 0.8819,
"step": 1381
},
{
"epoch": 0.9,
"grad_norm": 0.056640625,
"learning_rate": 5.353649726878741e-07,
"loss": 0.8356,
"step": 1382
},
{
"epoch": 0.9,
"grad_norm": 0.05126953125,
"learning_rate": 5.285842424610865e-07,
"loss": 0.7716,
"step": 1383
},
{
"epoch": 0.9,
"grad_norm": 0.053955078125,
"learning_rate": 5.218455615757601e-07,
"loss": 0.804,
"step": 1384
},
{
"epoch": 0.9,
"grad_norm": 0.0517578125,
"learning_rate": 5.151489599489712e-07,
"loss": 0.8088,
"step": 1385
},
{
"epoch": 0.9,
"grad_norm": 0.059326171875,
"learning_rate": 5.084944673109837e-07,
"loss": 0.774,
"step": 1386
}
],
"logging_steps": 1,
"max_steps": 1538,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 462,
"total_flos": 3.8023330887389676e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}